<?xml version="1.0" encoding="utf-8"?><!--<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"--> <!-- android:layout_width="match_parent"--> <!-- android:layout_height="match_parent"--> <!-- android:orientation="vertical">--> <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" android:layout_width="match_parent" android:layout_height="match_parent" android:orientation="vertical"> <LinearLayout android:id="@+id/layout_configure_audio" android:layout_width="match_parent" android:layout_height="wrap_content" android:orientation="vertical"> <FrameLayout android:layout_width="match_parent" android:layout_height="wrap_content" android:orientation="horizontal"> <LinearLayout android:layout_width="match_parent" android:layout_height="match_parent" android:layout_gravity="left" android:layout_marginRight="60dp" android:orientation="vertical"> <TextView android:id="@+id/configure_audio_title" style="@style/AppTextView" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_alignParentLeft="true" android:layout_alignParentTop="true" android:inputType="textMultiLine|textNoSuggestions" android:scrollHorizontally="false" android:text="> audio volume" android:textColor="#FFFFFF" android:textSize="@dimen/normal_text_size" /> <TextView android:id="@+id/configure_audio_subtitle" style="@style/AppTextView" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_marginTop="@dimen/margin_title_subtitle" android:inputType="textMultiLine|textNoSuggestions" android:scrollHorizontally="false" android:text="configure volume of playback" android:textColor="#C4BCAF" android:textSize="@dimen/normal_text_small" /> </LinearLayout> </FrameLayout> <View android:layout_width="match_parent" android:layout_height="1dp" android:layout_marginTop="15dp" android:layout_marginBottom="15dp" android:background="#08C4BCAF" /> </LinearLayout> <LinearLayout android:id="@+id/layout_music_dir" android:layout_width="match_parent" android:layout_height="wrap_content" android:orientation="vertical"> <FrameLayout android:layout_width="match_parent" android:layout_height="wrap_content" android:orientation="horizontal"> <LinearLayout android:layout_width="match_parent" android:layout_height="match_parent" android:layout_gravity="left" android:layout_marginRight="60dp" android:orientation="vertical"> <TextView android:id="@+id/music_dir_title" style="@style/AppTextView" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_alignParentLeft="true" android:layout_alignParentTop="true" android:inputType="textMultiLine|textNoSuggestions" android:scrollHorizontally="false" android:text="> music directory" android:textColor="#FFFFFF" android:textSize="@dimen/normal_text_size" /> <TextView android:id="@+id/music_dir_subtitle" style="@style/AppTextView" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_marginTop="@dimen/margin_title_subtitle" android:inputType="textMultiLine|textNoSuggestions" android:scrollHorizontally="false" android:text="directory is not set" android:textColor="#C4BCAF" android:textSize="@dimen/normal_text_small" /> </LinearLayout> </FrameLayout> <View android:layout_width="match_parent" android:layout_height="1dp" android:layout_marginTop="15dp" android:layout_marginBottom="15dp" android:background="#08C4BCAF" /> </LinearLayout> <LinearLayout android:id="@+id/layout_play_until_end" android:layout_width="match_parent" android:layout_height="wrap_content" 
android:orientation="vertical"> <FrameLayout android:layout_width="match_parent" android:layout_height="wrap_content" android:orientation="horizontal"> <LinearLayout android:layout_width="match_parent" android:layout_height="match_parent" android:layout_gravity="left" android:layout_marginRight="@dimen/margin_right_subtitle_switch" android:orientation="vertical"> <TextView android:id="@+id/local_music_title_play_until_end" style="@style/AppTextView" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_alignParentLeft="true" android:layout_alignParentTop="true" android:inputType="textMultiLine|textNoSuggestions" android:scrollHorizontally="false" android:text="> play until end" android:textColor="#FFFFFF" android:textSize="@dimen/normal_text_size" /> <TextView android:id="@+id/local_music_play_until_end_subtext" style="@style/AppTextView" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_marginTop="@dimen/margin_title_subtitle" android:inputType="textMultiLine|textNoSuggestions" android:scrollHorizontally="false" android:text="play music until end before unmuting ad" android:textColor="#C4BCAF" android:textSize="@dimen/normal_text_small" /> </LinearLayout> <android.support.v7.widget.SwitchCompat android:id="@+id/local_music_play_until_end_switch" android:layout_width="wrap_content" android:layout_height="match_parent" android:layout_alignParentTop="true" android:layout_alignParentEnd="true" android:layout_gravity="right|center" android:layout_marginTop="0dp" android:layout_marginRight="0dp" android:gravity="center_horizontal|center_vertical" /> </FrameLayout> <View android:layout_width="match_parent" android:layout_height="1dp" android:layout_marginTop="15dp" android:layout_marginBottom="15dp" android:background="#08C4BCAF" /> </LinearLayout> <LinearLayout android:id="@+id/layout_loop" android:layout_width="match_parent" android:layout_height="wrap_content" android:orientation="vertical"> <FrameLayout android:layout_width="match_parent" android:layout_height="wrap_content" android:orientation="horizontal"> <LinearLayout android:layout_width="match_parent" android:layout_height="match_parent" android:layout_gravity="left" android:layout_marginRight="@dimen/margin_right_subtitle_switch" android:orientation="vertical"> <TextView android:id="@+id/local_music_title_loop" style="@style/AppTextView" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_alignParentLeft="true" android:layout_alignParentTop="true" android:inputType="textMultiLine|textNoSuggestions" android:scrollHorizontally="false" android:text="> loop music" android:textColor="#FFFFFF" android:textSize="@dimen/normal_text_size" /> <TextView android:id="@+id/local_music_loop_subtext" style="@style/AppTextView" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_marginTop="@dimen/margin_title_subtitle" android:inputType="textMultiLine|textNoSuggestions" android:scrollHorizontally="false" android:text="loop music until end of ad" android:textColor="#C4BCAF" android:textSize="@dimen/normal_text_small" /> </LinearLayout> <android.support.v7.widget.SwitchCompat android:id="@+id/local_music_loop_switch" android:layout_width="wrap_content" android:layout_height="match_parent" android:layout_alignParentTop="true" android:layout_alignParentEnd="true" android:layout_gravity="right|center" android:layout_marginTop="0dp" android:layout_marginRight="0dp" android:gravity="center_horizontal|center_vertical" 
/> </FrameLayout> <!-- <View--> <!-- android:layout_width="match_parent"--> <!-- android:layout_height="1dp"--> <!-- android:layout_marginTop="15dp"--> <!-- android:layout_marginBottom="15dp"--> <!-- android:background="#08C4BCAF" />--> </LinearLayout> <!-- <TextView--> <!-- android:id="@+id/plugin_localmusic_audio_volume_text"--> <!-- android:layout_width="match_parent"--> <!-- android:layout_height="wrap_content"--> <!-- android:visibility="gone"--> <!-- android:layout_marginBottom="20dp"--> <!-- android:inputType="textMultiLine|textNoSuggestions"--> <!-- android:scrollHorizontally="false"--> <!-- android:text="configure audio volume"--> <!-- android:textColor="#C4BCAF"--> <!-- android:textSize="@dimen/normal_text_size" />--> <!-- <TextView--> <!-- android:id="@+id/plugin_localmusic_choose_audio_directory_text"--> <!-- android:layout_width="match_parent"--> <!-- android:layout_height="wrap_content"--> <!-- android:layout_below="@+id/plugin_localmusic_audio_volume_text"--> <!-- android:layout_alignParentStart="true"--> <!-- android:layout_marginBottom="20dp"--> <!-- android:inputType="textMultiLine|textNoSuggestions"--> <!-- android:scrollHorizontally="false"--> <!-- android:text="choose audio directory"--> <!-- android:visibility="gone"--> <!-- android:textColor="#C4BCAF"--> <!-- android:textSize="@dimen/normal_text_size" />--> <!-- <LinearLayout--> <!-- android:id="@+id/plugin_localmusic_play_till_end_container"--> <!-- android:layout_width="match_parent"--> <!-- android:layout_height="wrap_content"--> <!-- android:layout_below="@+id/plugin_localmusic_choose_audio_directory_text"--> <!-- android:layout_alignParentStart="true"--> <!-- android:layout_marginBottom="50dp"--> <!-- android:visibility="gone"--> <!-- android:orientation="horizontal">--> <!-- <TextView--> <!-- android:id="@+id/plugin_localmusic_play_till_end"--> <!-- android:layout_width="wrap_content"--> <!-- android:layout_height="wrap_content"--> <!-- android:inputType="textMultiLine|textNoSuggestions"--> <!-- android:scrollHorizontally="false"--> <!-- android:text="> play until the end"--> <!-- android:textColor="#C4BCAF"--> <!-- android:textSize="@dimen/normal_text_size" />--> <!-- <TextView--> <!-- android:id="@+id/plugin_localmusic_play_till_end_answer"--> <!-- android:layout_width="wrap_content"--> <!-- android:layout_height="wrap_content"--> <!-- android:layout_marginLeft="3dp"--> <!-- android:inputType="textMultiLine|textNoSuggestions"--> <!-- android:scrollHorizontally="false"--> <!-- android:text="sure thing"--> <!-- android:textColor="#C4BCAF"--> <!-- android:textSize="@dimen/normal_text_size" />--> <!-- </LinearLayout>--> </LinearLayout>
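A minimal sketch of how the two SwitchCompat toggles declared in this layout might be wired up from the hosting Activity. The SharedPreferences file name, the preference keys, and the method name are illustrative placeholders, not part of the layout.

// Illustrative only. Assumes this method lives in the hosting Activity (pre-AndroidX, matching the
// android.support.v7.widget.SwitchCompat used above) and that the layout has already been inflated
// via setContentView(). Requires: import android.content.SharedPreferences;
// import android.support.v7.widget.SwitchCompat;
private void bindLocalMusicSwitches() {
    final SharedPreferences prefs = getSharedPreferences("local_music_prefs", MODE_PRIVATE); // assumed store
    final SwitchCompat playUntilEnd = (SwitchCompat) findViewById(R.id.local_music_play_until_end_switch);
    final SwitchCompat loopMusic = (SwitchCompat) findViewById(R.id.local_music_loop_switch);

    // Restore the last saved state, then persist every change the user makes.
    playUntilEnd.setChecked(prefs.getBoolean("play_until_end", false));
    loopMusic.setChecked(prefs.getBoolean("loop_music", false));
    playUntilEnd.setOnCheckedChangeListener((button, checked) ->
            prefs.edit().putBoolean("play_until_end", checked).apply());
    loopMusic.setOnCheckedChangeListener((button, checked) ->
            prefs.edit().putBoolean("loop_music", checked).apply());
}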
{ "pile_set_name": "Github" }
/*** * * Copyright (c) 1999, Valve LLC. All rights reserved. * * This product contains software technology licensed from Id * Software, Inc. ("Id Technology"). Id Technology (c) 1996 Id Software, Inc. * All Rights Reserved. * * Use, distribution, and modification of this source code and/or resulting * object code is restricted to non-commercial enhancements to products from * Valve LLC. All other use, distribution, or modification is prohibited * without written permission from Valve LLC. * ****/ #if !defined( OEM_BUILD ) #include "extdll.h" #include "util.h" #include "cbase.h" #include "monsters.h" #include "weapons.h" #include "nodes.h" #include "player.h" #include "gamerules.h" #include "mod/AvHNetworkMessages.h" enum rpg_e { RPG_IDLE = 0, RPG_FIDGET, RPG_RELOAD, // to reload RPG_FIRE2, // to empty RPG_HOLSTER1, // loaded RPG_DRAW1, // loaded RPG_HOLSTER2, // unloaded RPG_DRAW_UL, // unloaded RPG_IDLE_UL, // unloaded idle RPG_FIDGET_UL, // unloaded fidget }; LINK_ENTITY_TO_CLASS( weapon_rpg, CRpg ); #ifndef CLIENT_DLL LINK_ENTITY_TO_CLASS( laser_spot, CLaserSpot ); //========================================================= //========================================================= CLaserSpot *CLaserSpot::CreateSpot( void ) { CLaserSpot *pSpot = GetClassPtr( (CLaserSpot *)NULL ); pSpot->Spawn(); pSpot->pev->classname = MAKE_STRING("laser_spot"); return pSpot; } //========================================================= //========================================================= void CLaserSpot::Spawn( void ) { Precache( ); pev->movetype = MOVETYPE_NONE; pev->solid = SOLID_NOT; pev->rendermode = kRenderGlow; pev->renderfx = kRenderFxNoDissipation; pev->renderamt = 255; SET_MODEL(ENT(pev), "sprites/laserdot.spr"); UTIL_SetOrigin( pev, pev->origin ); }; //========================================================= // Suspend- make the laser sight invisible. //========================================================= void CLaserSpot::Suspend( float flSuspendTime ) { pev->effects |= EF_NODRAW; SetThink( &CLaserSpot::Revive ); pev->nextthink = gpGlobals->time + flSuspendTime; } //========================================================= // Revive - bring a suspended laser sight back. //========================================================= void CLaserSpot::Revive( void ) { pev->effects &= ~EF_NODRAW; SetThink( NULL ); } void CLaserSpot::Precache( void ) { PRECACHE_MODEL("sprites/laserdot.spr"); }; LINK_ENTITY_TO_CLASS( rpg_rocket, CRpgRocket ); //========================================================= //========================================================= CRpgRocket *CRpgRocket::CreateRpgRocket( Vector vecOrigin, Vector vecAngles, CBaseEntity *pOwner, CRpg *pLauncher ) { CRpgRocket *pRocket = GetClassPtr( (CRpgRocket *)NULL ); UTIL_SetOrigin( pRocket->pev, vecOrigin ); pRocket->pev->angles = vecAngles; pRocket->Spawn(); pRocket->SetTouch( &CRpgRocket::RocketTouch ); pRocket->m_pLauncher = pLauncher;// remember what RPG fired me. 
pRocket->m_pLauncher->m_cActiveRockets++;// register this missile as active for the launcher pRocket->pev->owner = pOwner->edict(); return pRocket; } //========================================================= //========================================================= void CRpgRocket :: Spawn( void ) { Precache( ); // motor pev->movetype = MOVETYPE_BOUNCE; pev->solid = SOLID_BBOX; SET_MODEL(ENT(pev), "models/rpgrocket.mdl"); UTIL_SetSize(pev, Vector( 0, 0, 0), Vector(0, 0, 0)); UTIL_SetOrigin( pev, pev->origin ); pev->classname = MAKE_STRING("rpg_rocket"); SetThink( &CRpgRocket::IgniteThink ); SetTouch( &CRpgRocket::ExplodeTouch ); pev->angles.x -= 30; UTIL_MakeVectors( pev->angles ); pev->angles.x = -(pev->angles.x + 30); pev->velocity = gpGlobals->v_forward * 250; pev->gravity = 0.5; pev->nextthink = gpGlobals->time + 0.4; pev->dmg = gSkillData.plrDmgRPG; } //========================================================= //========================================================= void CRpgRocket :: RocketTouch ( CBaseEntity *pOther ) { if ( m_pLauncher ) { // my launcher is still around, tell it I'm dead. m_pLauncher->m_cActiveRockets--; } STOP_SOUND( edict(), CHAN_VOICE, "weapons/rocket1.wav" ); ExplodeTouch( pOther ); } //========================================================= //========================================================= void CRpgRocket :: Precache( void ) { PRECACHE_MODEL("models/rpgrocket.mdl"); m_iTrail = PRECACHE_MODEL("sprites/smoke.spr"); PRECACHE_SOUND ("weapons/rocket1.wav"); } void CRpgRocket :: IgniteThink( void ) { // pev->movetype = MOVETYPE_TOSS; pev->movetype = MOVETYPE_FLY; pev->effects |= EF_LIGHT; // make rocket sound EMIT_SOUND( ENT(pev), CHAN_VOICE, "weapons/rocket1.wav", 1, 0.5 ); // rocket trail MESSAGE_BEGIN( MSG_BROADCAST, SVC_TEMPENTITY ); WRITE_BYTE( TE_BEAMFOLLOW ); WRITE_SHORT(entindex()); // entity WRITE_SHORT(m_iTrail ); // model WRITE_BYTE( 40 ); // life WRITE_BYTE( 5 ); // width WRITE_BYTE( 224 ); // r, g, b WRITE_BYTE( 224 ); // r, g, b WRITE_BYTE( 255 ); // r, g, b WRITE_BYTE( 255 ); // brightness MESSAGE_END(); // move PHS/PVS data sending into here (SEND_ALL, SEND_PVS, SEND_PHS) m_flIgniteTime = gpGlobals->time; // set to follow laser spot SetThink( &CRpgRocket::FollowThink ); pev->nextthink = gpGlobals->time + 0.1; } void CRpgRocket :: FollowThink( void ) { CBaseEntity *pOther = NULL; Vector vecTarget; Vector vecDir; float flDist, flMax, flDot; TraceResult tr; UTIL_MakeAimVectors( pev->angles ); vecTarget = gpGlobals->v_forward; flMax = 4096; // Examine all entities within a reasonable radius while ((pOther = UTIL_FindEntityByClassname( pOther, "laser_spot" )) != NULL) { UTIL_TraceLine ( pev->origin, pOther->pev->origin, dont_ignore_monsters, ENT(pev), &tr ); // ALERT( at_console, "%f\n", tr.flFraction ); if (tr.flFraction >= 0.90) { vecDir = pOther->pev->origin - pev->origin; flDist = vecDir.Length( ); vecDir = vecDir.Normalize( ); flDot = DotProduct( gpGlobals->v_forward, vecDir ); if ((flDot > 0) && (flDist * (1 - flDot) < flMax)) { flMax = flDist * (1 - flDot); vecTarget = vecDir; } } } pev->angles = UTIL_VecToAngles( vecTarget ); // this acceleration and turning math is totally wrong, but it seems to respond well so don't change it. 
float flSpeed = pev->velocity.Length(); if (gpGlobals->time - m_flIgniteTime < 1.0) { pev->velocity = pev->velocity * 0.2 + vecTarget * (flSpeed * 0.8 + 400); if (pev->waterlevel == 3) { // go slow underwater if (pev->velocity.Length() > 300) { pev->velocity = pev->velocity.Normalize() * 300; } UTIL_BubbleTrail( pev->origin - pev->velocity * 0.1, pev->origin, 4 ); } else { if (pev->velocity.Length() > 2000) { pev->velocity = pev->velocity.Normalize() * 2000; } } } else { if (pev->effects & EF_LIGHT) { pev->effects = 0; STOP_SOUND( ENT(pev), CHAN_VOICE, "weapons/rocket1.wav" ); } pev->velocity = pev->velocity * 0.2 + vecTarget * flSpeed * 0.798; if (pev->waterlevel == 0 && pev->velocity.Length() < 1500) { Detonate( ); } } // ALERT( at_console, "%.0f\n", flSpeed ); pev->nextthink = gpGlobals->time + 0.1; } #endif void CRpg::Reload( void ) { int iResult; if ( m_iClip == 1 ) { // don't bother with any of this if don't need to reload. return; } if ( m_pPlayer->ammo_rockets <= 0 ) return; // because the RPG waits to autoreload when no missiles are active while the LTD is on, the // weapons code is constantly calling into this function, but is often denied because // a) missiles are in flight, but the LTD is on // or // b) player is totally out of ammo and has nothing to switch to, and should be allowed to // shine the designator around // // Set the next attack time into the future so that WeaponIdle will get called more often // than reload, allowing the RPG LTD to be updated m_flNextPrimaryAttack = UTIL_WeaponTimeBase() + 0.5; if ( m_cActiveRockets && m_fSpotActive ) { // no reloading when there are active missiles tracking the designator. // ward off future autoreload attempts by setting next attack time into the future for a bit. return; } #ifndef CLIENT_DLL if ( m_pSpot && m_fSpotActive ) { m_pSpot->Suspend( 2.1 ); m_flNextSecondaryAttack = UTIL_WeaponTimeBase() + 2.1; } #endif if ( m_iClip == 0 ) iResult = DefaultReload( RPG_MAX_CLIP, RPG_RELOAD, 2 ); if ( iResult ) m_flTimeWeaponIdle = UTIL_WeaponTimeBase() + UTIL_SharedRandomFloat( m_pPlayer->random_seed, 10, 15 ); } void CRpg::Spawn( ) { Precache( ); m_iId = WEAPON_RPG; SET_MODEL(ENT(pev), "models/w_rpg.mdl"); m_fSpotActive = 1; #ifdef CLIENT_DLL if ( bIsMultiplayer() ) #else if ( g_pGameRules->IsMultiplayer() ) #endif { // more default ammo in multiplay. m_iDefaultAmmo = RPG_DEFAULT_GIVE * 2; } else { m_iDefaultAmmo = RPG_DEFAULT_GIVE; } FallInit();// get ready to fall down. 
} void CRpg::Precache( void ) { PRECACHE_MODEL("models/w_rpg.mdl"); PRECACHE_MODEL("models/v_rpg.mdl"); PRECACHE_MODEL("models/p_rpg.mdl"); PRECACHE_SOUND("items/9mmclip1.wav"); UTIL_PrecacheOther( "laser_spot" ); UTIL_PrecacheOther( "rpg_rocket" ); PRECACHE_SOUND("weapons/rocketfire1.wav"); PRECACHE_SOUND("weapons/glauncher.wav"); // alternative fire sound m_usRpg = PRECACHE_EVENT ( 1, "events/rpg.sc" ); } int CRpg::GetItemInfo(ItemInfo *p) { p->pszName = STRING(pev->classname); p->pszAmmo1 = "rockets"; p->iMaxAmmo1 = ROCKET_MAX_CARRY; p->pszAmmo2 = NULL; p->iMaxAmmo2 = -1; p->iMaxClip = RPG_MAX_CLIP; p->iSlot = 3; p->iPosition = 0; p->iId = m_iId = WEAPON_RPG; p->iFlags = 0; p->iWeight = RPG_WEIGHT; return 1; } int CRpg::AddToPlayer( CBasePlayer *pPlayer ) { if ( CBasePlayerWeapon::AddToPlayer( pPlayer ) ) { NetMsg_WeapPickup( pev, m_iId ); //MESSAGE_BEGIN( MSG_ONE, gmsgWeapPickup, NULL, pPlayer->pev ); // WRITE_BYTE( m_iId ); //MESSAGE_END(); return TRUE; } return FALSE; } BOOL CRpg::Deploy( ) { if ( m_iClip == 0 ) { return DefaultDeploy( "models/v_rpg.mdl", "models/p_rpg.mdl", RPG_DRAW_UL, "rpg" ); } return DefaultDeploy( "models/v_rpg.mdl", "models/p_rpg.mdl", RPG_DRAW1, "rpg" ); } BOOL CRpg::CanHolster( void ) { if ( m_fSpotActive && m_cActiveRockets ) { // can't put away while guiding a missile. return FALSE; } return TRUE; } void CRpg::Holster( int skiplocal /* = 0 */ ) { m_fInReload = FALSE;// cancel any reload in progress. m_pPlayer->m_flNextAttack = UTIL_WeaponTimeBase() + 0.5; SendWeaponAnim( RPG_HOLSTER1 ); #ifndef CLIENT_DLL if (m_pSpot) { m_pSpot->Killed( NULL, GIB_NEVER ); m_pSpot = NULL; } #endif } void CRpg::PrimaryAttack() { if ( m_iClip ) { m_pPlayer->m_iWeaponVolume = LOUD_GUN_VOLUME; m_pPlayer->m_iWeaponFlash = BRIGHT_GUN_FLASH; #ifndef CLIENT_DLL // player "shoot" animation m_pPlayer->SetAnimation( PLAYER_ATTACK1 ); UTIL_MakeVectors( m_pPlayer->pev->v_angle ); Vector vecSrc = m_pPlayer->GetGunPosition( ) + gpGlobals->v_forward * 16 + gpGlobals->v_right * 8 + gpGlobals->v_up * -8; CRpgRocket *pRocket = CRpgRocket::CreateRpgRocket( vecSrc, m_pPlayer->pev->v_angle, m_pPlayer, this ); UTIL_MakeVectors( m_pPlayer->pev->v_angle );// RpgRocket::Create stomps on globals, so remake. pRocket->pev->velocity = pRocket->pev->velocity + gpGlobals->v_forward * DotProduct( m_pPlayer->pev->velocity, gpGlobals->v_forward ); #endif // firing RPG no longer turns on the designator. ALT fire is a toggle switch for the LTD. // Ken signed up for this as a global change (sjb) int flags; #if defined( CLIENT_WEAPONS ) flags = FEV_NOTHOST; #else flags = 0; #endif PLAYBACK_EVENT( flags, m_pPlayer->edict(), m_usRpg ); m_iClip--; m_flNextPrimaryAttack = UTIL_WeaponTimeBase() + 1.5; m_flTimeWeaponIdle = UTIL_WeaponTimeBase() + 1.5; } else { PlayEmptySound( ); } UpdateSpot( ); } void CRpg::SecondaryAttack() { m_fSpotActive = ! 
m_fSpotActive; #ifndef CLIENT_DLL if (!m_fSpotActive && m_pSpot) { m_pSpot->Killed( NULL, GIB_NORMAL ); m_pSpot = NULL; } #endif m_flNextSecondaryAttack = UTIL_WeaponTimeBase() + 0.2; } void CRpg::WeaponIdle( void ) { UpdateSpot( ); ResetEmptySound( ); if ( m_flTimeWeaponIdle > UTIL_WeaponTimeBase() ) return; if ( m_pPlayer->m_rgAmmo[m_iPrimaryAmmoType]) { int iAnim; float flRand = UTIL_SharedRandomFloat( m_pPlayer->random_seed, 0, 1 ); if (flRand <= 0.75 || m_fSpotActive) { if ( m_iClip == 0 ) iAnim = RPG_IDLE_UL; else iAnim = RPG_IDLE; m_flTimeWeaponIdle = UTIL_WeaponTimeBase() + 90.0 / 15.0; } else { if ( m_iClip == 0 ) iAnim = RPG_FIDGET_UL; else iAnim = RPG_FIDGET; m_flTimeWeaponIdle = UTIL_WeaponTimeBase() + 3.0; } SendWeaponAnim( iAnim ); } else { m_flTimeWeaponIdle = UTIL_WeaponTimeBase() + 1; } } void CRpg::UpdateSpot( void ) { #ifndef CLIENT_DLL if (m_fSpotActive) { if (!m_pSpot) { m_pSpot = CLaserSpot::CreateSpot(); } UTIL_MakeVectors( m_pPlayer->pev->v_angle ); Vector vecSrc = m_pPlayer->GetGunPosition( );; Vector vecAiming = gpGlobals->v_forward; TraceResult tr; UTIL_TraceLine ( vecSrc, vecSrc + vecAiming * 8192, dont_ignore_monsters, ENT(m_pPlayer->pev), &tr ); UTIL_SetOrigin( m_pSpot->pev, tr.vecEndPos ); } #endif } class CRpgAmmo : public CBasePlayerAmmo { void Spawn( void ) { Precache( ); SET_MODEL(ENT(pev), "models/w_rpgammo.mdl"); CBasePlayerAmmo::Spawn( ); } void Precache( void ) { PRECACHE_MODEL ("models/w_rpgammo.mdl"); PRECACHE_SOUND("items/9mmclip1.wav"); } BOOL AddAmmo( CBaseEntity *pOther ) { int iGive; #ifdef CLIENT_DLL if ( bIsMultiplayer() ) #else if ( g_pGameRules->IsMultiplayer() ) #endif { // hand out more ammo per rocket in multiplayer. iGive = AMMO_RPGCLIP_GIVE * 2; } else { iGive = AMMO_RPGCLIP_GIVE; } if (pOther->GiveAmmo( iGive, "rockets", ROCKET_MAX_CARRY ) != -1) { EMIT_SOUND(ENT(pev), CHAN_ITEM, "items/9mmclip1.wav", 1, ATTN_NORM); return TRUE; } return FALSE; } }; LINK_ENTITY_TO_CLASS( ammo_rpgclip, CRpgAmmo ); #endif
{ "pile_set_name": "Github" }
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Title</title>
</head>
<body>
<script>
    let test = {
        name: 'yanle',
        age() {
            console.log(`age is 15`)
        }
    };
    let func = Object.create(test);
</script>
</body>
</html>
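The script creates func with Object.create(test) but never uses it. A small illustrative addition that could follow the last line inside the same script block (not part of the original file), showing the prototype delegation Object.create sets up:

// func starts with no own properties; `name` and `age` resolve through its prototype, `test`.
func.age();                                          // logs "age is 15", found on the prototype
console.log(Object.getPrototypeOf(func) === test);   // true
console.log(Object.prototype.hasOwnProperty.call(func, 'age')); // false, `age` lives on `test`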
{ "pile_set_name": "Github" }
<?php require_once("http.php"); require_once("auth_digest.php"); // ---------------------------------------------------------- // class Qiniu_Rio_PutExtra class Qiniu_Rio_PutExtra { public $Bucket = null; // Required (this field will be removed in the future). public $Params = null; public $MimeType = null; public $ChunkSize = 0; // Optional. Chunk size for each upload public $TryTimes = 3; // Optional. Number of attempts public $Progresses = null; // Optional. Upload progress: []BlkputRet public $Notify = null; // Progress callback: func(blkIdx int, blkSize int, ret *BlkputRet) public $NotifyErr = null; // Error callback: func(blkIdx int, blkSize int, err error) public function __construct($bucket = null) { $this->Bucket = $bucket; } } // ---------------------------------------------------------- // func Qiniu_Rio_BlockCount define('QINIU_RIO_BLOCK_BITS', 22); define('QINIU_RIO_BLOCK_SIZE', 1 << QINIU_RIO_BLOCK_BITS); // 4M function Qiniu_Rio_BlockCount($fsize) // => $blockCnt { return ($fsize + (QINIU_RIO_BLOCK_SIZE - 1)) >> QINIU_RIO_BLOCK_BITS; } // ---------------------------------------------------------- // internal func Qiniu_Rio_Mkblock/Mkfile function Qiniu_Rio_Mkblock($self, $host, $reader, $size) // => ($blkputRet, $err) { if (is_resource($reader)) { $body = fread($reader, $size); if ($body === false) { $err = new Qiniu_Error(0, 'fread failed'); return array(null, $err); } } else { list($body, $err) = $reader->Read($size); if ($err !== null) { return array(null, $err); } } if (strlen($body) != $size) { $err = new Qiniu_Error(0, 'fread failed: unexpected eof'); return array(null, $err); } $url = $host . '/mkblk/' . $size; return Qiniu_Client_CallWithForm($self, $url, $body, 'application/octet-stream'); } function dzz_Qiniu_Rio_Mkblock($upToken, $fileContent, $size){ global $QINIU_UP_HOST; $self = new Qiniu_Rio_UploadClient($upToken); if (strlen($fileContent) != $size) { $err = new Qiniu_Error(0, 'fread failed: unexpected eof'); return array(null, $err); } $url = $QINIU_UP_HOST . '/mkblk/' . $size; return Qiniu_Client_CallWithForm($self, $url, $fileContent, 'application/octet-stream'); } function dzz_Qiniu_Rio_Mkfile($upToken, $key, $fsize, $extra) // => ($putRet, $err) { global $QINIU_UP_HOST; $url = $QINIU_UP_HOST . '/mkfile/' . $fsize; $self = new Qiniu_Rio_UploadClient($upToken); if ($key !== null) { $url .= '/key/' . Qiniu_Encode($key); } if (!empty($extra->MimeType)) { $url .= '/mimeType/' . Qiniu_Encode($extra->MimeType); } if (!empty($extra->Params)) { foreach ($extra->Params as $k=>$v) { $url .= "/" . $k . "/" . Qiniu_Encode($v); } } $ctxs = array(); foreach ($extra->Progresses as $prog) { $ctxs []= $prog['ctx']; } $body = implode(',', $ctxs); return Qiniu_Client_CallWithForm($self, $url, $body, 'application/octet-stream'); } function Qiniu_Rio_Mkfile($self, $host, $key, $fsize, $extra) // => ($putRet, $err) { $url = $host . '/mkfile/' . $fsize; if ($key !== null) { $url .= '/key/' . Qiniu_Encode($key); } if (!empty($extra->MimeType)) { $url .= '/mimeType/' . Qiniu_Encode($extra->MimeType); } if (!empty($extra->Params)) { foreach ($extra->Params as $k=>$v) { $url .= "/" . $k . "/" .
Qiniu_Encode($v); } } $ctxs = array(); foreach ($extra->Progresses as $prog) { $ctxs []= $prog['ctx']; } $body = implode(',', $ctxs); return Qiniu_Client_CallWithForm($self, $url, $body, 'application/octet-stream'); } // ---------------------------------------------------------- // class Qiniu_Rio_UploadClient class Qiniu_Rio_UploadClient { public $uptoken; public function __construct($uptoken) { $this->uptoken = $uptoken; } public function RoundTrip($req) // => ($resp, $error) { $token = $this->uptoken; $req->Header['Authorization'] = "UpToken $token"; return Qiniu_Client_do($req); } } // ---------------------------------------------------------- // class Qiniu_Rio_Put/PutFile function Qiniu_Rio_Put($upToken, $key, $body, $fsize, $putExtra) // => ($putRet, $err) { global $QINIU_UP_HOST; $self = new Qiniu_Rio_UploadClient($upToken); $progresses = array(); $uploaded = 0; while ($uploaded < $fsize) { $tried = 0; $tryTimes = ($putExtra->TryTimes > 0) ? $putExtra->TryTimes : 1; $blkputRet = null; $err = null; if ($fsize < $uploaded + QINIU_RIO_BLOCK_SIZE) { $bsize = $fsize - $uploaded; } else { $bsize = QINIU_RIO_BLOCK_SIZE; } while ($tried < $tryTimes) { list($blkputRet, $err) = Qiniu_Rio_Mkblock($self, $QINIU_UP_HOST, $body, $bsize); if ($err === null) { break; } $tried += 1; continue; } if ($err !== null) { return array(null, $err); } if ($blkputRet === null ) { $err = new Qiniu_Error(0, "rio: uploaded without ret"); return array(null, $err); } $uploaded += $bsize; $progresses []= $blkputRet; } $putExtra->Progresses = $progresses; return Qiniu_Rio_Mkfile($self, $QINIU_UP_HOST, $key, $fsize, $putExtra); } function Qiniu_Rio_PutFile($upToken, $key, $localFile, $putExtra) // => ($putRet, $err) { $fp = fopen($localFile, 'rb'); if ($fp === false) { $err = new Qiniu_Error(0, 'fopen failed'); return array(null, $err); } $fi = fstat($fp); $result = Qiniu_Rio_Put($upToken, $key, $fp, $fi['size'], $putExtra); fclose($fp); return $result; } // ----------------------------------------------------------
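A minimal usage sketch for the resumable upload entry point defined in this file. The include path, bucket name, key, local file path, and the already-issued upload token are placeholders; generating the token belongs to the auth/put-policy code, which is not shown here.

<?php
require_once("rio.php");                     // assumed filename of the file above

$upToken  = $uploadTokenIssuedElsewhere;     // placeholder: produced by the auth/put-policy code
$putExtra = new Qiniu_Rio_PutExtra('my-bucket');
$putExtra->MimeType = 'application/zip';

// Uploads in 4 MB blocks (QINIU_RIO_BLOCK_SIZE) via mkblk, then assembles them with mkfile.
list($putRet, $err) = Qiniu_Rio_PutFile($upToken, 'backups/archive.zip', '/tmp/archive.zip', $putExtra);
if ($err !== null) {
    error_log('resumable upload failed: ' . var_export($err, true));
} else {
    var_dump($putRet);                       // on success typically contains "hash" and "key"
}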
{ "pile_set_name": "Github" }
jasmine.TrivialReporter = function(doc) { this.document = doc || document; this.suiteDivs = {}; this.logRunningSpecs = false; }; jasmine.TrivialReporter.prototype.createDom = function(type, attrs, childrenVarArgs) { var el = document.createElement(type); for (var i = 2; i < arguments.length; i++) { var child = arguments[i]; if (typeof child === 'string') { el.appendChild(document.createTextNode(child)); } else { if (child) { el.appendChild(child); } } } for (var attr in attrs) { if (attr == "className") { el[attr] = attrs[attr]; } else { el.setAttribute(attr, attrs[attr]); } } return el; }; jasmine.TrivialReporter.prototype.reportRunnerStarting = function(runner) { var showPassed, showSkipped; this.outerDiv = this.createDom('div', { className: 'jasmine_reporter' }, this.createDom('div', { className: 'banner' }, this.createDom('div', { className: 'logo' }, this.createDom('a', { href: 'http://pivotal.github.com/jasmine/', target: "_blank" }, "Jasmine"), this.createDom('span', { className: 'version' }, runner.env.versionString())), this.createDom('div', { className: 'options' }, "Show ", showPassed = this.createDom('input', { id: "__jasmine_TrivialReporter_showPassed__", type: 'checkbox' }), this.createDom('label', { "for": "__jasmine_TrivialReporter_showPassed__" }, " passed "), showSkipped = this.createDom('input', { id: "__jasmine_TrivialReporter_showSkipped__", type: 'checkbox' }), this.createDom('label', { "for": "__jasmine_TrivialReporter_showSkipped__" }, " skipped") ) ), this.runnerDiv = this.createDom('div', { className: 'runner running' }, this.createDom('a', { className: 'run_spec', href: '?' }, "run all"), this.runnerMessageSpan = this.createDom('span', {}, "Running..."), this.finishedAtSpan = this.createDom('span', { className: 'finished-at' }, "")) ); this.document.body.appendChild(this.outerDiv); var suites = runner.suites(); for (var i = 0; i < suites.length; i++) { var suite = suites[i]; var suiteDiv = this.createDom('div', { className: 'suite' }, this.createDom('a', { className: 'run_spec', href: '?spec=' + encodeURIComponent(suite.getFullName()) }, "run"), this.createDom('a', { className: 'description', href: '?spec=' + encodeURIComponent(suite.getFullName()) }, suite.description)); this.suiteDivs[suite.id] = suiteDiv; var parentDiv = this.outerDiv; if (suite.parentSuite) { parentDiv = this.suiteDivs[suite.parentSuite.id]; } parentDiv.appendChild(suiteDiv); } this.startedAt = new Date(); var self = this; showPassed.onclick = function(evt) { if (showPassed.checked) { self.outerDiv.className += ' show-passed'; } else { self.outerDiv.className = self.outerDiv.className.replace(/ show-passed/, ''); } }; showSkipped.onclick = function(evt) { if (showSkipped.checked) { self.outerDiv.className += ' show-skipped'; } else { self.outerDiv.className = self.outerDiv.className.replace(/ show-skipped/, ''); } }; }; jasmine.TrivialReporter.prototype.reportRunnerResults = function(runner) { var results = runner.results(); var className = (results.failedCount > 0) ? "runner failed" : "runner passed"; this.runnerDiv.setAttribute("class", className); //do it twice for IE this.runnerDiv.setAttribute("className", className); var specs = runner.specs(); var specCount = 0; for (var i = 0; i < specs.length; i++) { if (this.specFilter(specs[i])) { specCount++; } } var message = "" + specCount + " spec" + (specCount == 1 ? "" : "s" ) + ", " + results.failedCount + " failure" + ((results.failedCount == 1) ? 
"" : "s"); message += " in " + ((new Date().getTime() - this.startedAt.getTime()) / 1000) + "s"; this.runnerMessageSpan.replaceChild(this.createDom('a', { className: 'description', href: '?'}, message), this.runnerMessageSpan.firstChild); this.finishedAtSpan.appendChild(document.createTextNode("Finished at " + new Date().toString())); }; jasmine.TrivialReporter.prototype.reportSuiteResults = function(suite) { var results = suite.results(); var status = results.passed() ? 'passed' : 'failed'; if (results.totalCount == 0) { // todo: change this to check results.skipped status = 'skipped'; } this.suiteDivs[suite.id].className += " " + status; }; jasmine.TrivialReporter.prototype.reportSpecStarting = function(spec) { if (this.logRunningSpecs) { this.log('>> Jasmine Running ' + spec.suite.description + ' ' + spec.description + '...'); } }; jasmine.TrivialReporter.prototype.reportSpecResults = function(spec) { var results = spec.results(); var status = results.passed() ? 'passed' : 'failed'; if (results.skipped) { status = 'skipped'; } var specDiv = this.createDom('div', { className: 'spec ' + status }, this.createDom('a', { className: 'run_spec', href: '?spec=' + encodeURIComponent(spec.getFullName()) }, "run"), this.createDom('a', { className: 'description', href: '?spec=' + encodeURIComponent(spec.getFullName()), title: spec.getFullName() }, spec.description)); var resultItems = results.getItems(); var messagesDiv = this.createDom('div', { className: 'messages' }); for (var i = 0; i < resultItems.length; i++) { var result = resultItems[i]; if (result.type == 'log') { messagesDiv.appendChild(this.createDom('div', {className: 'resultMessage log'}, result.toString())); } else if (result.type == 'expect' && result.passed && !result.passed()) { messagesDiv.appendChild(this.createDom('div', {className: 'resultMessage fail'}, result.message)); if (result.trace.stack) { messagesDiv.appendChild(this.createDom('div', {className: 'stackTrace'}, result.trace.stack)); } } } if (messagesDiv.childNodes.length > 0) { specDiv.appendChild(messagesDiv); } this.suiteDivs[spec.suite.id].appendChild(specDiv); }; jasmine.TrivialReporter.prototype.log = function() { var console = jasmine.getGlobal().console; if (console && console.log) { if (console.log.apply) { console.log.apply(console, arguments); } else { console.log(arguments); // ie fix: console.log.apply doesn't exist on ie } } }; jasmine.TrivialReporter.prototype.getLocation = function() { return this.document.location; }; jasmine.TrivialReporter.prototype.specFilter = function(spec) { var paramMap = {}; var params = this.getLocation().search.substring(1).split('&'); for (var i = 0; i < params.length; i++) { var p = params[i].split('='); paramMap[decodeURIComponent(p[0])] = decodeURIComponent(p[1]); } if (!paramMap["spec"]) return true; return spec.getFullName().indexOf(paramMap["spec"]) == 0; };
{ "pile_set_name": "Github" }
{ "query": "query {\n field(parameter: \"\u043F\u0440\u0438\u0432\u0435\u0442\") {\n subField\n }\n}\n", "variables": null }
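For readability, the escaped parameter value in this fixture is the Cyrillic word "привет"; with the \n sequences expanded, the query string reads:

query {
  field(parameter: "привет") {
    subField
  }
}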
{ "pile_set_name": "Github" }
-- 21.07.2015 07:30
-- URL for the concept
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,DefaultValue,Description,EntityType,FieldLength,IsActive,IsAutocomplete,IsCentrallyMaintained,IsEncrypted,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy,ValueMin) VALUES (0,505211,0,540324,540724,11,'Copies',TO_TIMESTAMP('2015-07-21 07:30:35','YYYY-MM-DD HH24:MI:SS'),100,'1','Anzahl der zu erstellenden/zu druckenden Exemplare','de.metas.printing',0,'Y','N','Y','N','Y','N','Kopien',20,TO_TIMESTAMP('2015-07-21 07:30:35','YYYY-MM-DD HH24:MI:SS'),100,'1')
;

-- 21.07.2015 07:30
-- URL for the concept
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540724 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
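A quick follow-up query (illustrative, not part of the migration) to confirm the new process parameter and the per-language translation rows the second statement seeds:

SELECT p.AD_Process_Para_ID, p.ColumnName, p.Name, t.AD_Language, t.IsTranslated
FROM AD_Process_Para p
LEFT JOIN AD_Process_Para_Trl t ON t.AD_Process_Para_ID = p.AD_Process_Para_ID
WHERE p.AD_Process_Para_ID = 540724;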
{ "pile_set_name": "Github" }
#!/usr/bin/python # -*- coding: utf-8 -*- # # (c) 2015, René Moser <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'status': ['stableinterface'], 'supported_by': 'community', 'version': '1.0'} DOCUMENTATION = ''' --- module: cs_instance short_description: Manages instances and virtual machines on Apache CloudStack based clouds. description: - Deploy, start, update, scale, restart, restore, stop and destroy instances. version_added: '2.0' author: "René Moser (@resmo)" options: name: description: - Host name of the instance. C(name) can only contain ASCII letters. - Name will be generated (UUID) by CloudStack if not specified and can not be changed afterwards. - Either C(name) or C(display_name) is required. required: false default: null display_name: description: - Custom display name of the instances. - Display name will be set to C(name) if not specified. - Either C(name) or C(display_name) is required. required: false default: null group: description: - Group in where the new instance should be in. required: false default: null state: description: - State of the instance. required: false default: 'present' choices: [ 'deployed', 'started', 'stopped', 'restarted', 'restored', 'destroyed', 'expunged', 'present', 'absent' ] service_offering: description: - Name or id of the service offering of the new instance. - If not set, first found service offering is used. required: false default: null cpu: description: - The number of CPUs to allocate to the instance, used with custom service offerings required: false default: null cpu_speed: description: - The clock speed/shares allocated to the instance, used with custom service offerings required: false default: null memory: description: - The memory allocated to the instance, used with custom service offerings required: false default: null template: description: - Name or id of the template to be used for creating the new instance. - Required when using C(state=present). - Mutually exclusive with C(ISO) option. required: false default: null iso: description: - Name or id of the ISO to be used for creating the new instance. - Required when using C(state=present). - Mutually exclusive with C(template) option. required: false default: null template_filter: description: - Name of the filter used to search for the template or iso. - Used for params C(iso) or C(template) on C(state=present). required: false default: 'executable' choices: [ 'featured', 'self', 'selfexecutable', 'sharedexecutable', 'executable', 'community' ] aliases: [ 'iso_filter' ] version_added: '2.1' hypervisor: description: - Name the hypervisor to be used for creating the new instance. - Relevant when using C(state=present), but only considered if not set on ISO/template. - If not set or found on ISO/template, first found hypervisor will be used. 
required: false default: null choices: [ 'KVM', 'VMware', 'BareMetal', 'XenServer', 'LXC', 'HyperV', 'UCS', 'OVM' ] keyboard: description: - Keyboard device type for the instance. required: false default: null choices: [ 'de', 'de-ch', 'es', 'fi', 'fr', 'fr-be', 'fr-ch', 'is', 'it', 'jp', 'nl-be', 'no', 'pt', 'uk', 'us' ] networks: description: - List of networks to use for the new instance. required: false default: [] aliases: [ 'network' ] ip_address: description: - IPv4 address for default instance's network during creation. required: false default: null ip6_address: description: - IPv6 address for default instance's network. required: false default: null ip_to_networks: description: - "List of mappings in the form {'network': NetworkName, 'ip': 1.2.3.4}" - Mutually exclusive with C(networks) option. required: false default: null aliases: [ 'ip_to_network' ] disk_offering: description: - Name of the disk offering to be used. required: false default: null disk_size: description: - Disk size in GByte required if deploying instance from ISO. required: false default: null root_disk_size: description: - Root disk size in GByte required if deploying instance with KVM hypervisor and want resize the root disk size at startup (need CloudStack >= 4.4, cloud-initramfs-growroot installed and enabled in the template) required: false default: null security_groups: description: - List of security groups the instance to be applied to. required: false default: null aliases: [ 'security_group' ] domain: description: - Domain the instance is related to. required: false default: null account: description: - Account the instance is related to. required: false default: null project: description: - Name of the project the instance to be deployed in. required: false default: null zone: description: - Name of the zone in which the instance shoud be deployed. - If not set, default zone is used. required: false default: null ssh_key: description: - Name of the SSH key to be deployed on the new instance. required: false default: null affinity_groups: description: - Affinity groups names to be applied to the new instance. required: false default: [] aliases: [ 'affinity_group' ] user_data: description: - Optional data (ASCII) that can be sent to the instance upon a successful deployment. - The data will be automatically base64 encoded. - Consider switching to HTTP_POST by using C(CLOUDSTACK_METHOD=post) to increase the HTTP_GET size limit of 2KB to 32 KB. required: false default: null vpc: description: - Name of the VPC. required: false default: null version_added: "2.3" force: description: - Force stop/start the instance if required to apply changes, otherwise a running instance will not be changed. required: false default: false tags: description: - List of tags. Tags are a list of dictionaries having keys C(key) and C(value). - "If you want to delete all tags, set a empty list e.g. C(tags: [])." required: false default: null aliases: [ 'tag' ] poll_async: description: - Poll async jobs until job has finished. required: false default: true extends_documentation_fragment: cloudstack ''' EXAMPLES = ''' # Create a instance from an ISO # NOTE: Names of offerings and ISOs depending on the CloudStack configuration. 
- cs_instance: name: web-vm-1 iso: Linux Debian 7 64-bit hypervisor: VMware project: Integration zone: ch-zrh-ix-01 service_offering: 1cpu_1gb disk_offering: PerfPlus Storage disk_size: 20 networks: - Server Integration - Sync Integration - Storage Integration delegate_to: localhost # For changing a running instance, use the 'force' parameter - cs_instance: name: web-vm-1 display_name: web-vm-01.example.com iso: Linux Debian 7 64-bit service_offering: 2cpu_2gb force: yes delegate_to: localhost # Create or update a instance on Exoscale's public cloud using display_name. # Note: user_data can be used to kickstart the instance using cloud-init yaml config. - cs_instance: display_name: web-vm-1 template: Linux Debian 7 64-bit service_offering: Tiny ssh_key: [email protected] tags: - key: admin value: john - key: foo value: bar user_data: | #cloud-config packages: - nginx delegate_to: localhost # Create an instance with multiple interfaces specifying the IP addresses - cs_instance: name: web-vm-1 template: Linux Debian 7 64-bit service_offering: Tiny ip_to_networks: - network: NetworkA ip: 10.1.1.1 - network: NetworkB ip: 192.0.2.1 delegate_to: localhost # Ensure an instance is stopped - cs_instance: name: web-vm-1 state: stopped delegate_to: localhost # Ensure an instance is running - cs_instance: name: web-vm-1 state: started delegate_to: localhost # Remove an instance - cs_instance: name: web-vm-1 state: absent delegate_to: localhost ''' RETURN = ''' --- id: description: UUID of the instance. returned: success type: string sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6 name: description: Name of the instance. returned: success type: string sample: web-01 display_name: description: Display name of the instance. returned: success type: string sample: web-01 group: description: Group name of the instance is related. returned: success type: string sample: web created: description: Date of the instance was created. returned: success type: string sample: 2014-12-01T14:57:57+0100 password_enabled: description: True if password setting is enabled. returned: success type: boolean sample: true password: description: The password of the instance if exists. returned: success type: string sample: Ge2oe7Do ssh_key: description: Name of SSH key deployed to instance. returned: success type: string sample: key@work domain: description: Domain the instance is related to. returned: success type: string sample: example domain account: description: Account the instance is related to. returned: success type: string sample: example account project: description: Name of project the instance is related to. returned: success type: string sample: Production default_ip: description: Default IP address of the instance. returned: success type: string sample: 10.23.37.42 public_ip: description: Public IP address with instance via static NAT rule. returned: success type: string sample: 1.2.3.4 iso: description: Name of ISO the instance was deployed with. returned: success type: string sample: Debian-8-64bit template: description: Name of template the instance was deployed with. returned: success type: string sample: Debian-8-64bit service_offering: description: Name of the service offering the instance has. returned: success type: string sample: 2cpu_2gb zone: description: Name of zone the instance is in. returned: success type: string sample: ch-gva-2 state: description: State of the instance. returned: success type: string sample: Running security_groups: description: Security groups the instance is in. 
returned: success type: list sample: '[ "default" ]' affinity_groups: description: Affinity groups the instance is in. returned: success type: list sample: '[ "webservers" ]' tags: description: List of resource tags associated with the instance. returned: success type: dict sample: '[ { "key": "foo", "value": "bar" } ]' hypervisor: description: Hypervisor related to this instance. returned: success type: string sample: KVM instance_name: description: Internal name of the instance (ROOT admin only). returned: success type: string sample: i-44-3992-VM ''' import base64 # import cloudstack common from ansible.module_utils.cloudstack import * class AnsibleCloudStackInstance(AnsibleCloudStack): def __init__(self, module): super(AnsibleCloudStackInstance, self).__init__(module) self.returns = { 'group': 'group', 'hypervisor': 'hypervisor', 'instancename': 'instance_name', 'publicip': 'public_ip', 'passwordenabled': 'password_enabled', 'password': 'password', 'serviceofferingname': 'service_offering', 'isoname': 'iso', 'templatename': 'template', 'keypair': 'ssh_key', } self.instance = None self.template = None self.iso = None def get_service_offering_id(self): service_offering = self.module.params.get('service_offering') service_offerings = self.cs.listServiceOfferings() if service_offerings: if not service_offering: return service_offerings['serviceoffering'][0]['id'] for s in service_offerings['serviceoffering']: if service_offering in [ s['name'], s['id'] ]: return s['id'] self.module.fail_json(msg="Service offering '%s' not found" % service_offering) def get_template_or_iso(self, key=None): template = self.module.params.get('template') iso = self.module.params.get('iso') if not template and not iso: return None args = {} args['account'] = self.get_account(key='name') args['domainid'] = self.get_domain(key='id') args['projectid'] = self.get_project(key='id') args['zoneid'] = self.get_zone(key='id') args['isrecursive'] = True if template: if self.template: return self._get_by_key(key, self.template) args['templatefilter'] = self.module.params.get('template_filter') templates = self.cs.listTemplates(**args) if templates: for t in templates['template']: if template in [ t['displaytext'], t['name'], t['id'] ]: self.template = t return self._get_by_key(key, self.template) self.module.fail_json(msg="Template '%s' not found" % template) elif iso: if self.iso: return self._get_by_key(key, self.iso) args['isofilter'] = self.module.params.get('template_filter') isos = self.cs.listIsos(**args) if isos: for i in isos['iso']: if iso in [ i['displaytext'], i['name'], i['id'] ]: self.iso = i return self._get_by_key(key, self.iso) self.module.fail_json(msg="ISO '%s' not found" % iso) def get_disk_offering_id(self): disk_offering = self.module.params.get('disk_offering') if not disk_offering: return None disk_offerings = self.cs.listDiskOfferings() if disk_offerings: for d in disk_offerings['diskoffering']: if disk_offering in [ d['displaytext'], d['name'], d['id'] ]: return d['id'] self.module.fail_json(msg="Disk offering '%s' not found" % disk_offering) def get_instance(self): instance = self.instance if not instance: instance_name = self.get_or_fallback('name', 'display_name') vpc_id = self.get_vpc(key='id') args = { 'account': self.get_account(key='name'), 'domainid': self.get_domain(key='id'), 'projectid': self.get_project(key='id'), 'vpcid': vpc_id, } # Do not pass zoneid, as the instance name must be unique across zones. 
instances = self.cs.listVirtualMachines(**args) if instances: for v in instances['virtualmachine']: # Due the limitation of the API, there is no easy way (yet) to get only those VMs # not belonging to a VPC. if not vpc_id and self.is_vm_in_vpc(vm=v): continue if instance_name.lower() in [ v['name'].lower(), v['displayname'].lower(), v['id'] ]: self.instance = v break return self.instance def get_iptonetwork_mappings(self): network_mappings = self.module.params.get('ip_to_networks') if network_mappings is None: return if network_mappings and self.module.params.get('networks'): self.module.fail_json(msg="networks and ip_to_networks are mutually exclusive.") network_names = [n['network'] for n in network_mappings] ids = self.get_network_ids(network_names) res = [] for i, data in enumerate(network_mappings): res.append({'networkid': ids[i], 'ip': data['ip']}) return res def security_groups_has_changed(self): security_groups = self.module.params.get('security_groups') if security_groups is None: return False security_groups = [s.lower() for s in security_groups] instance_security_groups = self.instance.get('securitygroup',[]) instance_security_group_names = [] for instance_security_group in instance_security_groups: if instance_security_group['name'].lower() not in security_groups: return True else: instance_security_group_names.append(instance_security_group['name'].lower()) for security_group in security_groups: if security_group not in instance_security_group_names: return True return False def get_network_ids(self, network_names=None): if network_names is None: network_names = self.module.params.get('networks') if not network_names: return None args = { 'account': self.get_account(key='name'), 'domainid': self.get_domain(key='id'), 'projectid': self.get_project(key='id'), 'zoneid': self.get_zone(key='id'), 'vpcid': self.get_vpc(key='id'), } networks = self.cs.listNetworks(**args) if not networks: self.module.fail_json(msg="No networks available") network_ids = [] network_displaytexts = [] for network_name in network_names: for n in networks['network']: if network_name in [ n['displaytext'], n['name'], n['id'] ]: network_ids.append(n['id']) network_displaytexts.append(n['name']) break if len(network_ids) != len(network_names): self.module.fail_json(msg="Could not find all networks, networks list found: %s" % network_displaytexts) return network_ids def present_instance(self, start_vm=True): instance = self.get_instance() if not instance: instance = self.deploy_instance(start_vm=start_vm) else: instance = self.recover_instance(instance=instance) instance = self.update_instance(instance=instance, start_vm=start_vm) # In check mode, we do not necessarely have an instance if instance: instance = self.ensure_tags(resource=instance, resource_type='UserVm') # refresh instance data self.instance = instance return instance def get_user_data(self): user_data = self.module.params.get('user_data') if user_data is not None: user_data = base64.b64encode(str(user_data)) return user_data def get_details(self): res = None cpu = self.module.params.get('cpu') cpu_speed = self.module.params.get('cpu_speed') memory = self.module.params.get('memory') if all([cpu, cpu_speed, memory]): res = [{ 'cpuNumber': cpu, 'cpuSpeed': cpu_speed, 'memory': memory, }] return res def deploy_instance(self, start_vm=True): self.result['changed'] = True networkids = self.get_network_ids() if networkids is not None: networkids = ','.join(networkids) args = {} args['templateid'] = self.get_template_or_iso(key='id') if not 
args['templateid']: self.module.fail_json(msg="Template or ISO is required.") args['zoneid'] = self.get_zone(key='id') args['serviceofferingid'] = self.get_service_offering_id() args['account'] = self.get_account(key='name') args['domainid'] = self.get_domain(key='id') args['projectid'] = self.get_project(key='id') args['diskofferingid'] = self.get_disk_offering_id() args['networkids'] = networkids args['iptonetworklist'] = self.get_iptonetwork_mappings() args['userdata'] = self.get_user_data() args['keyboard'] = self.module.params.get('keyboard') args['ipaddress'] = self.module.params.get('ip_address') args['ip6address'] = self.module.params.get('ip6_address') args['name'] = self.module.params.get('name') args['displayname'] = self.get_or_fallback('display_name', 'name') args['group'] = self.module.params.get('group') args['keypair'] = self.module.params.get('ssh_key') args['size'] = self.module.params.get('disk_size') args['startvm'] = start_vm args['rootdisksize'] = self.module.params.get('root_disk_size') args['affinitygroupnames'] = ','.join(self.module.params.get('affinity_groups')) args['details'] = self.get_details() security_groups = self.module.params.get('security_groups') if security_groups is not None: args['securitygroupnames'] = ','.join(security_groups) template_iso = self.get_template_or_iso() if 'hypervisor' not in template_iso: args['hypervisor'] = self.get_hypervisor() instance = None if not self.module.check_mode: instance = self.cs.deployVirtualMachine(**args) if 'errortext' in instance: self.module.fail_json(msg="Failed: '%s'" % instance['errortext']) poll_async = self.module.params.get('poll_async') if poll_async: instance = self.poll_job(instance, 'virtualmachine') return instance def update_instance(self, instance, start_vm=True): # Service offering data args_service_offering = {} args_service_offering['id'] = instance['id'] if self.module.params.get('service_offering'): args_service_offering['serviceofferingid'] = self.get_service_offering_id() service_offering_changed = self.has_changed(args_service_offering, instance) # Instance data args_instance_update = {} args_instance_update['id'] = instance['id'] args_instance_update['userdata'] = self.get_user_data() args_instance_update['ostypeid'] = self.get_os_type(key='id') if self.module.params.get('group'): args_instance_update['group'] = self.module.params.get('group') if self.module.params.get('display_name'): args_instance_update['displayname'] = self.module.params.get('display_name') instance_changed = self.has_changed(args_instance_update, instance) # SSH key data args_ssh_key = {} args_ssh_key['id'] = instance['id'] args_ssh_key['projectid'] = self.get_project(key='id') if self.module.params.get('ssh_key'): args_ssh_key['keypair'] = self.module.params.get('ssh_key') ssh_key_changed = self.has_changed(args_ssh_key, instance) security_groups_changed = self.security_groups_has_changed() changed = [ service_offering_changed, instance_changed, security_groups_changed, ssh_key_changed, ] if True in changed: force = self.module.params.get('force') instance_state = instance['state'].lower() if instance_state == 'stopped' or force: self.result['changed'] = True if not self.module.check_mode: # Ensure VM has stopped instance = self.stop_instance() instance = self.poll_job(instance, 'virtualmachine') self.instance = instance # Change service offering if service_offering_changed: res = self.cs.changeServiceForVirtualMachine(**args_service_offering) if 'errortext' in res: self.module.fail_json(msg="Failed: '%s'" % 
res['errortext']) instance = res['virtualmachine'] self.instance = instance # Update VM if instance_changed or security_groups_changed: if security_groups_changed: args_instance_update['securitygroupnames'] = ','.join(self.module.params.get('security_groups')) res = self.cs.updateVirtualMachine(**args_instance_update) if 'errortext' in res: self.module.fail_json(msg="Failed: '%s'" % res['errortext']) instance = res['virtualmachine'] self.instance = instance # Reset SSH key if ssh_key_changed: instance = self.cs.resetSSHKeyForVirtualMachine(**args_ssh_key) if 'errortext' in instance: self.module.fail_json(msg="Failed: '%s'" % instance['errortext']) instance = self.poll_job(instance, 'virtualmachine') self.instance = instance # Start VM again if it was running before if instance_state == 'running' and start_vm: instance = self.start_instance() return instance def recover_instance(self, instance): if instance['state'].lower() in [ 'destroying', 'destroyed' ]: self.result['changed'] = True if not self.module.check_mode: res = self.cs.recoverVirtualMachine(id=instance['id']) if 'errortext' in res: self.module.fail_json(msg="Failed: '%s'" % res['errortext']) instance = res['virtualmachine'] return instance def absent_instance(self): instance = self.get_instance() if instance: if instance['state'].lower() not in ['expunging', 'destroying', 'destroyed']: self.result['changed'] = True if not self.module.check_mode: res = self.cs.destroyVirtualMachine(id=instance['id']) if 'errortext' in res: self.module.fail_json(msg="Failed: '%s'" % res['errortext']) poll_async = self.module.params.get('poll_async') if poll_async: instance = self.poll_job(res, 'virtualmachine') return instance def expunge_instance(self): instance = self.get_instance() if instance: res = {} if instance['state'].lower() in [ 'destroying', 'destroyed' ]: self.result['changed'] = True if not self.module.check_mode: res = self.cs.destroyVirtualMachine(id=instance['id'], expunge=True) elif instance['state'].lower() not in [ 'expunging' ]: self.result['changed'] = True if not self.module.check_mode: res = self.cs.destroyVirtualMachine(id=instance['id'], expunge=True) if res and 'errortext' in res: self.module.fail_json(msg="Failed: '%s'" % res['errortext']) poll_async = self.module.params.get('poll_async') if poll_async: res = self.poll_job(res, 'virtualmachine') return instance def stop_instance(self): instance = self.get_instance() # in check mode intance may not be instanciated if instance: if instance['state'].lower() in ['stopping', 'stopped']: return instance if instance['state'].lower() in ['starting', 'running']: self.result['changed'] = True if not self.module.check_mode: instance = self.cs.stopVirtualMachine(id=instance['id']) if 'errortext' in instance: self.module.fail_json(msg="Failed: '%s'" % instance['errortext']) poll_async = self.module.params.get('poll_async') if poll_async: instance = self.poll_job(instance, 'virtualmachine') return instance def start_instance(self): instance = self.get_instance() # in check mode intance may not be instanciated if instance: if instance['state'].lower() in ['starting', 'running']: return instance if instance['state'].lower() in ['stopped', 'stopping']: self.result['changed'] = True if not self.module.check_mode: instance = self.cs.startVirtualMachine(id=instance['id']) if 'errortext' in instance: self.module.fail_json(msg="Failed: '%s'" % instance['errortext']) poll_async = self.module.params.get('poll_async') if poll_async: instance = self.poll_job(instance, 'virtualmachine') return 
instance def restart_instance(self): instance = self.get_instance() # in check mode intance may not be instanciated if instance: if instance['state'].lower() in [ 'running', 'starting' ]: self.result['changed'] = True if not self.module.check_mode: instance = self.cs.rebootVirtualMachine(id=instance['id']) if 'errortext' in instance: self.module.fail_json(msg="Failed: '%s'" % instance['errortext']) poll_async = self.module.params.get('poll_async') if poll_async: instance = self.poll_job(instance, 'virtualmachine') elif instance['state'].lower() in [ 'stopping', 'stopped' ]: instance = self.start_instance() return instance def restore_instance(self): instance = self.get_instance() self.result['changed'] = True # in check mode intance may not be instanciated if instance: args = {} args['templateid'] = self.get_template_or_iso(key='id') args['virtualmachineid'] = instance['id'] res = self.cs.restoreVirtualMachine(**args) if 'errortext' in res: self.module.fail_json(msg="Failed: '%s'" % res['errortext']) poll_async = self.module.params.get('poll_async') if poll_async: instance = self.poll_job(res, 'virtualmachine') return instance def get_result(self, instance): super(AnsibleCloudStackInstance, self).get_result(instance) if instance: if 'securitygroup' in instance: security_groups = [] for securitygroup in instance['securitygroup']: security_groups.append(securitygroup['name']) self.result['security_groups'] = security_groups if 'affinitygroup' in instance: affinity_groups = [] for affinitygroup in instance['affinitygroup']: affinity_groups.append(affinitygroup['name']) self.result['affinity_groups'] = affinity_groups if 'nic' in instance: for nic in instance['nic']: if nic['isdefault'] and 'ipaddress' in nic: self.result['default_ip'] = nic['ipaddress'] return self.result def main(): argument_spec = cs_argument_spec() argument_spec.update(dict( name = dict(default=None), display_name = dict(default=None), group = dict(default=None), state = dict(choices=['present', 'deployed', 'started', 'stopped', 'restarted', 'restored', 'absent', 'destroyed', 'expunged'], default='present'), service_offering = dict(default=None), cpu = dict(default=None, type='int'), cpu_speed = dict(default=None, type='int'), memory = dict(default=None, type='int'), template = dict(default=None), iso = dict(default=None), template_filter = dict(default="executable", aliases=['iso_filter'], choices=['featured', 'self', 'selfexecutable', 'sharedexecutable', 'executable', 'community']), networks = dict(type='list', aliases=[ 'network' ], default=None), ip_to_networks = dict(type='list', aliases=['ip_to_network'], default=None), ip_address = dict(defaul=None), ip6_address = dict(defaul=None), disk_offering = dict(default=None), disk_size = dict(type='int', default=None), root_disk_size = dict(type='int', default=None), keyboard = dict(choices=['de', 'de-ch', 'es', 'fi', 'fr', 'fr-be', 'fr-ch', 'is', 'it', 'jp', 'nl-be', 'no', 'pt', 'uk', 'us'], default=None), hypervisor = dict(choices=CS_HYPERVISORS, default=None), security_groups = dict(type='list', aliases=[ 'security_group' ], default=None), affinity_groups = dict(type='list', aliases=[ 'affinity_group' ], default=[]), domain = dict(default=None), account = dict(default=None), project = dict(default=None), user_data = dict(default=None), zone = dict(default=None), ssh_key = dict(default=None), force = dict(type='bool', default=False), tags = dict(type='list', aliases=[ 'tag' ], default=None), vpc = dict(default=None), poll_async = dict(type='bool', default=True), )) 
required_together = cs_required_together() required_together.extend([ ['cpu', 'cpu_speed', 'memory'], ]) module = AnsibleModule( argument_spec=argument_spec, required_together=required_together, required_one_of = ( ['display_name', 'name'], ), mutually_exclusive = ( ['template', 'iso'], ), supports_check_mode=True ) try: acs_instance = AnsibleCloudStackInstance(module) state = module.params.get('state') if state in ['absent', 'destroyed']: instance = acs_instance.absent_instance() elif state in ['expunged']: instance = acs_instance.expunge_instance() elif state in ['restored']: acs_instance.present_instance() instance = acs_instance.restore_instance() elif state in ['present', 'deployed']: instance = acs_instance.present_instance() elif state in ['stopped']: acs_instance.present_instance(start_vm=False) instance = acs_instance.stop_instance() elif state in ['started']: acs_instance.present_instance() instance = acs_instance.start_instance() elif state in ['restarted']: acs_instance.present_instance() instance = acs_instance.restart_instance() if instance and 'state' in instance and instance['state'].lower() == 'error': module.fail_json(msg="Instance named '%s' in error state." % module.params.get('name')) result = acs_instance.get_result(instance) except CloudStackException as e: module.fail_json(msg='CloudStackException: %s' % str(e)) module.exit_json(**result) # import module snippets from ansible.module_utils.basic import * if __name__ == '__main__': main()
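Because the argument_spec above wires several cross-parameter rules together (required_together for cpu/cpu_speed/memory, required_one_of for name/display_name, mutually_exclusive for template/iso, plus the networks vs. ip_to_networks check inside get_iptonetwork_mappings), a small standalone sketch of how those rules behave may help. This is not part of the module, and every parameter value below is invented purely for illustration.

```python
# Standalone sketch (not part of the module): mirrors the cross-parameter rules
# declared in argument_spec / get_iptonetwork_mappings; all values are invented.

def check_constraints(params):
    if params.get("template") and params.get("iso"):
        raise ValueError("template and iso are mutually exclusive")
    if params.get("networks") and params.get("ip_to_networks"):
        raise ValueError("networks and ip_to_networks are mutually exclusive")
    cpu_keys = ("cpu", "cpu_speed", "memory")
    given = [k for k in cpu_keys if params.get(k) is not None]
    if given and len(given) != len(cpu_keys):
        raise ValueError("cpu, cpu_speed and memory are required together")
    if not (params.get("name") or params.get("display_name")):
        raise ValueError("one of name or display_name is required")
    return params

check_constraints({
    "name": "web-01",                              # required_one_of: name/display_name
    "template": "Debian 10 64-bit",                # mutually exclusive with 'iso'
    "cpu": 2, "cpu_speed": 2000, "memory": 2048,   # required together
    "networks": ["guest-net"],                     # mutually exclusive with 'ip_to_networks'
    "state": "started",
})
```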
{ "pile_set_name": "Github" }
<?xml version="1.0"?> <!-- Copyright (C) 2006, 2007 Anton Yuzhaninov Copyright (C) Nginx, Inc. --> <!DOCTYPE module SYSTEM "../../../../dtd/module.dtd"> <module name="Module ngx_mail_imap_module" link="/en/docs/mail/ngx_mail_imap_module.html" lang="en" rev="1"> <section id="directives" name="Directives"> <directive name="imap_auth"> <syntax><value>method</value> ...</syntax> <default>plain</default> <context>mail</context> <context>server</context> <para> Sets permitted methods of authentication for IMAP clients. Supported methods are: <list type="tag"> <tag-name><literal>login</literal></tag-name> <tag-desc> <link url="http://tools.ietf.org/html/draft-murchison-sasl-login-00">AUTH=LOGIN</link> </tag-desc> <tag-name><literal>plain</literal></tag-name> <tag-desc> <link url="http://tools.ietf.org/html/rfc4616">AUTH=PLAIN</link> </tag-desc> <tag-name><literal>cram-md5</literal></tag-name> <tag-desc> <link url="http://tools.ietf.org/html/rfc2195">AUTH=CRAM-MD5</link>. In order for this method to work, the password must be stored unencrypted. </tag-desc> </list> </para> </directive> <directive name="imap_capabilities"> <syntax><value>extension</value> ...</syntax> <default>IMAP4 IMAP4rev1 UIDPLUS</default> <context>mail</context> <context>server</context> <para> Allows to specify the <link url="http://tools.ietf.org/html/rfc3501">IMAP protocol</link> extensions list to be passed to the client upon issuing the <literal>CAPABILITY</literal> command. Authentication methods specified in the <link id="imap_auth"/> and <link url="http://tools.ietf.org/html/rfc2595">STARTTLS</link> directives are automatically added to this list if the <link doc="ngx_mail_ssl_module.xml" id="starttls"/> directive is enabled. </para> <para> It makes sense to specify extensions supported by IMAP backends to which clients are proxied (if this extensions are related to commands used after the authentication, when nginx transparently proxies the client connection to the backend). </para> <para> The current list of standardized extensions is published at the <link url="http://www.iana.org/assignments/imap4-capabilities">www.iana.org</link>. </para> </directive> <directive name="imap_client_buffer"> <syntax><value>size</value></syntax> <default>4k|8k</default> <context>mail</context> <context>server</context> <para> Sets the IMAP commands read buffer size. By default, the buffer size is equal to one memory page. This is either 4K or 8K, depending on a platform. </para> </directive> </section> </module>
{ "pile_set_name": "Github" }
struct acpi_smb_hc; enum acpi_smb_protocol { SMBUS_WRITE_QUICK = 2, SMBUS_READ_QUICK = 3, SMBUS_SEND_BYTE = 4, SMBUS_RECEIVE_BYTE = 5, SMBUS_WRITE_BYTE = 6, SMBUS_READ_BYTE = 7, SMBUS_WRITE_WORD = 8, SMBUS_READ_WORD = 9, SMBUS_WRITE_BLOCK = 0xa, SMBUS_READ_BLOCK = 0xb, SMBUS_PROCESS_CALL = 0xc, SMBUS_BLOCK_PROCESS_CALL = 0xd, }; static const u8 SMBUS_PEC = 0x80; enum acpi_sbs_device_addr { ACPI_SBS_CHARGER = 0x9, ACPI_SBS_MANAGER = 0xa, ACPI_SBS_BATTERY = 0xb, }; typedef void (*smbus_alarm_callback)(void *context); extern int acpi_smbus_read(struct acpi_smb_hc *hc, u8 protocol, u8 address, u8 command, u8 * data); extern int acpi_smbus_write(struct acpi_smb_hc *hc, u8 protocol, u8 slave_address, u8 command, u8 * data, u8 length); extern int acpi_smbus_register_callback(struct acpi_smb_hc *hc, smbus_alarm_callback callback, void *context); extern int acpi_smbus_unregister_callback(struct acpi_smb_hc *hc);
{ "pile_set_name": "Github" }
'use strict' require('./check-versions')() process.env.NODE_ENV = 'production' const ora = require('ora') const rm = require('rimraf') const path = require('path') const chalk = require('chalk') const webpack = require('webpack') const config = require('../config') const webpackConfig = require('./webpack.prod.conf') const spinner = ora('building for production...') spinner.start() rm(path.join(config.build.assetsRoot, config.build.assetsSubDirectory), err => { if (err) throw err webpack(webpackConfig, (err, stats) => { spinner.stop() if (err) throw err process.stdout.write(stats.toString({ colors: true, modules: false, children: false, // If you are using ts-loader, setting this to true will make TypeScript errors show up during build. chunks: false, chunkModules: false }) + '\n\n') if (stats.hasErrors()) { console.log(chalk.red(' Build failed with errors.\n')) process.exit(1) } console.log(chalk.cyan(' Build complete.\n')) console.log(chalk.yellow( ' Tip: built files are meant to be served over an HTTP server.\n' + ' Opening index.html over file:// won\'t work.\n' )) }) })
{ "pile_set_name": "Github" }
// Trees NEW // Gum tree material gum_branch { parent base diffuseMap gum_branch.png ambient 0.59 0.59 0.59 diffuse 0.59 0.59 0.59 alpha_rejection greater 128 cull_hardware none transparent true tree_wind true } material gum_bark { parent base diffuseMap gum_bark.png ambient 0.8 0.8 0.8 diffuse 0.64 0.64 0.64 specular 0.12 0.12 0.12 32 tree_wind true } // Palms material palm2 { parent base diffuseMap palm2_diffuse.png normalMap palm2_normal.png specMap palm2_specular.png alpha_rejection greater 128 specular 0.5 0.5 0.5 32 cull_hardware none tree_wind true transparent true receives_shadows false } material Palm { parent base diffuseMap palm.png normalMap palm_norm.png alpha_rejection greater 79 ambient 1.2 1.2 1.2 diffuse 0.4 0.4 0.4 specular 0.3 0.3 0.3 21 tree_wind true transparent true receives_shadows false } // Jungle tree material jungle_tree { parent base transparent true tree_wind true terrain_light_map true alpha_rejection greater 128 diffuseMap jungle_tree.png normalMap jungle_tree_normal.png //specMap jungle_tree_spec.png specular 0.4 0.4 0.4 32 cull_hardware none receives_shadows false } // Plants, ferns //------------------------------------------------------------------ material fern_v2 { parent base transparent true tree_wind true terrain_light_map true alpha_rejection greater 128 diffuseMap fern.png normalMap fern_normal.png specMap fern_spec.png specular 0.5 0.5 0.5 32 cull_hardware none receives_shadows false } material fern2 { parent base transparent true tree_wind true terrain_light_map true alpha_rejection greater 128 diffuseMap fern2.png normalMap fern2_normal.png specMap fern2_spec.png specular 0.5 0.5 0.5 32 cull_hardware none receives_shadows false } material palm_plant { parent base transparent true tree_wind true terrain_light_map true alpha_rejection greater 128 diffuseMap palm_plant.png normalMap palm_plant_normal.png specMap palm_plant_spec.png ambient 1.5 1.5 1.5 diffuse 1.5 1.5 1.5 specular 0.5 0.5 0.5 32 cull_hardware none //receives_shadows false twoside_diffuse true } // bushes //------------------------------------------------------------------ material plant_tropical { parent base transparent true tree_wind true terrain_light_map true alpha_rejection greater 128 diffuseMap plant_tropical_m.png normalMap plant_tropical_norm_m.png //specMap palm_plant_spec.png ambient 1.5 1.5 1.5 specular 0.57 0.6 0.5 24 cull_hardware none //receives_shadows false twoside_diffuse true } // cactus material cactus { parent base transparent true tree_wind false terrain_light_map true alpha_rejection greater 128 diffuseMap cactus.png normalMap cactus_norm.png ambient 1.4 1.4 1.4 diffuse 1.0 1.0 1.0 specular 0.2 0.2 0.2 32 } // Pine //------------------------------------------------------------------ material pine_base { parent base transparent true tree_wind true terrain_light_map true alpha_rejection greater 128 diffuseMap white.png //normalMap pine_half_norm.png ambient 1.2 1.2 1.2 diffuse 0.8 0.8 0.8 specular 0.8 0.8 0.8 24 cull_hardware none //twoside_diffuse true shadowBias 0.0001 } material pine1full { parent pine_base diffuseMap pine_full.png normalMap pine_full_norm.png ambient 1.2 1.22 1.22 diffuse 0.8 0.82 0.82 specular 1.0 1.0 1.0 32 } material pine2full { parent pine_base diffuseMap pine_full.png normalMap pine_full_norm.png ambient 1.2 1.21 1.21 diffuse 0.8 0.81 0.81 specular 1.0 1.0 1.0 16 } material pine3full { parent pine_base diffuseMap pine_full.png normalMap pine_full_norm.png ambient 1.2 1.21 1.21 diffuse 0.8 0.81 0.81 specular 1.0 1.0 1.0 24 } material 
pine1half { parent pine_base diffuseMap pine_half.png normalMap pine_half_norm.png specular 0.9 0.9 0.9 16 } material pine2half { parent pine_base diffuseMap pine_half.png normalMap pine_half_norm.png specular 0.9 0.9 0.9 16 } material pine3half { parent pine_base diffuseMap pine_half.png normalMap pine_half_norm.png specular 0.9 0.9 0.9 16 } material pine1norm { parent pine_base diffuseMap pine_norm_gr1.png normalMap pine_norm_norm.jpg ambient 0.85 0.95 0.95 diffuse 0.6 0.65 0.6 specular 0.2 0.2 0.2 8 } material pine2norm { parent pine_base diffuseMap pine_norm_gr1.png normalMap pine_norm_norm.jpg ambient 1.0 1.0 1.0 diffuse 0.6 0.6 0.6 specular 0.3 0.3 0.3 8 } material pine3norm { parent pine_base diffuseMap pine_norm_gr2.png normalMap pine_norm_norm.jpg ambient 0.9 0.9 0.9 diffuse 0.5 0.5 0.5 specular 0.3 0.3 0.3 16 } // Bushes //------------------------------------------------------------------ material bush_05 { parent base transparent true tree_wind true terrain_light_map true alpha_rejection greater 128 diffuseMap bush_05.png //normalMap plant_tropical_norm_m.png //specMap palm_plant_spec.png ambient 1.0 1.0 1.0 diffuse 1.0 1.0 1.0 specular 0.57 0.6 0.5 24 transparent true cull_hardware none //receives_shadows false twoside_diffuse true shadowBias 0.0001 } // Rocks //------------------------------------------------------------------ material rock1 { parent base //parallax true //parallaxHeight 0.035 diffuseMap rock1.png normalMap rock1_normal.png terrain_light_map true specular 0.5 0.5 0.5 32 } // new material rock_y2 // greece yellow { parent base diffuseMap rock_n2-GYel.jpg normalMap rock_n2_norm.png terrain_light_map true specular 0.4 0.4 0.35 32 } material rock_n1 { parent base diffuseMap rock_n1.jpg normalMap rock_n1_norm.png terrain_light_map true specular 0.5 0.5 0.5 32 } material rock_w1 //w { parent base diffuseMap rock_n1-W.jpg normalMap rock_n1_norm.png terrain_light_map true ambient 1.0 1.02 1.02 diffuse 1.0 1.02 1.02 specular 0.8 0.8 0.8 32 } material rocks_n1 { parent base diffuseMap rock_n1.jpg normalMap rock_n1_norm.png terrain_light_map true specular 0.5 0.5 0.5 32 } material rock_n2 { parent base diffuseMap rock_n2.jpg normalMap rock_n2_norm.png terrain_light_map true specular 0.5 0.5 0.5 32 } material rock_n3 { parent base diffuseMap rock_n3.jpg normalMap rock_n3_norm.png terrain_light_map true specular 0.5 0.5 0.5 32 } material rock_n4 { parent base diffuseMap rock_n4.jpg normalMap rock_n4_norm.png terrain_light_map true specular 0.5 0.5 0.5 32 } material rock_w4 //w { parent base diffuseMap rock_n4-W.jpg normalMap rock_n4_norm.png terrain_light_map true ambient 1.0 1.02 1.02 diffuse 1.0 1.02 1.02 specular 0.9 0.9 0.9 24 } material rock_n5 { parent base diffuseMap rock_n5.jpg normalMap rock_n5_norm.png terrain_light_map true specular 0.5 0.5 0.5 32 } material rock_w5v //w { parent base diffuseMap rock_n5v-W.jpg normalMap rock_n5_norm.png terrain_light_map true ambient 1.0 1.02 1.02 diffuse 1.0 1.02 1.02 specular 0.9 0.9 0.9 24 } // Mars Rocks //------------------------------------------------------------------ material rock_R2 { parent base diffuseMap rock_R2.jpg normalMap rock_n2_norm.png terrain_light_map true ambient 1.3 1.0 1.0 diffuse 1.0 0.8 0.7 //specular 1.5 0.5 0.5 32 } material rock_R3 { parent base diffuseMap rock_R3.jpg normalMap rock_n3_norm.png terrain_light_map true ambient 1.3 1.0 1.0 diffuse 1.0 0.8 0.7 //specular 1.5 0.5 0.5 32 } material rock_R4 { parent base diffuseMap rock_R4.jpg normalMap rock_n4_norm.png terrain_light_map true ambient 1.3 
1.0 1.0 diffuse 1.0 0.8 0.7 //specular 1.5 0.5 0.5 32 } material rock_r4 { parent base diffuseMap rock_R4.jpg normalMap rock_n4_norm.png terrain_light_map true ambient 1.3 1.0 1.0 diffuse 1.0 0.8 0.7 //specular 1.5 0.5 0.5 32 } material rock_R5 { parent base diffuseMap rock_R5.jpg normalMap rock_n5_norm.png terrain_light_map true ambient 1.3 1.0 1.0 diffuse 1.0 0.8 0.7 //specular 1.5 0.5 0.5 32 } material rock_R5f { parent base diffuseMap rock_R5.jpg normalMap rock_n5_norm.png terrain_light_map true ambient 1.3 1.1 1.0 diffuse 1.0 0.8 0.7 //specular 0.9 0.9 0.9 24 } // Crystals //------------------------------------------------------------------ material crystal1 { parent base diffuseMap crystal_form3.jpg normalMap crystal_form3_n.jpg ambient 0.1 0.3 0.3 diffuse 0.3 0.6 0.6 specular 0.05 0.2 0.25 64 transparent true scene_blend add //scene_blend alpha_blend //cull_hardware none depth_write off depth_check off twoside_diffuse true tree_wind false terrain_light_map true receives_shadows false } material crystal6 { parent base diffuseMap crystal_form2.jpg normalMap crystal_form2_n.jpg //ambient 1 1 1 ambient 0.08 0.1 0.12 diffuse 0.22 0.25 0.3 specular 0.2 0.2 0.3 64 transparent true scene_blend add //scene_blend alpha_blend cull_hardware none depth_write off depth_check off twoside_diffuse true tree_wind false terrain_light_map true receives_shadows false } // - material crystal_cube4 { parent base diffuseMap crystal_form3.jpg normalMap crystal_form3_n.jpg ambient 0.34 0.80 0.78 diffuse 0.20 0.90 0.85 specular 0.5 0.95 1.0 128 tree_wind false terrain_light_map true twoside_diffuse true //receives_shadows false shadowBias 0.0001 bump_scale 3 } material crystal_form1 { parent base diffuseMap crystal_form1.jpg normalMap crystal_form1_n.jpg ambient 0.75 0.85 0.9 diffuse 0.6 0.80 0.90 specular 0.3 0.4 1.0 20 env_map true envMap ReflectionCube refl_amount 0.2 tree_wind false terrain_light_map true bump_scale 1 } material crystalForm4 { parent base diffuseMap crystal_form2.jpg normalMap crystal_form2_n.jpg ambient 0.9 0.9 0.9 diffuse 0.85 0.8 0.85 specular 0.05 0.4 0.4 64 tree_wind false terrain_light_map true bump_scale 2 } material crystal_form3 { parent base diffuseMap crystal_form3.jpg normalMap crystal_form3_n.jpg ambient 0.8 0.85 0.9 diffuse 0.8 0.85 0.9 specular 0.3 0.35 0.4 32 env_map true envMap ReflectionCube refl_amount 0.2 tree_wind false terrain_light_map true bump_scale 3 } // Alien //------------------------------------------------------------------ material gum_branch_alien { parent base diffuseMap gum_branch_alien.png ambient 1.0 1.2 0.9 diffuse 1.0 1.2 0.9 specular 0.12 0.12 0.12 32 alpha_rejection greater 128 cull_hardware none transparent true tree_wind true //receives_shadows false } material gum_bark_alien { parent base diffuseMap gum_bark_alien.png ambient 0.8 0.8 0.8 diffuse 0.64 0.64 0.64 specular 0.12 0.12 0.12 32 tree_wind true } material jungle_tree_alien { parent base diffuseMap jungle_tree_alien.png normalMap jungle_tree_normal.png //specMap jungle_tree_spec.png ambient 0.6 0.7 0.6 diffuse 0.6 0.72 0.7 specular 0.4 0.4 0.4 32 alpha_rejection greater 128 transparent true tree_wind true terrain_light_map true cull_hardware none receives_shadows false } material rock_L2 { parent base diffuseMap rock_L2.jpg normalMap rock_n2_norm.png terrain_light_map true ambient 1.0 1.0 1.0 diffuse 0.7 0.7 0.8 specular 0.3 0.5 0.2 32 } material rock_L3 { parent base diffuseMap rock_L3.jpg normalMap rock_n3_norm.png terrain_light_map true ambient 1.0 1.0 1.0 diffuse 0.7 0.7 0.8 //specular 
0.5 0.5 1.0 32 } material rock_L4 { parent base diffuseMap rock_L4.jpg normalMap rock_n4_norm.png terrain_light_map true ambient 1.0 1.0 1.0 diffuse 0.7 0.7 0.8 //specular 0.5 0.5 1.0 32 } material shroom1_alien { parent base diffuseMap shroom_alien.png normalMap shroom2_norm.png terrain_light_map true ambient 1.0 1.0 1.0 diffuse 0.7 0.7 0.8 //specular 0.5 0.5 1.0 32 } material shroom2_alien { parent base diffuseMap shroom_alien.png normalMap shroom2_norm.png terrain_light_map true ambient 1.0 1.0 1.0 diffuse 0.7 0.7 0.8 //specular 0.5 0.5 1.0 32 } material shroom3_alien { parent base diffuseMap shroom_alien.png normalMap shroom2_norm.png terrain_light_map true ambient 1.0 1.0 1.0 diffuse 0.7 0.7 0.8 //specular 0.5 0.5 1.0 32 } // Black Desert //------------------------------------------------------------------ material fern2_black { parent base transparent true tree_wind true terrain_light_map true alpha_rejection greater 128 diffuseMap fern2.png normalMap fern2_normal.png specMap fern2_spec.png specular 0.3 0.5 0.7 32 emissive 0.1 0.25 2.5 cull_hardware none receives_shadows false } material gum_branch_black { parent base diffuseMap gum_branch_alien.png ambient 0.9 0.9 0.9 diffuse 0.9 0.9 0.9 specular 0.12 0.16 0.2 32 emissive 0.08 0.15 0.8 alpha_rejection greater 128 cull_hardware none transparent true tree_wind true //receives_shadows false }
{ "pile_set_name": "Github" }
# Like a Song ## Endpoint /api/v1/music/{music_id}/digg ## Request Method POST ### HTTP Status Code 201 ## Response Body ```json5 { "status": true, "code": 0, "message": "Liked successfully", "data": null } ```
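A minimal Python sketch of calling this endpoint may make the contract easier to test; the base URL and the bearer-token header are assumptions for illustration, while the path, the POST method, the 201 status and the response shape come from the doc above.

```python
# Hypothetical client call; base_url and the Authorization header are assumed,
# the path, method, status code and response shape follow the doc above.
import requests

def digg_music(music_id: int, token: str, base_url: str = "https://api.example.com") -> bool:
    resp = requests.post(
        f"{base_url}/api/v1/music/{music_id}/digg",
        headers={"Authorization": f"Bearer {token}"},
    )
    assert resp.status_code == 201            # documented success status
    body = resp.json()                        # {"status": true, "code": 0, ...}
    return body["status"]
```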
{ "pile_set_name": "Github" }
using System; namespace ExpressMapper.Tests.Model.ViewModels { public class OrganizationViewModel : ContactViewModel, IEquatable<PersonViewModel> { public OrganizationViewModel() { IsOrganization = true; IsPerson = false; } public string Name { get; set; } public OrganizationViewModel Relative { get; set; } public bool Equals(PersonViewModel other) { return ContactEquals(other) && Name == other.Name && (Relative == null || Relative.Equals(other.Relative)); } } }
{ "pile_set_name": "Github" }
From e6735556ed0a5e791ea81a015a90c130a0eea060 Mon Sep 17 00:00:00 2001 From: Xi Wang <[email protected]> Date: Wed, 20 Feb 2013 12:45:45 -0500 Subject: [PATCH] nice: fix overflow checking in int_add_no_wrap() In C, signed integer overflow is undefined behavior. Many compilers optimize away checks like `a + b < a'. Use safe precondition testing instead. Signed-off-by: Xi Wang <[email protected]> Signed-off-by: Bernhard Reutner-Fischer <[email protected]> --- libc/sysdeps/linux/common/nice.c | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/libc/sysdeps/linux/common/nice.c b/libc/sysdeps/linux/common/nice.c index 3694db8..ed39946 100644 --- a/libc/sysdeps/linux/common/nice.c +++ b/libc/sysdeps/linux/common/nice.c @@ -25,15 +25,15 @@ static __inline__ _syscall1(int, __syscall_nice, int, incr) static __inline__ int int_add_no_wrap(int a, int b) { - int s = a + b; - if (b < 0) { - if (s > a) s = INT_MIN; + if (a < INT_MIN - b) + return INT_MIN; } else { - if (s < a) s = INT_MAX; + if (a > INT_MAX - b) + return INT_MAX; } - return s; + return a + b; } static __inline__ int __syscall_nice(int incr) -- 1.7.10.4
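The patched check transcribes almost line-for-line into Python; since Python integers never wrap, the 32-bit bounds are modeled as constants here, purely to illustrate the precondition test the patch introduces.

```python
# Python model of the patched int_add_no_wrap(); INT_MIN/INT_MAX stand in for
# the C limits, because Python ints themselves cannot overflow.
INT_MIN, INT_MAX = -2**31, 2**31 - 1

def int_add_no_wrap(a, b):
    if b < 0:
        if a < INT_MIN - b:       # a + b would fall below INT_MIN
            return INT_MIN
    else:
        if a > INT_MAX - b:       # a + b would exceed INT_MAX
            return INT_MAX
    return a + b

assert int_add_no_wrap(INT_MAX, 5) == INT_MAX      # saturates instead of wrapping
assert int_add_no_wrap(INT_MIN, -5) == INT_MIN
assert int_add_no_wrap(10, -15) == -5              # in-range sums pass through
```

The point of the rewrite is that the comparison happens before the addition, so the compiler never sees a signed expression whose overflow it is entitled to assume away.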
{ "pile_set_name": "Github" }
package afero import ( "io" "os" "path/filepath" "syscall" ) // The UnionFile implements the afero.File interface and will be returned // when reading a directory present at least in the overlay or opening a file // for writing. // // The calls to // Readdir() and Readdirnames() merge the file os.FileInfo / names from the // base and the overlay - for files present in both layers, only those // from the overlay will be used. // // When opening files for writing (Create() / OpenFile() with the right flags) // the operations will be done in both layers, starting with the overlay. A // successful read in the overlay will move the cursor position in the base layer // by the number of bytes read. type UnionFile struct { Base File Layer File Merger DirsMerger off int files []os.FileInfo } func (f *UnionFile) Close() error { // first close base, so we have a newer timestamp in the overlay. If we'd close // the overlay first, we'd get a cacheStale the next time we access this file // -> cache would be useless ;-) if f.Base != nil { f.Base.Close() } if f.Layer != nil { return f.Layer.Close() } return BADFD } func (f *UnionFile) Read(s []byte) (int, error) { if f.Layer != nil { n, err := f.Layer.Read(s) if (err == nil || err == io.EOF) && f.Base != nil { // advance the file position also in the base file, the next // call may be a write at this position (or a seek with SEEK_CUR) if _, seekErr := f.Base.Seek(int64(n), os.SEEK_CUR); seekErr != nil { // only overwrite err in case the seek fails: we need to // report an eventual io.EOF to the caller err = seekErr } } return n, err } if f.Base != nil { return f.Base.Read(s) } return 0, BADFD } func (f *UnionFile) ReadAt(s []byte, o int64) (int, error) { if f.Layer != nil { n, err := f.Layer.ReadAt(s, o) if (err == nil || err == io.EOF) && f.Base != nil { _, err = f.Base.Seek(o+int64(n), os.SEEK_SET) } return n, err } if f.Base != nil { return f.Base.ReadAt(s, o) } return 0, BADFD } func (f *UnionFile) Seek(o int64, w int) (pos int64, err error) { if f.Layer != nil { pos, err = f.Layer.Seek(o, w) if (err == nil || err == io.EOF) && f.Base != nil { _, err = f.Base.Seek(o, w) } return pos, err } if f.Base != nil { return f.Base.Seek(o, w) } return 0, BADFD } func (f *UnionFile) Write(s []byte) (n int, err error) { if f.Layer != nil { n, err = f.Layer.Write(s) if err == nil && f.Base != nil { // hmm, do we have fixed size files where a write may hit the EOF mark? _, err = f.Base.Write(s) } return n, err } if f.Base != nil { return f.Base.Write(s) } return 0, BADFD } func (f *UnionFile) WriteAt(s []byte, o int64) (n int, err error) { if f.Layer != nil { n, err = f.Layer.WriteAt(s, o) if err == nil && f.Base != nil { _, err = f.Base.WriteAt(s, o) } return n, err } if f.Base != nil { return f.Base.WriteAt(s, o) } return 0, BADFD } func (f *UnionFile) Name() string { if f.Layer != nil { return f.Layer.Name() } return f.Base.Name() } // DirsMerger is how UnionFile weaves two directories together. // It takes the FileInfo slices from the layer and the base and returns a // single view. 
type DirsMerger func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) var defaultUnionMergeDirsFn = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) { var files = make(map[string]os.FileInfo) for _, fi := range lofi { files[fi.Name()] = fi } for _, fi := range bofi { if _, exists := files[fi.Name()]; !exists { files[fi.Name()] = fi } } rfi := make([]os.FileInfo, len(files)) i := 0 for _, fi := range files { rfi[i] = fi i++ } return rfi, nil } // Readdir will weave the two directories together and // return a single view of the overlayed directories. // At the end of the directory view, the error is io.EOF if c > 0. func (f *UnionFile) Readdir(c int) (ofi []os.FileInfo, err error) { var merge DirsMerger = f.Merger if merge == nil { merge = defaultUnionMergeDirsFn } if f.off == 0 { var lfi []os.FileInfo if f.Layer != nil { lfi, err = f.Layer.Readdir(-1) if err != nil { return nil, err } } var bfi []os.FileInfo if f.Base != nil { bfi, err = f.Base.Readdir(-1) if err != nil { return nil, err } } merged, err := merge(lfi, bfi) if err != nil { return nil, err } f.files = append(f.files, merged...) } if c <= 0 && len(f.files) == 0 { return f.files, nil } if f.off >= len(f.files) { return nil, io.EOF } if c <= 0 { return f.files[f.off:], nil } if c > len(f.files) { c = len(f.files) } defer func() { f.off += c }() return f.files[f.off:c], nil } func (f *UnionFile) Readdirnames(c int) ([]string, error) { rfi, err := f.Readdir(c) if err != nil { return nil, err } var names []string for _, fi := range rfi { names = append(names, fi.Name()) } return names, nil } func (f *UnionFile) Stat() (os.FileInfo, error) { if f.Layer != nil { return f.Layer.Stat() } if f.Base != nil { return f.Base.Stat() } return nil, BADFD } func (f *UnionFile) Sync() (err error) { if f.Layer != nil { err = f.Layer.Sync() if err == nil && f.Base != nil { err = f.Base.Sync() } return err } if f.Base != nil { return f.Base.Sync() } return BADFD } func (f *UnionFile) Truncate(s int64) (err error) { if f.Layer != nil { err = f.Layer.Truncate(s) if err == nil && f.Base != nil { err = f.Base.Truncate(s) } return err } if f.Base != nil { return f.Base.Truncate(s) } return BADFD } func (f *UnionFile) WriteString(s string) (n int, err error) { if f.Layer != nil { n, err = f.Layer.WriteString(s) if err == nil && f.Base != nil { _, err = f.Base.WriteString(s) } return n, err } if f.Base != nil { return f.Base.WriteString(s) } return 0, BADFD } func copyToLayer(base Fs, layer Fs, name string) error { bfh, err := base.Open(name) if err != nil { return err } defer bfh.Close() // First make sure the directory exists exists, err := Exists(layer, filepath.Dir(name)) if err != nil { return err } if !exists { err = layer.MkdirAll(filepath.Dir(name), 0777) // FIXME? if err != nil { return err } } // Create the file on the overlay lfh, err := layer.Create(name) if err != nil { return err } n, err := io.Copy(lfh, bfh) if err != nil { // If anything fails, clean up the file layer.Remove(name) lfh.Close() return err } bfi, err := bfh.Stat() if err != nil || bfi.Size() != n { layer.Remove(name) lfh.Close() return syscall.EIO } err = lfh.Close() if err != nil { layer.Remove(name) lfh.Close() return err } return layer.Chtimes(name, bfi.ModTime(), bfi.ModTime()) }
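For readers skimming the Readdir path, the overlay-wins policy of defaultUnionMergeDirsFn can also be stated outside Go; the following Python sketch uses plain dicts in place of os.FileInfo and illustrates only the merge rule, not the afero API.

```python
# Overlay-wins directory merge, mirroring defaultUnionMergeDirsFn above:
# take every entry from the layer, then add base entries the layer lacks.
def merge_dirs(layer_entries, base_entries):
    merged = {e["name"]: e for e in layer_entries}
    for entry in base_entries:
        merged.setdefault(entry["name"], entry)
    return list(merged.values())

layer = [{"name": "a.txt", "from": "layer"}]
base = [{"name": "a.txt", "from": "base"}, {"name": "b.txt", "from": "base"}]
print(merge_dirs(layer, base))   # a.txt comes from the layer, b.txt from the base
```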
{ "pile_set_name": "Github" }
/** * Orko - Copyright © 2018-2019 Graham Crockford * * <p>This program is free software: you can redistribute it and/or modify it under the terms of the * GNU Affero General Public License as published by the Free Software Foundation, either version 3 * of the License, or (at your option) any later version. * * <p>This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * <p>You should have received a copy of the GNU Affero General Public License along with this * program. If not, see <http://www.gnu.org/licenses/>. */ package com.gruelbox.orko.auth; import com.google.common.collect.FluentIterable; import java.util.Optional; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.NewCookie; public enum CookieHandlers { ACCESS_TOKEN(AuthModule.BIND_ACCESS_TOKEN_KEY); private final String name; private CookieHandlers(String name) { this.name = name; } public String getName() { return name; } public Optional<String> read(HttpServletRequest request) { if (request.getCookies() == null) return Optional.empty(); return FluentIterable.from(request.getCookies()) .firstMatch(cookie -> getName().equals(cookie.getName())) .transform(Cookie::getValue) .toJavaUtil(); } public NewCookie create(String token, AuthConfiguration authConfiguration) { return new NewCookie( getName(), token, "/", null, 1, null, authConfiguration.getJwt().getExpirationMinutes() * 60, null, authConfiguration.isHttpsOnly(), true); } }
{ "pile_set_name": "Github" }
<!DOCTYPE html> <HTML> <HEAD> <TITLE> ZTREE DEMO - Standard Data </TITLE> <meta http-equiv="content-type" content="text/html; charset=UTF-8"> <link rel="stylesheet" href="../../../css/demo.css" type="text/css"> <link rel="stylesheet" href="../../../css/zTreeStyle/zTreeStyle.css" type="text/css"> <script type="text/javascript" src="../../../js/jquery-1.4.4.min.js"></script> <script type="text/javascript" src="../../../js/jquery.ztree.core-3.5.js"></script> <!-- <script type="text/javascript" src="../../../js/jquery.ztree.excheck-3.5.js"></script> <script type="text/javascript" src="../../../js/jquery.ztree.exedit-3.5.js"></script>--> <SCRIPT type="text/javascript"> <!-- var setting = { }; var zNodes =[ { name:"pNode 01", open:true, children: [ { name:"pNode 11", children: [ { name:"leaf node 111"}, { name:"leaf node 112"}, { name:"leaf node 113"}, { name:"leaf node 114"} ]}, { name:"pNode 12", children: [ { name:"leaf node 121"}, { name:"leaf node 122"}, { name:"leaf node 123"}, { name:"leaf node 124"} ]}, { name:"pNode 13 - no child", isParent:true} ]}, { name:"pNode 02", children: [ { name:"pNode 21", open:true, children: [ { name:"leaf node 211"}, { name:"leaf node 212"}, { name:"leaf node 213"}, { name:"leaf node 214"} ]}, { name:"pNode 22", children: [ { name:"leaf node 221"}, { name:"leaf node 222"}, { name:"leaf node 223"}, { name:"leaf node 224"} ]}, { name:"pNode 23", children: [ { name:"leaf node 231"}, { name:"leaf node 232"}, { name:"leaf node 233"}, { name:"leaf node 234"} ]} ]}, { name:"pNode 3 - no child", isParent:true} ]; $(document).ready(function(){ $.fn.zTree.init($("#treeDemo"), setting, zNodes); }); //--> </SCRIPT> </HEAD> <BODY> <h1>Standard JSON Data</h1> <h6>[ File Path: core/standardData.html ]</h6> <div class="content_wrap"> <div class="zTreeDemoBackground left"> <ul id="treeDemo" class="ztree"></ul> </div> <div class="right"> <ul class="info"> <li class="title"><h2>1, Explanation of setting</h2> <ul class="list"> <li class="highlight_red">No extrally setting needed for basic functions.</li> <li>The setting.view in API documentation is associated with the display of the zTree.</li> <li>To change the 'name', 'children', 'title' attribute, please refer to the API documentation about 'setting.data.key'.</li> </ul> </li> <li class="title"><h2>2, Explanation of treeNode</h2> <ul class="list"> <li class="highlight_red">Need to use nested JSON data that include parent-child relationship between nodes <div><pre xmlns=""><code>For example: var nodes = [ {name: "pNode 01", children: [ {name: "child 01"}, {name: "child 02"} ]} ];</code></pre></div> </li> <li>To set nodes expanded by default, set treeNode.open attribute.</li> <li>No child nodes of parent node, set treeNode.isParent attribute.</li> <li>Please refer to the API documentation "treeNode data details" to view other attributes description.</li> </ul> </li> </ul> </div> </div> </BODY> </HTML>
{ "pile_set_name": "Github" }
/* * Copyright 2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.codenarc.rule.junit import org.codenarc.rule.AbstractAstVisitorRule import org.codenarc.rule.AbstractAstVisitor import org.codehaus.groovy.ast.MethodNode import org.codenarc.util.AstUtil /** * Check for throws clauses on JUnit test methods. That is not necessary in Groovy. * * This rule sets the default value of the <applyToClassNames> property to only match class names * ending in 'Test', 'Tests' or 'TestCase'. * * @author Chris Mair */ class JUnitUnnecessaryThrowsExceptionRule extends AbstractAstVisitorRule { String name = 'JUnitUnnecessaryThrowsException' int priority = 3 Class astVisitorClass = JUnitUnnecessaryThrowsExceptionAstVisitor String applyToClassNames = DEFAULT_TEST_CLASS_NAMES } class JUnitUnnecessaryThrowsExceptionAstVisitor extends AbstractAstVisitor { private static final List JUNIT4_ANNOTATIONS = ['Test', 'Before', 'BeforeClass', 'AfterClass', 'After', 'Ignore'] @Override protected void visitMethodEx(MethodNode node) { if (node.exceptions && node.parameters.size() == 0 && node.isPublic() && !node.isStatic() && node.isVoidMethod() && (isJUnit3MatchingMethod(node) || hasJUnit4Annotation(node)) ) { addViolation(node, "The ${node.name} method in class $currentClassName declares thrown exceptions, which is not necessary") } super.visitMethodEx(node) } private boolean isJUnit3MatchingMethod(MethodNode node) { return node.name.startsWith('test') || node.name in ['setUp', 'tearDown'] } private boolean hasJUnit4Annotation(MethodNode node) { return JUNIT4_ANNOTATIONS.find { annotation -> AstUtil.hasAnnotation(node, annotation) } } }
{ "pile_set_name": "Github" }
// Copyright 2012 Google Inc. All Rights Reserved. // // Use of this source code is governed by a BSD-style license // that can be found in the COPYING file in the root of the source // tree. An additional intellectual property rights grant can be found // in the file PATENTS. All contributing project authors may // be found in the AUTHORS file in the root of the source tree. // ----------------------------------------------------------------------------- // // TIFF decode. #ifndef WEBP_EXAMPLES_TIFFDEC_H_ #define WEBP_EXAMPLES_TIFFDEC_H_ #if defined(__cplusplus) || defined(c_plusplus) extern "C" { #endif struct Metadata; struct WebPPicture; // Reads a TIFF from 'filename', returning the decoded output in 'pic'. // If 'keep_alpha' is true and the TIFF has an alpha channel, the output is RGBA // otherwise it will be RGB. // Returns true on success. int ReadTIFF(const char* const filename, struct WebPPicture* const pic, int keep_alpha, struct Metadata* const metadata); #if defined(__cplusplus) || defined(c_plusplus) } // extern "C" #endif #endif // WEBP_EXAMPLES_TIFFDEC_H_
{ "pile_set_name": "Github" }
project "BulletFileLoader" kind "StaticLib" targetdir "../../../build/lib" includedirs { "../.." } files { "**.cpp", "**.h" }
{ "pile_set_name": "Github" }
namespace GraphQL.Language.AST { public enum OperationType { Query, Mutation, Subscription } }
{ "pile_set_name": "Github" }
#!/bin/sh echo ----------mksnapshot---------- echo $0 echo $1 echo $2 echo $3 echo $4 #qemu-arm /build/build-deb-arm-linux-gnueabihf/3rdParty/V8/v5.7.0.0/arm.release/mksnapshot $1 $2 $3 $4 @QEMU_ARCH@ @V8_TARGET_DIR@/@V8_PROC_ARCH@.release/mksnapshot $1 $2 $3 $4
{ "pile_set_name": "Github" }
--- title: "InstallShield Limited Edition | Microsoft Docs" ms.date: 11/15/2016 ms.prod: "visual-studio-dev14" ms.technology: "vs-ide-deployment" ms.topic: conceptual ms.assetid: 151e46e5-86da-4336-b6d0-42b5326c4884 caps.latest.revision: 9 author: mikejo5000 ms.author: mikejo manager: jillfra --- # InstallShield Limited Edition [!INCLUDE[vs2017banner](../includes/vs2017banner.md)] By using InstallShield Limited Edition, you can create a setup file and distribute it to users so that they can install a desktop application or component without being connected to a network. InstallShield Limited Edition is free for users Visual Studio Professional and Enterprise editions. It replaces Windows Installer technology, which Visual Studio no longer supports. As an alternative, you can distribute applications and components by using ClickOnce, which requires network connectivity. See [ClickOnce Security and Deployment](../deployment/clickonce-security-and-deployment.md). > [!NOTE] > You can continue using Windows Installer projects created in earlier versions of Visual Studio by installing the Visual Studio Installer Projects Extension. See [Visual Studio Installer Projects Extension](https://devblogs.microsoft.com/visualstudio/visual-studio-installer-projects-extension/). ### To enable InstallShield Limited Edition 1. On the menu bar, choose **File**, **New**, **Project**. 2. In the **New Project** dialog box, expand the **Other Project Types** node, and then choose the **Setup and Deployment** node. 3. In the template list, choose **Enable InstallShield Limited Edition**, and then choose the **OK** button. 4. In the browser window that opens, read the instructions, and then choose the **Go to the download web site** link.
{ "pile_set_name": "Github" }
/////////////////////////////////////////////////////////////////////////// // // Copyright (c) 2003, Industrial Light & Magic, a division of Lucas // Digital Ltd. LLC // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Industrial Light & Magic nor the names of // its contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // /////////////////////////////////////////////////////////////////////////// #include <ImfOutputFile.h> #include <ImfInputFile.h> #include <ImfChannelList.h> #include <ImfRgbaFile.h> #include <ImfArray.h> #include <stdio.h> #include <assert.h> using namespace std; using namespace Imath; using namespace Imf; namespace { void readImage (const char fileName[], Array2D<Rgba>& pixels, int& w, int& h, unsigned int correctChecksum) { RgbaInputFile in (fileName); const Box2i &dw = in.dataWindow(); w = dw.max.x - dw.min.x + 1; h = dw.max.y - dw.min.y + 1; int dx = dw.min.x; int dy = dw.min.y; pixels.resizeErase (h, w); in.setFrameBuffer (&pixels[0][0] - dx - dy * w, 1, w); in.readPixels (in.dataWindow().min.y, in.dataWindow().max.y); unsigned int checksum = 0; for (int y = 0; y < h; ++y) for (int x = 0; x < w; ++x) { checksum ^= pixels[y][x].r.bits(); checksum ^= pixels[y][x].g.bits(); checksum ^= pixels[y][x].b.bits(); checksum ^= pixels[y][x].a.bits(); } cout << "checksum = " << checksum << flush; assert (checksum == correctChecksum); cout << ", ok" << flush; } void readBackImage (const char fileName[], Array2D<Rgba>& pixels, const Array2D<Rgba>& pixels2, int& w, int& h, const int& xs, const int& ys) { InputFile file (fileName); Box2i dw = file.header().dataWindow(); w = dw.max.x - dw.min.x + 1; h = dw.max.y - dw.min.y + 1; pixels.resizeErase (h, w); FrameBuffer frameBuffer; char *base = (char *) (&pixels[0][0] - dw.min.x - dw.min.y * w); int xStride = sizeof (pixels[0][0]) * xs; int yStride = sizeof (pixels[0][0]) * w * ys; frameBuffer.insert ("R", // name Slice (HALF, // type base + offsetof (Rgba, r), // base xStride, // xStride yStride, // yStride xs, ys, // x/y sampling 0.0)); // fillValue frameBuffer.insert ("G", // name Slice (HALF, // type base + offsetof (Rgba, g), // base xStride, // xStride yStride, // yStride xs, ys, // 
x/y sampling 0.0)); // fillValue frameBuffer.insert ("B", // name Slice (HALF, // type base + offsetof (Rgba, b), // base xStride, // xStride yStride, // yStride xs, ys, // x/y sampling 0.0)); // fillValue frameBuffer.insert ("A", // name Slice (HALF, // type base + offsetof (Rgba, a), // base xStride, // xStride yStride, // yStride xs, ys, // x/y sampling 1.0)); // fillValue file.setFrameBuffer (frameBuffer); file.readPixels (dw.min.y, dw.max.y); cout << "comparing, " << flush; for (int y = 0; y < h; y+=ys) for (int x = 0; x < w; x+=xs) { assert(pixels2[y][x].r.bits() == pixels[y][x].r.bits()); assert(pixels2[y][x].g.bits() == pixels[y][x].g.bits()); assert(pixels2[y][x].b.bits() == pixels[y][x].b.bits()); assert(pixels2[y][x].a.bits() == pixels[y][x].a.bits()); } cout << "ok" << endl << flush; } void writeImage (const char fileName[], const Array2D<Imf::Rgba>& pixels, const int& width, const int& height, const int& xs = 1, const int& ys = 1) { // // Write the image to fileName one scanline at a time // Header header (width, height); header.compression() = PIZ_COMPRESSION; header.channels().insert ("R", Channel (HALF,xs,ys)); header.channels().insert ("G", Channel (HALF,xs,ys)); header.channels().insert ("B", Channel (HALF,xs,ys)); header.channels().insert ("A", Channel (HALF,xs,ys)); OutputFile file (fileName, header); FrameBuffer frameBuffer; char *base = (char *) (&pixels[0][0]); int xStride = sizeof (pixels[0][0]) * xs; int yStride = sizeof (pixels[0][0]) * 0; frameBuffer.insert ("R", // name Slice (HALF, // type base + offsetof (Rgba, r), // base xStride, // xStride yStride, // yStride xs, ys)); // x/y sampling frameBuffer.insert ("G", // name Slice (HALF, // type base + offsetof (Rgba, g), // base xStride, // xStride yStride, // yStride xs, ys)); // x/y sampling frameBuffer.insert ("B", // name Slice (HALF, // type base + offsetof (Rgba, b), // base xStride, // xStride yStride, // yStride xs, ys)); // x/y sampling frameBuffer.insert ("A", // name Slice (HALF, // type base + offsetof (Rgba, a), // base xStride, // xStride yStride, // yStride xs, ys)); // x/y sampling // iterate over all scanlines, and write them out for (int y = 0; y < height; ++y) { // set the base address for this scanline base = (char *) (&pixels[y][0]); frameBuffer["R"].base = base + offsetof (Rgba, r); frameBuffer["G"].base = base + offsetof (Rgba, g); frameBuffer["B"].base = base + offsetof (Rgba, b); frameBuffer["A"].base = base + offsetof (Rgba, a); // set the framebuffer and write the pixels file.setFrameBuffer (frameBuffer); file.writePixels (1); } } void readCopyRead (const char* infilename, unsigned int correctChecksum) { #ifdef PLATFORM_WIN32 const char * outfilename = "imf_test_native.exr"; #else const char * outfilename = "/var/tmp/imf_test_native.exr"; #endif int w, h; Array2D<Imf::Rgba> pixels (1,1); cout << " reading, " << flush; readImage(infilename, pixels, w, h, correctChecksum); cout << endl; for (int xs = 1; xs <= 2; ++xs) { for (int ys = 1; ys <= 2; ++ys) { cout << " x sampling " << xs << ", y sampling " << ys << ": writing image, " << flush; writeImage(outfilename, pixels, w, h, xs, ys); Array2D<Imf::Rgba> pixels2 (1,1); cout << "reading back, " << flush; readBackImage(outfilename, pixels2, pixels, w, h, xs, ys); remove(outfilename); } } } } // namespace void testNativeFormat () { try { cout << "Testing if uncompressible pixel data are written " "in Xdr, not native format" << endl; cout << "image 1:" << endl; readCopyRead("test_native1.exr", 54435); cout << "image 2:" << endl; 
readCopyRead("test_native2.exr", 37639); cout << "ok\n" << endl; } catch (const std::exception &e) { cerr << "ERROR -- caught exception: " << e.what() << endl; assert (false); } }
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: f3acae864dc04ed88a5f043aa32519e9 MonoImporter: externalObjects: {} serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {fileID: 2800000, guid: 8ac5213854cf4dbabd140decf8df1946, type: 3} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
#include "tommath.h" #ifdef BN_MP_INIT_C /* LibTomMath, multiple-precision integer library -- Tom St Denis * * LibTomMath is a library that provides multiple-precision * integer arithmetic as well as number theoretic functionality. * * The library was designed directly after the MPI library by * Michael Fromberger but has been written from scratch with * additional optimizations in place. * * The library is free for all purposes without any express * guarantee it works. * * Tom St Denis, [email protected], http://libtom.org */ /* init a new mp_int */ int mp_init (mp_int * a) { int i; /* allocate memory required and clear it */ a->dp = OPT_CAST(mp_digit) XMALLOC (sizeof (mp_digit) * MP_PREC); if (a->dp == NULL) { return MP_MEM; } /* set the digits to zero */ for (i = 0; i < MP_PREC; i++) { a->dp[i] = 0; } /* set the used to zero, allocated digits to the default precision * and sign to positive */ a->used = 0; a->alloc = MP_PREC; a->sign = MP_ZPOS; return MP_OKAY; } #endif /* $Source: /cvs/libtom/libtommath/bn_mp_init.c,v $ */ /* $Revision: 1.4 $ */ /* $Date: 2006/12/28 01:25:13 $ */
{ "pile_set_name": "Github" }
/* $Date: 2005/10/24 23:18:13 $ $RCSfile: mv88e1xxx.c,v $ $Revision: 1.49 $ */ #include "common.h" #include "mv88e1xxx.h" #include "cphy.h" #include "elmer0.h" /* MV88E1XXX MDI crossover register values */ #define CROSSOVER_MDI 0 #define CROSSOVER_MDIX 1 #define CROSSOVER_AUTO 3 #define INTR_ENABLE_MASK 0x6CA0 /* * Set the bits given by 'bitval' in PHY register 'reg'. */ static void mdio_set_bit(struct cphy *cphy, int reg, u32 bitval) { u32 val; (void) simple_mdio_read(cphy, reg, &val); (void) simple_mdio_write(cphy, reg, val | bitval); } /* * Clear the bits given by 'bitval' in PHY register 'reg'. */ static void mdio_clear_bit(struct cphy *cphy, int reg, u32 bitval) { u32 val; (void) simple_mdio_read(cphy, reg, &val); (void) simple_mdio_write(cphy, reg, val & ~bitval); } /* * NAME: phy_reset * * DESC: Reset the given PHY's port. NOTE: This is not a global * chip reset. * * PARAMS: cphy - Pointer to PHY instance data. * * RETURN: 0 - Successful reset. * -1 - Timeout. */ static int mv88e1xxx_reset(struct cphy *cphy, int wait) { u32 ctl; int time_out = 1000; mdio_set_bit(cphy, MII_BMCR, BMCR_RESET); do { (void) simple_mdio_read(cphy, MII_BMCR, &ctl); ctl &= BMCR_RESET; if (ctl) udelay(1); } while (ctl && --time_out); return ctl ? -1 : 0; } static int mv88e1xxx_interrupt_enable(struct cphy *cphy) { /* Enable PHY interrupts. */ (void) simple_mdio_write(cphy, MV88E1XXX_INTERRUPT_ENABLE_REGISTER, INTR_ENABLE_MASK); /* Enable Marvell interrupts through Elmer0. */ if (t1_is_asic(cphy->adapter)) { u32 elmer; t1_tpi_read(cphy->adapter, A_ELMER0_INT_ENABLE, &elmer); elmer |= ELMER0_GP_BIT1; if (is_T2(cphy->adapter)) elmer |= ELMER0_GP_BIT2 | ELMER0_GP_BIT3 | ELMER0_GP_BIT4; t1_tpi_write(cphy->adapter, A_ELMER0_INT_ENABLE, elmer); } return 0; } static int mv88e1xxx_interrupt_disable(struct cphy *cphy) { /* Disable all phy interrupts. */ (void) simple_mdio_write(cphy, MV88E1XXX_INTERRUPT_ENABLE_REGISTER, 0); /* Disable Marvell interrupts through Elmer0. */ if (t1_is_asic(cphy->adapter)) { u32 elmer; t1_tpi_read(cphy->adapter, A_ELMER0_INT_ENABLE, &elmer); elmer &= ~ELMER0_GP_BIT1; if (is_T2(cphy->adapter)) elmer &= ~(ELMER0_GP_BIT2|ELMER0_GP_BIT3|ELMER0_GP_BIT4); t1_tpi_write(cphy->adapter, A_ELMER0_INT_ENABLE, elmer); } return 0; } static int mv88e1xxx_interrupt_clear(struct cphy *cphy) { u32 elmer; /* Clear PHY interrupts by reading the register. */ (void) simple_mdio_read(cphy, MV88E1XXX_INTERRUPT_STATUS_REGISTER, &elmer); /* Clear Marvell interrupts through Elmer0. */ if (t1_is_asic(cphy->adapter)) { t1_tpi_read(cphy->adapter, A_ELMER0_INT_CAUSE, &elmer); elmer |= ELMER0_GP_BIT1; if (is_T2(cphy->adapter)) elmer |= ELMER0_GP_BIT2|ELMER0_GP_BIT3|ELMER0_GP_BIT4; t1_tpi_write(cphy->adapter, A_ELMER0_INT_CAUSE, elmer); } return 0; } /* * Set the PHY speed and duplex. This also disables auto-negotiation, except * for 1Gb/s, where auto-negotiation is mandatory. 
*/ static int mv88e1xxx_set_speed_duplex(struct cphy *phy, int speed, int duplex) { u32 ctl; (void) simple_mdio_read(phy, MII_BMCR, &ctl); if (speed >= 0) { ctl &= ~(BMCR_SPEED100 | BMCR_SPEED1000 | BMCR_ANENABLE); if (speed == SPEED_100) ctl |= BMCR_SPEED100; else if (speed == SPEED_1000) ctl |= BMCR_SPEED1000; } if (duplex >= 0) { ctl &= ~(BMCR_FULLDPLX | BMCR_ANENABLE); if (duplex == DUPLEX_FULL) ctl |= BMCR_FULLDPLX; } if (ctl & BMCR_SPEED1000) /* auto-negotiation required for 1Gb/s */ ctl |= BMCR_ANENABLE; (void) simple_mdio_write(phy, MII_BMCR, ctl); return 0; } static int mv88e1xxx_crossover_set(struct cphy *cphy, int crossover) { u32 data32; (void) simple_mdio_read(cphy, MV88E1XXX_SPECIFIC_CNTRL_REGISTER, &data32); data32 &= ~V_PSCR_MDI_XOVER_MODE(M_PSCR_MDI_XOVER_MODE); data32 |= V_PSCR_MDI_XOVER_MODE(crossover); (void) simple_mdio_write(cphy, MV88E1XXX_SPECIFIC_CNTRL_REGISTER, data32); return 0; } static int mv88e1xxx_autoneg_enable(struct cphy *cphy) { u32 ctl; (void) mv88e1xxx_crossover_set(cphy, CROSSOVER_AUTO); (void) simple_mdio_read(cphy, MII_BMCR, &ctl); /* restart autoneg for change to take effect */ ctl |= BMCR_ANENABLE | BMCR_ANRESTART; (void) simple_mdio_write(cphy, MII_BMCR, ctl); return 0; } static int mv88e1xxx_autoneg_disable(struct cphy *cphy) { u32 ctl; /* * Crossover *must* be set to manual in order to disable auto-neg. * The Alaska FAQs document highlights this point. */ (void) mv88e1xxx_crossover_set(cphy, CROSSOVER_MDI); /* * Must include autoneg reset when disabling auto-neg. This * is described in the Alaska FAQ document. */ (void) simple_mdio_read(cphy, MII_BMCR, &ctl); ctl &= ~BMCR_ANENABLE; (void) simple_mdio_write(cphy, MII_BMCR, ctl | BMCR_ANRESTART); return 0; } static int mv88e1xxx_autoneg_restart(struct cphy *cphy) { mdio_set_bit(cphy, MII_BMCR, BMCR_ANRESTART); return 0; } static int mv88e1xxx_advertise(struct cphy *phy, unsigned int advertise_map) { u32 val = 0; if (advertise_map & (ADVERTISED_1000baseT_Half | ADVERTISED_1000baseT_Full)) { (void) simple_mdio_read(phy, MII_GBCR, &val); val &= ~(GBCR_ADV_1000HALF | GBCR_ADV_1000FULL); if (advertise_map & ADVERTISED_1000baseT_Half) val |= GBCR_ADV_1000HALF; if (advertise_map & ADVERTISED_1000baseT_Full) val |= GBCR_ADV_1000FULL; } (void) simple_mdio_write(phy, MII_GBCR, val); val = 1; if (advertise_map & ADVERTISED_10baseT_Half) val |= ADVERTISE_10HALF; if (advertise_map & ADVERTISED_10baseT_Full) val |= ADVERTISE_10FULL; if (advertise_map & ADVERTISED_100baseT_Half) val |= ADVERTISE_100HALF; if (advertise_map & ADVERTISED_100baseT_Full) val |= ADVERTISE_100FULL; if (advertise_map & ADVERTISED_PAUSE) val |= ADVERTISE_PAUSE; if (advertise_map & ADVERTISED_ASYM_PAUSE) val |= ADVERTISE_PAUSE_ASYM; (void) simple_mdio_write(phy, MII_ADVERTISE, val); return 0; } static int mv88e1xxx_set_loopback(struct cphy *cphy, int on) { if (on) mdio_set_bit(cphy, MII_BMCR, BMCR_LOOPBACK); else mdio_clear_bit(cphy, MII_BMCR, BMCR_LOOPBACK); return 0; } static int mv88e1xxx_get_link_status(struct cphy *cphy, int *link_ok, int *speed, int *duplex, int *fc) { u32 status; int sp = -1, dplx = -1, pause = 0; (void) simple_mdio_read(cphy, MV88E1XXX_SPECIFIC_STATUS_REGISTER, &status); if ((status & V_PSSR_STATUS_RESOLVED) != 0) { if (status & V_PSSR_RX_PAUSE) pause |= PAUSE_RX; if (status & V_PSSR_TX_PAUSE) pause |= PAUSE_TX; dplx = (status & V_PSSR_DUPLEX) ? 
DUPLEX_FULL : DUPLEX_HALF; sp = G_PSSR_SPEED(status); if (sp == 0) sp = SPEED_10; else if (sp == 1) sp = SPEED_100; else sp = SPEED_1000; } if (link_ok) *link_ok = (status & V_PSSR_LINK) != 0; if (speed) *speed = sp; if (duplex) *duplex = dplx; if (fc) *fc = pause; return 0; } static int mv88e1xxx_downshift_set(struct cphy *cphy, int downshift_enable) { u32 val; (void) simple_mdio_read(cphy, MV88E1XXX_EXT_PHY_SPECIFIC_CNTRL_REGISTER, &val); /* * Set the downshift counter to 2 so we try to establish Gb link * twice before downshifting. */ val &= ~(V_DOWNSHIFT_ENABLE | V_DOWNSHIFT_CNT(M_DOWNSHIFT_CNT)); if (downshift_enable) val |= V_DOWNSHIFT_ENABLE | V_DOWNSHIFT_CNT(2); (void) simple_mdio_write(cphy, MV88E1XXX_EXT_PHY_SPECIFIC_CNTRL_REGISTER, val); return 0; } static int mv88e1xxx_interrupt_handler(struct cphy *cphy) { int cphy_cause = 0; u32 status; /* * Loop until cause reads zero. Need to handle bouncing interrupts. */ while (1) { u32 cause; (void) simple_mdio_read(cphy, MV88E1XXX_INTERRUPT_STATUS_REGISTER, &cause); cause &= INTR_ENABLE_MASK; if (!cause) break; if (cause & MV88E1XXX_INTR_LINK_CHNG) { (void) simple_mdio_read(cphy, MV88E1XXX_SPECIFIC_STATUS_REGISTER, &status); if (status & MV88E1XXX_INTR_LINK_CHNG) cphy->state |= PHY_LINK_UP; else { cphy->state &= ~PHY_LINK_UP; if (cphy->state & PHY_AUTONEG_EN) cphy->state &= ~PHY_AUTONEG_RDY; cphy_cause |= cphy_cause_link_change; } } if (cause & MV88E1XXX_INTR_AUTONEG_DONE) cphy->state |= PHY_AUTONEG_RDY; if ((cphy->state & (PHY_LINK_UP | PHY_AUTONEG_RDY)) == (PHY_LINK_UP | PHY_AUTONEG_RDY)) cphy_cause |= cphy_cause_link_change; } return cphy_cause; } static void mv88e1xxx_destroy(struct cphy *cphy) { kfree(cphy); } static struct cphy_ops mv88e1xxx_ops = { .destroy = mv88e1xxx_destroy, .reset = mv88e1xxx_reset, .interrupt_enable = mv88e1xxx_interrupt_enable, .interrupt_disable = mv88e1xxx_interrupt_disable, .interrupt_clear = mv88e1xxx_interrupt_clear, .interrupt_handler = mv88e1xxx_interrupt_handler, .autoneg_enable = mv88e1xxx_autoneg_enable, .autoneg_disable = mv88e1xxx_autoneg_disable, .autoneg_restart = mv88e1xxx_autoneg_restart, .advertise = mv88e1xxx_advertise, .set_loopback = mv88e1xxx_set_loopback, .set_speed_duplex = mv88e1xxx_set_speed_duplex, .get_link_status = mv88e1xxx_get_link_status, }; static struct cphy *mv88e1xxx_phy_create(struct net_device *dev, int phy_addr, const struct mdio_ops *mdio_ops) { struct adapter *adapter = netdev_priv(dev); struct cphy *cphy = kzalloc(sizeof(*cphy), GFP_KERNEL); if (!cphy) return NULL; cphy_init(cphy, dev, phy_addr, &mv88e1xxx_ops, mdio_ops); /* Configure particular PHY's to run in a different mode. */ if ((board_info(adapter)->caps & SUPPORTED_TP) && board_info(adapter)->chip_phy == CHBT_PHY_88E1111) { /* * Configure the PHY transmitter as class A to reduce EMI. */ (void) simple_mdio_write(cphy, MV88E1XXX_EXTENDED_ADDR_REGISTER, 0xB); (void) simple_mdio_write(cphy, MV88E1XXX_EXTENDED_REGISTER, 0x8004); } (void) mv88e1xxx_downshift_set(cphy, 1); /* Enable downshift */ /* LED */ if (is_T2(adapter)) { (void) simple_mdio_write(cphy, MV88E1XXX_LED_CONTROL_REGISTER, 0x1); } return cphy; } static int mv88e1xxx_phy_reset(adapter_t* adapter) { return 0; } const struct gphy t1_mv88e1xxx_ops = { .create = mv88e1xxx_phy_create, .reset = mv88e1xxx_phy_reset };
{ "pile_set_name": "Github" }
# Copyright 1999-2017 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 EAPI=6 # ebuild generated by hackport 0.5.1.9999 CABAL_FEATURES="" inherit haskell-cabal DESCRIPTION="query tools for the local cabal database" HOMEPAGE="https://github.com/vincenthz/cabal-db" SRC_URI="https://hackage.haskell.org/package/${P}/${P}.tar.gz" LICENSE="BSD" SLOT="0" KEYWORDS="~amd64 ~x86" IUSE="" RDEPEND="dev-haskell/ansi-wl-pprint:= dev-haskell/cabal:= dev-haskell/mtl:= >=dev-haskell/optparse-applicative-0.11:= >=dev-haskell/tar-0.4.0:= dev-haskell/utf8-string:= >=dev-lang/ghc-7.4.1:= " DEPEND="${RDEPEND} >=dev-haskell/cabal-1.8 " PATCHES=("${FILESDIR}"/${P}-oa-0.13.patch)
{ "pile_set_name": "Github" }
'use strict'; if (!require('./is-implemented')()) { Object.defineProperty(RegExp.prototype, 'search', { value: require('./shim'), configurable: true, enumerable: false, writable: true }); }
{ "pile_set_name": "Github" }
/* * Copyright (C)2014-2020 Haxe Foundation * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. */ package js.node.http; import haxe.DynamicAccess; import js.node.net.Socket; #if haxe4 import js.lib.Error; #else import js.Error; #end /** An `Agent` is responsible for managing connection persistence and reuse for HTTP clients. It maintains a queue of pending requests for a given host and port, reusing a single socket connection for each until the queue is empty, at which time the socket is either destroyed or put into a pool where it is kept to be used again for requests to the same host and port. Whether it is destroyed or pooled depends on the `keepAlive` option. Pooled connections have TCP Keep-Alive enabled for them, but servers may still close idle connections, in which case they will be removed from the pool and a new connection will be made when a new HTTP request is made for that host and port. Servers may also refuse to allow multiple requests over the same connection, in which case the connection will have to be remade for every request and cannot be pooled. The `Agent` will still make the requests to that server, but each one will occur over a new connection. When a connection is closed by the client or the server, it is removed from the pool. Any unused sockets in the pool will be unrefed so as not to keep the Node.js process running when there are no outstanding requests. (see [socket.unref()](https://nodejs.org/api/net.html#net_socket_unref)). It is good practice, to `destroy()` an Agent instance when it is no longer in use, because unused sockets consume OS resources. Sockets are removed from an agent when the socket emits either a `'close'` event or an `'agentRemove'` event. When intending to keep one HTTP request open for a long time without keeping it in the agent, something like the following may be done. An agent may also be used for an individual request. By providing `{agent: false}` as an option to the `http.get()` or `http.request()` functions, a one-time use `Agent` with default options will be used for the client connection. **/ @:jsRequire("http", "Agent") extern class Agent { /** `options` in socket.connect() are also supported. The default `http.globalAgent` that is used by `http.request()` has all of these values set to their respective defaults. To configure any of them, a custom `http.Agent` instance must be created. **/ function new(?options:HttpAgentOptions); /** Produces a socket/stream to be used for HTTP requests. 
By default, this function is the same as `net.createConnection()`. However, custom agents may override this method in case greater flexibility is desired. A socket/stream can be supplied in one of two ways: by returning the socket/stream from this function, or by passing the socket/stream to `callback`. `callback` has a signature of `(err, stream)`. **/ #if haxe4 function createConnection(options:SocketConnectOptionsTcp, ?callback:(err:Error, stream:Socket) -> Void):Socket; #else function createConnection(options:SocketConnectOptionsTcp, ?callback:Error->Socket->Void):Socket; #end /** Called when `socket` is detached from a request and could be persisted by the `Agent`. This method can be overridden by a particular `Agent` subclass. If this method returns a falsy value, the socket will be destroyed instead of persisting it for use with the next request. **/ function keepSocketAlive(socket:Socket):Void; /** Called when `socket` is attached to `request` after being persisted because of the keep-alive options. This method can be overridden by a particular `Agent` subclass. **/ function reuseSocket(socket:Socket, request:ClientRequest):Void; /** Destroy any sockets that are currently in use by the agent. It is usually not necessary to do this. However, if using an agent with `keepAlive` enabled, then it is best to explicitly shut down the agent when it will no longer be used. Otherwise, sockets may hang open for quite a long time before the server terminates them. **/ function destroy():Void; /** An object which contains arrays of sockets currently awaiting use by the agent when keepAlive is enabled. Do not modify. */ var freeSockets(default, null):DynamicAccess<Array<Socket>>; /** Get a unique name for a set of request options, to determine whether a connection can be reused. For an HTTP agent, this returns `host:port:localAddress` or `host:port:localAddress:family`. For an HTTPS agent, the name includes the CA, cert, ciphers, and other HTTPS/TLS-specific options that determine socket reusability. **/ function getName(options:js.node.Http.HttpRequestOptions):String; /** By default set to `256`. For agents with `keepAlive` enabled, this sets the maximum number of sockets that will be left open in the free state. **/ var maxFreeSockets:Float; /** By default set to `Infinity`. Determines how many concurrent sockets the agent can have open per origin. Origin is the returned value of `getName()`. **/ var maxSockets:Float; /** An object which contains queues of requests that have not yet been assigned to sockets. Do not modify. **/ var requests(default, null):DynamicAccess<Array<ClientRequest>>; /** An object which contains arrays of sockets currently in use by the agent. Do not modify. **/ var sockets(default, null):DynamicAccess<Array<Socket>>; } /** Options for `Agent` constructor. **/ typedef HttpAgentOptions = { /** Keep sockets around even when there are no outstanding requests, so they can be used for future requests without having to reestablish a TCP connection. Not to be confused with the `keep-alive` value of the `Connection` header. The `Connection: keep-alive` header is always sent when using an agent except when the `Connection` header is explicitly specified or when the `keepAlive` and `maxSockets` options are respectively set to `false` and `Infinity`, in which case `Connection: close` will be used. 
Default: `false` **/ @:optional var keepAlive:Bool; /** When using the `keepAlive` option, specifies the [initial delay](https://nodejs.org/api/net.html#net_socket_setkeepalive_enable_initialdelay) for TCP Keep-Alive packets. Ignored when the `keepAlive` option is `false` or `undefined`. Default: `1000`. **/ @:optional var keepAliveMsecs:Int; /** Maximum number of sockets to allow per host. Each request will use a new socket until the maximum is reached. Default: `Infinity`. **/ @:optional var maxSockets:Int; /** Maximum number of sockets to leave open in a free state. Only relevant if `keepAlive` is set to `true`. Default: `256`. **/ @:optional var maxFreeSockets:Int; /** Socket timeout in milliseconds. This will set the timeout when the socket is created. **/ @:optional var timeout:Int; }
{ "pile_set_name": "Github" }
<%= wrap_in_modules <<~rb
  class ApplicationMailer < ActionMailer::Base
    default from: '[email protected]'
    layout 'mailer'
  end
rb
%>
{ "pile_set_name": "Github" }
// Pegasus Frontend // Copyright (C) 2017-2019 Mátyás Mustoha // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. #pragma once #include <QString> #include <vector> namespace android { constexpr const char* jni_classname(); QString primary_storage_path(); std::vector<QString> storage_paths(); } // namespace android
{ "pile_set_name": "Github" }
/** * Contact: [email protected] */ #include "LinearRegression.h" #include <stdlib.h> #include <math.h> /* math functions */ int linreg(int n, std::unique_ptr<REAL[]> const & x, std::unique_ptr<REAL[]> const & y, REAL* m, REAL* b, REAL* r) { //int linreg(int n, const REAL x[], const REAL y[], REAL* m, REAL* b, REAL* r) { REAL sumx = 0.0; /* sum of x */ REAL sumx2 = 0.0; /* sum of x**2 */ REAL sumxy = 0.0; /* sum of x * y */ REAL sumy = 0.0; /* sum of y */ REAL sumy2 = 0.0; /* sum of y**2 */ for (int i = 0; i < n; i++) { sumx += x[i]; sumx2 += sqr(x[i]); sumxy += x[i] * y[i]; sumy += y[i]; sumy2 += sqr(y[i]); } REAL denom = (n * sumx2 - sqr(sumx)); if (denom == 0) { // singular matrix. can't solve the problem. *m = 0; *b = 0; if (r) *r = 0; return 1; } *m = (n * sumxy - sumx * sumy) / denom; *b = (sumy * sumx2 - sumx * sumxy) / denom; if (r != NULL) { *r = (sumxy - sumx * sumy / n) / /* compute correlation coeff */ sqrt((sumx2 - sqr(sumx) / n) * (sumy2 - sqr(sumy) / n)); } return 0; }
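The least-squares helper above reports the slope, intercept, and correlation coefficient through out-parameters and returns 1 when the system is singular (all x values identical). A minimal usage sketch follows; it is hypothetical (not part of the original file) and assumes `REAL` is a floating-point typedef such as `double` from `LinearRegression.h`.

// Hypothetical usage sketch for linreg() above; assumes REAL is double
// (or another floating type) as typedef'd in LinearRegression.h.
#include <cstdio>
#include <memory>
#include "LinearRegression.h"

int main() {
    const int n = 5;
    std::unique_ptr<REAL[]> x(new REAL[n]{1, 2, 3, 4, 5});
    std::unique_ptr<REAL[]> y(new REAL[n]{2.1, 3.9, 6.2, 8.0, 9.9});

    REAL m = 0, b = 0, r = 0;
    if (linreg(n, x, y, &m, &b, &r) == 0) {
        // For this data the fit is close to y = 2x, with r near 1.
        std::printf("y = %f * x + %f (r = %f)\n", (double)m, (double)b, (double)r);
    } else {
        std::printf("singular fit (all x values identical)\n");
    }
    return 0;
}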
{ "pile_set_name": "Github" }
opentracing on; {% if opentracing_config == nil or opentracing_config == empty %} {% assign opentracing_config = "conf.d/opentracing/jaeger.example.json" | filesystem | first%} {% endif %} {% if platform == "OSX" %} opentracing_load_tracer libjaegertracing.dylib {{ opentracing_config }}; {% else %} opentracing_load_tracer libjaegertracing.so {{ opentracing_config }}; {% endif %}
{ "pile_set_name": "Github" }
<component name="CopyrightManager"> <settings default="" /> </component>
{ "pile_set_name": "Github" }
/*! * Bootstrap-select v1.12.2 (http://silviomoreto.github.io/bootstrap-select) * * Copyright 2013-2017 bootstrap-select * Licensed under MIT (https://github.com/silviomoreto/bootstrap-select/blob/master/LICENSE) */ (function (root, factory) { if (typeof define === 'function' && define.amd) { // AMD. Register as an anonymous module unless amdModuleId is set define(["jquery"], function (a0) { return (factory(a0)); }); } else if (typeof module === 'object' && module.exports) { // Node. Does not work with strict CommonJS, but // only CommonJS-like environments that support module.exports, // like Node. module.exports = factory(require("jquery")); } else { factory(root["jQuery"]); } }(this, function (jQuery) { (function ($) { $.fn.selectpicker.defaults = { noneSelectedText: 'Inget valt', noneResultsText: 'Inget sökresultat matchar {0}', countSelectedText: function (numSelected, numTotal) { return (numSelected === 1) ? "{0} alternativ valt" : "{0} alternativ valda"; }, maxOptionsText: function (numAll, numGroup) { return [ 'Gräns uppnåd (max {n} alternativ)', 'Gräns uppnåd (max {n} gruppalternativ)' ]; }, selectAllText: 'Markera alla', deselectAllText: 'Avmarkera alla', multipleSeparator: ', ' }; })(jQuery); }));
{ "pile_set_name": "Github" }
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2020 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ext.oracle.model; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.access.DBARole; import org.jkiss.dbeaver.model.exec.jdbc.JDBCPreparedStatement; import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet; import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession; import org.jkiss.dbeaver.model.exec.jdbc.JDBCStatement; import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils; import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCObjectCache; import org.jkiss.dbeaver.model.meta.Association; import org.jkiss.dbeaver.model.meta.Property; import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor; import org.jkiss.dbeaver.model.struct.DBSObject; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Collection; /** * OracleRole */ public class OracleRole extends OracleGrantee implements DBARole { private static final Log log = Log.getLog(OracleRole.class); private String name; private String authentication; private final UserCache userCache = new UserCache(); public OracleRole(OracleDataSource dataSource, ResultSet resultSet) { super(dataSource); this.name = JDBCUtils.safeGetString(resultSet, "ROLE"); this.authentication = JDBCUtils.safeGetStringTrimmed(resultSet, "PASSWORD_REQUIRED"); } @NotNull @Override @Property(viewable = true, order = 2) public String getName() { return name; } @Property(viewable = true, order = 3) public String getAuthentication() { return authentication; } @Association public Collection<OraclePrivUser> getUserPrivs(DBRProgressMonitor monitor) throws DBException { return userCache.getAllObjects(monitor, this); } @Nullable @Override public DBSObject refreshObject(@NotNull DBRProgressMonitor monitor) throws DBException { userCache.clearCache(); return super.refreshObject(monitor); } static class UserCache extends JDBCObjectCache<OracleRole, OraclePrivUser> { @NotNull @Override protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull OracleRole owner) throws SQLException { final JDBCPreparedStatement dbStat = session.prepareStatement( "SELECT * FROM DBA_ROLE_PRIVS WHERE GRANTED_ROLE=? ORDER BY GRANTEE"); dbStat.setString(1, owner.getName()); return dbStat; } @Override protected OraclePrivUser fetchObject(@NotNull JDBCSession session, @NotNull OracleRole owner, @NotNull JDBCResultSet resultSet) throws SQLException, DBException { return new OraclePrivUser(owner, resultSet); } } }
{ "pile_set_name": "Github" }
<http://a.example/s> <http://a.example/p> "\n" .
{ "pile_set_name": "Github" }
# Classes TODO: Brief intro to classes. [Models](mvc/models) and [Controllers](mvc/controllers) are classes as well, but are treated slightly differently by Kohana. Read their respective pages to learn more. ## Helper or Library? Kohana 3 does not differentiate between "helper" classes and "library" classes like in previous versions. They are all placed in the `classes/` folder and follow the same conventions. The distinction is that, in general, a "helper" class is used statically (for examples, see the [helpers included in Kohana](helpers)), while library classes are typically instantiated and used as objects (like the [Database query builders](../database/query/builder)). The distinction is not black and white, and is irrelevant anyway, since they are treated the same by Kohana. ## Creating a class To create a new class, simply place a file in the `classes/` directory at any point in the [Cascading Filesystem](files) that follows the [Class naming conventions](conventions#class-names-and-file-location). For example, let's create a `Foobar` class. // classes/foobar.php class Foobar { static function magic() { // Does something } } We can now call `Foobar::magic()` anywhere and Kohana will [autoload](autoloading) the file for us. We can also put classes in subdirectories. // classes/professor/baxter.php class Professor_Baxter { static function teach() { // Does something } } We could now call `Professor_Baxter::teach()` anywhere we want. For examples of how to create and use classes, simply look at the 'classes' folder in `system` or any module. ## Namespacing your classes TODO: Discuss namespacing to provide transparent extension functionality in your own classes/modules.
{ "pile_set_name": "Github" }
// This code is in the public domain -- [email protected] #include "nvmesh.h" // pch #include "AtlasPacker.h" #include "nvmesh/geometry/Measurements.h" #include "nvmesh/halfedge/Vertex.h" #include "nvmesh/halfedge/Face.h" #include "nvmesh/param/Atlas.h" #include "nvmesh/param/Util.h" #include "nvmesh/raster/Raster.h" #include "nvmath/Vector.inl" #include "nvmath/ConvexHull.h" #include "nvcore/StrLib.h" // debug #include "nvcore/StdStream.h" // fileOpen #include <float.h> // FLT_MAX #include <limits.h> // UINT_MAX using namespace nv; AtlasPacker::AtlasPacker(Atlas * atlas) : m_atlas(atlas), m_bitmap(256, 256) { m_width = 0; m_height = 0; } AtlasPacker::~AtlasPacker() { } // This should compute convex hull and use rotating calipers to find the best box. Currently it uses a brute force method. static void computeBoundingBox(Chart * chart, Vector2 * majorAxis, Vector2 * minorAxis, Vector2 * minCorner, Vector2 * maxCorner) { // Compute list of boundary points. Array<Vector2> points(16); HalfEdge::Mesh * mesh = chart->chartMesh(); const uint vertexCount = mesh->vertexCount(); for (uint i = 0; i < vertexCount; i++) { HalfEdge::Vertex * vertex = mesh->vertexAt(i); if (vertex->isBoundary()) { points.append(vertex->tex); } } Array<Vector2> hull; convexHull(points, hull, 0.00001f); // @@ Ideally I should use rotating calipers to find the best box. Using brute force for now. float best_area = FLT_MAX; Vector2 best_min; Vector2 best_max; Vector2 best_axis; const uint hullCount = hull.count(); for (uint i = 0, j = hullCount-1; i < hullCount; j = i, i++) { if (equal(hull[i], hull[j])) { continue; } Vector2 axis = normalize(hull[i] - hull[j], 0.0f); nvDebugCheck(isFinite(axis)); // Compute bounding box. Vector2 box_min(FLT_MAX, FLT_MAX); Vector2 box_max(-FLT_MAX, -FLT_MAX); for (uint v = 0; v < hullCount; v++) { Vector2 point = hull[v]; float x = dot(axis, point); if (x < box_min.x) box_min.x = x; if (x > box_max.x) box_max.x = x; float y = dot(Vector2(-axis.y, axis.x), point); if (y < box_min.y) box_min.y = y; if (y > box_max.y) box_max.y = y; } // Compute box area. float area = (box_max.x - box_min.x) * (box_max.y - box_min.y); if (area < best_area) { best_area = area; best_min = box_min; best_max = box_max; best_axis = axis; } } // Consider all points, not only boundary points, in case the input chart is malformed. for (uint i = 0; i < vertexCount; i++) { HalfEdge::Vertex * vertex = mesh->vertexAt(i); Vector2 point = vertex->tex; float x = dot(best_axis, point); if (x < best_min.x) best_min.x = x; if (x > best_max.x) best_max.x = x; float y = dot(Vector2(-best_axis.y, best_axis.x), point); if (y < best_min.y) best_min.y = y; if (y > best_max.y) best_max.y = y; } *majorAxis = best_axis; *minorAxis = Vector2(-best_axis.y, best_axis.x); *minCorner = best_min; *maxCorner = best_max; } void AtlasPacker::packCharts(int quality, float texelsPerUnit, int padding) { nvCheck(padding >= 0); m_padding = padding; const uint chartCount = m_atlas->chartCount(); Array<float> chartOrderArray; chartOrderArray.resize(chartCount); Array<Vector2> chartExtents; chartExtents.resize(chartCount); float meshArea = 0; for (uint c = 0; c < chartCount; c++) { Chart * chart = m_atlas->chartAt(c); if (!chart->isDisk()) { chartOrderArray[c] = 0; // Skip non-disks. continue; } // Compute surface area to sort charts. 
float chartArea = computeSurfaceArea(chart->chartMesh()) * chart->scale; meshArea += chartArea; // Compute chart scale float parametricArea = computeParametricArea(chart->chartMesh()); float scale = (chartArea / parametricArea) * texelsPerUnit; if (parametricArea < NV_EPSILON) { scale = 0; } nvCheck(isFinite(scale)); // Compute bounding box of chart. Vector2 majorAxis, minorAxis, origin, end; computeBoundingBox(chart, &majorAxis, &minorAxis, &origin, &end); nvCheck(isFinite(majorAxis) && isFinite(minorAxis) && isFinite(origin)); // Sort charts by perimeter. @@ This is sometimes producing somewhat unexpected results. Is this right? chartOrderArray[c] = ((end.x - origin.x) + (end.y - origin.y)) * scale; // Translate, rotate and scale vertices. Compute extents. Vector2 extents(0.0f); HalfEdge::Mesh * mesh = chart->chartMesh(); const uint vertexCount = mesh->vertexCount(); for (uint i = 0; i < vertexCount; i++) { HalfEdge::Vertex * vertex = mesh->vertexAt(i); Vector2 tmp; tmp.x = dot(vertex->tex, majorAxis); tmp.y = dot(vertex->tex, minorAxis); tmp -= origin; tmp *= scale; if (tmp.x < 0 || tmp.y < 0) { nvDebug("tmp: %f %f\n", tmp.x, tmp.y); nvDebug("scale: %f\n", scale); nvDebug("origin: %f %f\n", origin.x, origin.y); nvDebug("majorAxis: %f %f\n", majorAxis.x, majorAxis.y); nvDebug("minorAxis: %f %f\n", minorAxis.x, minorAxis.y); nvDebugBreak(); } vertex->tex = tmp; extents = max(extents, tmp); } nvDebugCheck(extents.x >= 0 && extents.y >= 0); // Limit chart size. @@ Ideally we should adjust the scale of the entire mesh. if (extents.x > 1024 || extents.y > 1024) { float limit = max(extents.x, extents.y); scale = 1024 / (limit + 1); extents.set(0, 0); for (uint i = 0; i < vertexCount; i++) { HalfEdge::Vertex * vertex = mesh->vertexAt(i); vertex->tex *= scale; extents = max(extents, vertex->tex); } nvDebugCheck(extents.x <= 1024 && extents.y <= 1024); } chartExtents[c] = extents; } // Sort charts by area. m_radix.sort(chartOrderArray); const uint32 * ranks = m_radix.ranks(); // Estimate size of the map based on the mesh surface area and given texel scale. float texelCount = meshArea * square(texelsPerUnit) / 0.75f; // Assume 75% utilization. if (texelCount < 1) texelCount = 1; uint approximateExtent = nextPowerOfTwo(uint(sqrtf(texelCount))); // Init bit map. m_bitmap.clearAll(); if (approximateExtent > m_bitmap.width()) { m_bitmap.resize(approximateExtent, approximateExtent, false); } int w = 0; int h = 0; // Add sorted charts to bitmap. for (uint i = 0; i < chartCount; i++) { uint c = ranks[chartCount - i - 1]; // largest chart first Chart * chart = m_atlas->chartAt(c); if (!chart->isDisk()) continue; BitMap chart_bitmap(iceil(chartExtents[c].x) + padding * 2, iceil(chartExtents[c].y) + padding * 2); chart_bitmap.clearAll(); drawChartBitmap(chart, &chart_bitmap, padding); int best_x, best_y; int best_cw, best_ch; int best_r; findChartLocation(quality, &chart_bitmap, chartExtents[c], w, h, &best_x, &best_y, &best_cw, &best_ch, &best_r); // Update parametric extents. w = max(w, best_x + best_cw + padding); h = max(h, best_y + best_ch + padding); // Resize bitmap if necessary. if (uint(w) > m_bitmap.width() || uint(h) > m_bitmap.height()) { m_bitmap.resize(nextPowerOfTwo(w), nextPowerOfTwo(h), false); } // Add chart. addChart(chart, w, h, best_x, best_y, best_r); // Translate and rotate chart texture coordinates. 
HalfEdge::Mesh * mesh = chart->chartMesh(); const uint vertexCount = mesh->vertexCount(); for (uint v = 0; v < vertexCount; v++) { HalfEdge::Vertex * vertex = mesh->vertexAt(v); Vector2 t = vertex->tex; if (best_r) swap(t.x, t.y); vertex->tex.x = best_x + t.x; vertex->tex.y = best_y + t.y; nvCheck(vertex->tex.x >= 0 && vertex->tex.y >= 0); } } w -= padding - 1; // Leave one pixel border! h -= padding - 1; // This is not the right way to force a square POT texture. #if FORCE_POT w = nextPowerOfTwo(w); h = nextPowerOfTwo(h); w = max(w, h); h = max(w, h); m_bitmap.resize(w, h, false); #endif m_width = max(0, w); m_height = max(0, h); } void AtlasPacker::findChartLocation(int quality, const BitMap * bitmap, Vector2::Arg extents, int w, int h, int * best_x, int * best_y, int * best_w, int * best_h, int * best_r) { int attempts = 256; if (quality == 1) attempts = 4096; if (quality == 2) attempts = 2048; if (quality == 3) attempts = 1024; if (quality == 4) attempts = 512; if (quality == 0 || w*h < attempts) { findChartLocation_bruteForce(bitmap, extents, w, h, best_x, best_y, best_w, best_h, best_r); } else { findChartLocation_random(bitmap, extents, w, h, best_x, best_y, best_w, best_h, best_r, attempts); } } void AtlasPacker::findChartLocation_bruteForce(const BitMap * bitmap, Vector2::Arg extents, int w, int h, int * best_x, int * best_y, int * best_w, int * best_h, int * best_r) { int best_metric = INT_MAX; // Try two different orientations. for (int r = 0; r < 2; r++) { int cw = iceil(extents.x); int ch = iceil(extents.y); if (r & 1) swap(cw, ch); for (int y = 1; y <= h + m_padding; y++) // Leave 1 pixel border on the left { for (int x = 1; x <= w + m_padding; x++) // Leave 1 pixel border on the top { // Early out. int area = max(w, x+cw+m_padding) * max(h, y+ch+m_padding); //int perimeter = max(w, x+cw+m_padding) + max(h, y+ch+m_padding); int extents = max(max(w, x+cw+m_padding), max(h, y+ch+m_padding)); int metric = extents*extents + area; if (metric > best_metric) { continue; } if (metric == best_metric && max(x, y) >= max(*best_x, *best_y)) { // If metric is the same, pick the one closest to the origin. continue; } if (canAddChart(bitmap, w, h, x, y, r)) { best_metric = metric; *best_x = x; *best_y = y; *best_w = cw; *best_h = ch; *best_r = r; if (area == w*h) { // Chart is completely inside, do not look at any other location. goto done; } } } } } done: nvDebugCheck (best_metric != INT_MAX); } void AtlasPacker::findChartLocation_random(const BitMap * bitmap, Vector2::Arg extents, int w, int h, int * best_x, int * best_y, int * best_w, int * best_h, int * best_r, int minTrialCount) { int best_metric = INT_MAX; for (int i = 0; i < minTrialCount || best_metric == INT_MAX; i++) { int r = m_rand.getRange(1); int x = 1 + m_rand.getRange(w + m_padding - 1); int y = 1 + m_rand.getRange(h + m_padding - 1); int cw = iceil(extents.x); int ch = iceil(extents.y); if (r & 1) swap(cw, ch); // Early out. int area = max(w, x+cw+m_padding) * max(h, y+ch+m_padding); //int perimeter = max(w, x+cw+m_padding) + max(h, y+ch+m_padding); int extents = max(max(w, x+cw+m_padding), max(h, y+ch+m_padding)); int metric = extents*extents + area; if (metric > best_metric) { continue; } if (metric == best_metric && min(x, y) > min(*best_x, *best_y)) { // If metric is the same, pick the one closest to the origin. 
continue; } if (canAddChart(bitmap, w, h, x, y, r)) { best_metric = metric; *best_x = x; *best_y = y; *best_w = cw; *best_h = ch; *best_r = r; if (area == w*h) { // Chart is completely inside, do not look at any other location. break; } } } } void AtlasPacker::drawChartBitmap(const Chart * chart, BitMap * bitmap, int padding) { const int w = bitmap->width(); const int h = bitmap->height(); const Vector2 extents = Vector2(float(w), float(h)); // Rasterize chart faces, check that all bits are not set. const uint faceCount = chart->faceCount(); for (uint f = 0; f < faceCount; f++) { const HalfEdge::Face * face = chart->chartMesh()->faceAt(f); Vector2 vertices[4]; uint edgeCount = 0; for (HalfEdge::Face::ConstEdgeIterator it(face->edges()); !it.isDone(); it.advance()) { if (edgeCount < 4) { vertices[edgeCount] = it.vertex()->tex + Vector2(float(padding), float(padding)); } edgeCount++; } if (edgeCount == 3) { Raster::drawTriangle(true, extents, true, vertices, AtlasPacker::setBitsCallback, bitmap); } else { Raster::drawQuad(true, extents, true, vertices, AtlasPacker::setBitsCallback, bitmap); } } // Expand chart by padding pixels. (dilation) BitMap tmp(w, h); for (int i = 0; i < padding; i++) { tmp.clearAll(); for (int y = 0; y < h; y++) { for (int x = 0; x < w; x++) { bool b = bitmap->bitAt(x, y); if (!b) { if (x > 0) { b |= bitmap->bitAt(x - 1, y); if (y > 0) b |= bitmap->bitAt(x - 1, y - 1); if (y < h-1) b |= bitmap->bitAt(x - 1, y + 1); } if (y > 0) b |= bitmap->bitAt(x, y - 1); if (y < h-1) b |= bitmap->bitAt(x, y + 1); if (x < w-1) { b |= bitmap->bitAt(x + 1, y); if (y > 0) b |= bitmap->bitAt(x + 1, y - 1); if (y < h-1) b |= bitmap->bitAt(x + 1, y + 1); } } if (b) tmp.setBitAt(x, y); } } swap(tmp, *bitmap); } } bool AtlasPacker::canAddChart(const BitMap * bitmap, int atlas_w, int atlas_h, int offset_x, int offset_y, int r) { nvDebugCheck(r == 0 || r == 1); // Check whether the two bitmaps overlap. const int w = bitmap->width(); const int h = bitmap->height(); if (r == 0) { for (int y = 0; y < h; y++) { int yy = y + offset_y - m_padding; if (yy >= 0) { for (int x = 0; x < w; x++) { int xx = x + offset_x - m_padding; if (xx >= 0) { if (bitmap->bitAt(x, y)) { if (xx < atlas_w && yy < atlas_h) { if (m_bitmap.bitAt(xx, yy)) return false; } } } } } } } else if (r == 1) { for (int y = 0; y < h; y++) { int xx = y + offset_x - m_padding; if (xx >= 0) { for (int x = 0; x < w; x++) { int yy = x + offset_y - m_padding; if (yy >= 0) { if (bitmap->bitAt(x, y)) { if (xx < atlas_w && yy < atlas_h) { if (m_bitmap.bitAt(xx, yy)) return false; } } } } } } } return true; } void AtlasPacker::checkCanAddChart(const Chart * chart, int w, int h, int x, int y, int r) { nvDebugCheck(r == 0 || r == 1); Vector2 extents = Vector2(float(w), float(h)); Vector2 offset = Vector2(float(x), float(y)); // Rasterize chart faces, set bits. 
const uint faceCount = chart->faceCount(); for (uint f = 0; f < faceCount; f++) { const HalfEdge::Face * face = chart->chartMesh()->faceAt(f); Vector2 vertices[4]; uint edgeCount = 0; for (HalfEdge::Face::ConstEdgeIterator it(face->edges()); !it.isDone(); it.advance()) { if (edgeCount < 4) { Vector2 t = it.vertex()->tex; if (r == 1) swap(t.x, t.y); vertices[edgeCount] = t + offset; } edgeCount++; } if (edgeCount == 3) { Raster::drawTriangle(true, extents, true, vertices, AtlasPacker::checkBitsCallback, &m_bitmap); } else { Raster::drawQuad(true, extents, true, vertices, AtlasPacker::checkBitsCallback, &m_bitmap); } } } void AtlasPacker::addChart(const Chart * chart, int w, int h, int x, int y, int r) { nvDebugCheck(r == 0 || r == 1); Vector2 extents = Vector2(float(w), float(h)); Vector2 offset = Vector2(float(x), float(y)); // Rasterize chart faces, set bits. const uint faceCount = chart->faceCount(); for (uint f = 0; f < faceCount; f++) { const HalfEdge::Face * face = chart->chartMesh()->faceAt(f); Vector2 vertices[4]; uint edgeCount = 0; for (HalfEdge::Face::ConstEdgeIterator it(face->edges()); !it.isDone(); it.advance()) { if (edgeCount < 4) { Vector2 t = it.vertex()->tex; if (r == 1) swap(t.x, t.y); vertices[edgeCount] = t + offset; } edgeCount++; } if (edgeCount == 3) { Raster::drawTriangle(true, extents, true, vertices, AtlasPacker::setBitsCallback, &m_bitmap); } else { Raster::drawQuad(true, extents, true, vertices, AtlasPacker::setBitsCallback, &m_bitmap); } } } /*static*/ bool AtlasPacker::checkBitsCallback(void * param, int x, int y, Vector3::Arg, Vector3::Arg, Vector3::Arg, float) { BitMap * bitmap = (BitMap * )param; nvDebugCheck(bitmap->bitAt(x, y) == false); return true; } /*static*/ bool AtlasPacker::setBitsCallback(void * param, int x, int y, Vector3::Arg, Vector3::Arg, Vector3::Arg, float area) { BitMap * bitmap = (BitMap * )param; if (area > 0.0001) { bitmap->setBitAt(x, y); } return true; } float AtlasPacker::computeAtlasUtilization() const { const uint w = m_width; const uint h = m_height; nvDebugCheck(w <= m_bitmap.width()); nvDebugCheck(h <= m_bitmap.height()); uint count = 0; for (uint y = 0; y < h; y++) { for (uint x = 0; x < w; x++) { count += m_bitmap.bitAt(x, y); } } return float(count) / (w * h); }
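For clarity on the placement heuristic used by `findChartLocation_bruteForce` and `findChartLocation_random` above: each candidate position is scored by the area of the enlarged atlas plus the square of its longest side, so placements that keep the atlas small and roughly square win, and ties are broken toward the origin. The following is a hypothetical standalone sketch of that score; the function name and the toy numbers are not from the original source.

#include <algorithm>
#include <cstdio>

// Hypothetical standalone restatement of the scoring rule used in
// AtlasPacker::findChartLocation_*: lower is better. Placing a chart of
// size cw x ch (plus padding) at (x, y) may grow the current atlas (w x h);
// the score combines the grown area with the squared longest side.
static int placementScore(int w, int h, int x, int y, int cw, int ch, int padding) {
    const int grownW = std::max(w, x + cw + padding);
    const int grownH = std::max(h, y + ch + padding);
    const int area = grownW * grownH;
    const int extent = std::max(grownW, grownH);
    return extent * extent + area;
}

int main() {
    // Toy example: a 32x16 chart in a 64x64 atlas with 2px padding.
    // A near-corner spot leaves the atlas at 64x64 (score 8192), while a
    // spot near the right edge grows it to 96x64 (score 15360).
    std::printf("corner  : %d\n", placementScore(64, 64, 1, 1, 32, 16, 2));
    std::printf("overflow: %d\n", placementScore(64, 64, 62, 1, 32, 16, 2));
    return 0;
}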
{ "pile_set_name": "Github" }
<?php /** * This file is part of OXID eShop Community Edition. * * OXID eShop Community Edition is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * OXID eShop Community Edition is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with OXID eShop Community Edition. If not, see <http://www.gnu.org/licenses/>. * * @link http://www.oxid-esales.com * @copyright (C) OXID eSales AG 2003-2017 * @version OXID eShop CE */ class test_12_ajax_controller_ajax extends \OxidEsales\Eshop\Application\Controller\Admin\ListComponentAjax { /** * @inheritdoc */ public function processRequest($function = null) { $response = 'test_12_ajax_controller successfully called'; oxRegistry::getConfig()->saveShopConfVar('str', 'testModule12AjaxCalledSuccessfully', $response); $this->_outputResponse('test_12_ajax_controller successfully called'); } public function getFeedback() { oxRegistry::getConfig()->saveShopConfVar('str', 'testModule12AjaxCalledSuccessfully', ''); $this->_output('test_12_ajax_controller getFeedback'); } }
{ "pile_set_name": "Github" }
/* @flow */ 'use strict'; import React from 'react'; import Immutable from 'immutable'; import { Animated, View, Text, Linking, Keyboard, Platform, Settings, Share, StatusBar, } from 'react-native'; import {WebView} from 'react-native-webview'; import Components from './WebViewScreenComponents'; import ProgressBar from '../ProgressBar'; import TinyColor from '../../lib/tinycolor'; import SafariView from 'react-native-safari-view'; import {ThemeContext} from '../ThemeContext'; class WebViewScreen extends React.Component { static navigationOptions = ({screenProps}) => { // avoid accidental scroll down to dismiss action on devices without a notch return { gestureResponseDistance: { vertical: screenProps.hasNotch ? 135 : 75, }, }; }; constructor(props) { super(props); this.siteManager = this.props.screenProps.siteManager; this.routes = []; this.backForwardAction = null; this.currentIndex = 0; this.safariViewVisible = false; SafariView.addEventListener('onShow', () => { this.safariViewVisible = true; }); SafariView.addEventListener('onDismiss', () => { this.safariViewVisible = false; }); this.state = { progress: 0, scrollDirection: '', headerBg: 'transparent', headerBgAnim: new Animated.Value(0), barStyle: 'dark-content', // default errorData: null, userAgentSuffix: 'DiscourseHub', layoutCalculated: false, hasNotch: this.props.screenProps.hasNotch, webviewUrl: this.props.navigation.getParam('url'), }; } componentDidMount() { const theme = this.context; this.setState({ headerBg: theme.grayBackground, barStyle: theme.barStyle, }); // Workaround for StatusBar bug in RN Webview // https://github.com/react-native-community/react-native-webview/issues/735 Keyboard.addListener('keyboardWillShow', this._onKeyboardShow.bind(this)); Keyboard.addListener('keyboardDidHide', this._onKeyboardShow.bind(this)); } componentDidUpdate() { const url = this.props.navigation.getParam('url'); if (url !== this.state.webviewUrl) { this.setState({ webviewUrl: url, }); } } UNSAFE_componentWillUpdate(nextProps, nextState) { if (nextState.headerBg !== this.state.headerBg) { Animated.timing(this.state.headerBgAnim, { toValue: 1, duration: 250, useNativeDriver: false, }).start(); } } _onLayout(event) { // The iPad user agent string no longer includes "iPad". // We want to serve desktop version on fullscreen iPad app // and mobile version on split view. // That's why we append the device ID (which includes "iPad" on large window sizes only) var {width, height} = event.nativeEvent.layout; this.setState({ userAgentSuffix: width > 767 ? `DiscourseHub ${this.props.screenProps.deviceId}` : 'DiscourseHub', layoutCalculated: true, }); // TODO: disable notch spacing in landscape mode } render() { const theme = this.context; return ( <Animated.View onLayout={e => this._onLayout(e)} style={{ flex: 1, paddingTop: this.state.hasNotch ? 35 : 20, backgroundColor: this.state.headerBgAnim.interpolate({ inputRange: [0, 1], outputRange: [theme.grayBackground, this.state.headerBg], }), }}> <StatusBar barStyle={this.state.barStyle} /> <View style={{marginTop: this.state.hasNotch ? 
8 : 0}}> <ProgressBar progress={this.state.progress} /> </View> {this.state.layoutCalculated && ( <WebView style={{ marginTop: -1, // hacky fix to a 1px overflow just above header }} ref={ref => (this.webview = ref)} source={{uri: this.state.webviewUrl}} applicationNameForUserAgent={this.state.userAgentSuffix} allowsBackForwardNavigationGestures={true} allowsInlineMediaPlayback={true} allowsLinkPreview={true} onError={syntheticEvent => { const {nativeEvent} = syntheticEvent; this.setState({errorData: nativeEvent}); }} renderError={errorName => ( <Components.ErrorScreen errorName={errorName} errorData={this.state.errorData} onRefresh={() => this._onRefresh()} onClose={() => this._onClose()} /> )} startInLoadingState={true} renderLoading={() => ( <Components.ErrorScreen onRefresh={() => this._onRefresh()} onClose={() => this._onClose()} /> )} onShouldStartLoadWithRequest={request => { console.log('onShouldStartLoadWithRequest', request); if (request.url.startsWith('discourse://')) { this.props.navigation.goBack(); return false; } else { // onShouldStartLoadWithRequest is sometimes triggered by ajax requests (ads, etc.) // this is a workaround to avoid launching Safari for these events if (request.url !== request.mainDocumentURL) { return true; } // launch externally and stop loading request if external link if (!this.siteManager.urlInSites(request.url)) { // ensure URL can be opened, before opening an external URL Linking.canOpenURL(request.url) .then(() => { const useSVC = Settings.get('external_links_svc'); if (useSVC) { if (!this.safariViewVisible) { SafariView.show({url: request.url}); } } else { Linking.openURL(request.url); } }) .catch(e => { console.log('failed to fetch notifications ' + e); }); return false; } return true; } }} onNavigationStateChange={() => { StatusBar.setBarStyle(this.state.barStyle, true); }} decelerationRate={'normal'} onLoadProgress={({nativeEvent}) => { const progress = nativeEvent.progress; this.setState({ progress: progress, }); if (progress === 1) { this.progressTimeout = setTimeout( () => this.setState({progress: 0}), 400, ); } }} onMessage={event => this._onMessage(event)} onContentProcessDidTerminate={event => { console.log('onContentProcessDidTerminate', event); this._onClose(); }} /> )} </Animated.View> ); } componentWillUnmount() { clearTimeout(this.progressTimeout); Keyboard.removeListener('keyboardWillShow', this._onKeyboardShow); Keyboard.removeListener('keyboardDidHide', this._onKeyboardShow); this.siteManager.refreshSites(); } _onKeyboardShow() { StatusBar.setBarStyle(this.state.barStyle); } _onRefresh() { this.webview.reload(); } _onClose() { this.props.navigation.goBack(); } _onMessage(event) { let data = JSON.parse(event.nativeEvent.data); console.log('_onMessage', data); let {headerBg, shareUrl, dismiss} = data; if (headerBg) { // when fully transparent, use black status bar if (TinyColor(headerBg).getAlpha() === 0) { headerBg = 'rgb(0,0,0)'; } this.setState({ headerBg: headerBg, barStyle: TinyColor(headerBg).getBrightness() < 125 ? 'light-content' : 'dark-content', }); // ugly hack for an outstanding react-native-webview issue with the statusbar // https://github.com/react-native-community/react-native-webview/issues/735 setTimeout(() => { StatusBar.setBarStyle(this.state.barStyle); }, 400); } if (shareUrl) { Share.share({ url: shareUrl, }); } if (dismiss) { // react-navigation back action (exits webview) this.props.navigation.goBack(); } } } WebViewScreen.contextType = ThemeContext; export default WebViewScreen;
{ "pile_set_name": "Github" }
//<-- CLI SHELL MODE --> // ============================================================================= // Scilab ( http://www.scilab.org/ ) - This file is part of Scilab // Copyright (C) 2011 - DIGITEO - Allan CORNET // // This file is distributed under the same license as the Scilab package. // ============================================================================= savehistory(TMPDIR + "/historysize_backup"); // backup resethistory(); assert_checkequal(historysize(), 0); fmterr = msprintf(gettext("%s: Wrong value for input argument #%d: ""%s"" expected.\n"),"historysize", 1, "max"); assert_checkerror ("historysize(""toto"")", fmterr , 999 ); assert_checkequal(historysize(100), 100); assert_checkequal(historysize("max"), 100); fmterr = msprintf(gettext("%s: Wrong value for input argument #%d.\n"),"historysize", 1); assert_checkerror ("historysize(-1)", fmterr , 999 ); strs = string(1:1000); resethistory(); addhistory(strs); savehistory(TMPDIR + "/historysize_tmp"); resethistory(); warning("off"); loadhistory(TMPDIR + "/historysize_tmp"); assert_checkequal(historysize(), 100 + 1); resethistory(); loadhistory(TMPDIR + "/historysize_backup"); // restore
{ "pile_set_name": "Github" }
/* * Copyright (C) 2015, BMW Car IT GmbH * * Author: Sebastian Mattheis <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in * writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ package com.bmwcarit.barefoot.matcher; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.bmwcarit.barefoot.matcher.MatcherServer.DebugJSONOutputFormatter; import com.bmwcarit.barefoot.matcher.MatcherServer.GeoJSONOutputFormatter; import com.bmwcarit.barefoot.matcher.MatcherServer.InputFormatter; import com.bmwcarit.barefoot.matcher.MatcherServer.OutputFormatter; import com.bmwcarit.barefoot.matcher.MatcherServer.SlimJSONOutputFormatter; import com.bmwcarit.barefoot.roadmap.Loader; import com.bmwcarit.barefoot.roadmap.RoadMap; import com.bmwcarit.barefoot.util.SourceException; /** * Server control of stand-alone offline map matching server ({@link MatcherServer}). */ public abstract class ServerControl { private final static Logger logger = LoggerFactory.getLogger(ServerControl.class); private static MatcherServer matcherServer = null; private static Properties databaseProperties = new Properties(); private static Properties serverProperties = new Properties(); /** * Initializes stand-alone offline map matching server. Server properties file must include * matcher and server properties, see * {@link MatcherServer#MatcherServer(Properties, RoadMap, InputFormatter, OutputFormatter)}. * Database properties file must include database connection properties, see * {@link Loader#roadmap(Properties, boolean)}. * * @param pathServerProperties Path to server properties file. * @param pathDatabaseProperties Path to database properties file. * @param input {@link InputFormatter} to be used for input formatting. * @param output {@link OutputFormatter} to be used for output formatting. 
*/ public static void initServer(String pathServerProperties, String pathDatabaseProperties, InputFormatter input, OutputFormatter output) { logger.info("initialize server"); try { logger.info("read database properties from file {}", pathDatabaseProperties); databaseProperties.load(new FileInputStream(pathDatabaseProperties)); } catch (FileNotFoundException e) { logger.error("file {} not found", pathDatabaseProperties); System.exit(1); } catch (IOException e) { logger.error("reading database properties from file {} failed: {}", pathDatabaseProperties, e.getMessage()); System.exit(1); } RoadMap map = null; try { map = Loader.roadmap(databaseProperties, true); } catch (SourceException e) { logger.error("loading map failed:", e); System.exit(1); } map.construct(); try { logger.info("read tracker properties from file {}", pathServerProperties); serverProperties.load(new FileInputStream(pathServerProperties)); } catch (FileNotFoundException e) { logger.error("file {} not found", pathServerProperties); System.exit(1); } catch (IOException e) { logger.error("reading tracker properties from file {} failed: {}", pathDatabaseProperties, e.getMessage()); System.exit(1); } matcherServer = new MatcherServer(serverProperties, map, input, output); } /** * Gets {@link MatcherServer} object (singleton). * * @return {@link MatcherServer} object (singleton). */ public static MatcherServer getServer() { return matcherServer; } /** * Starts/runs server. */ public static void runServer() { logger.info("starting server on port {} with map {}", matcherServer.getPortNumber(), databaseProperties.getProperty("database.name")); matcherServer.runServer(); logger.info("server stopped"); } /** * Stops server. */ public static void stopServer() { logger.info("stopping server"); if (matcherServer != null) { matcherServer.stopServer(); } else { logger.error("stopping server failed, not yet started"); } } public static void main(String[] args) { if (args.length < 2 || args.length > 3) { logger.error( "missing arguments\nusage: [--slimjson|--debug|--geojson] /path/to/server/properties /path/to/mapserver/properties"); System.exit(1); } InputFormatter input = new InputFormatter(); OutputFormatter output = new OutputFormatter(); if (args.length > 2) { for (int i = 0; i < args.length - 2; ++i) { switch (args[i]) { case "--debug": output = new DebugJSONOutputFormatter(); break; case "--slimjson": output = new SlimJSONOutputFormatter(); break; case "--geojson": output = new GeoJSONOutputFormatter(); break; default: logger.warn("invalid option {} ignored", args[i]); break; } } } Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { stopServer(); } }); initServer(args[args.length - 2], args[args.length - 1], input, output); runServer(); } }
{ "pile_set_name": "Github" }
{ lib, stdenv, fetchFromGitHub, cmake, pkgconfig, unbound, openssl, boost , lmdb, miniupnpc, readline }: stdenv.mkDerivation rec { pname = "dero"; version = "0.11.7"; src = fetchFromGitHub { owner = "deroproject"; repo = "dero"; rev = "v${version}"; sha256 = "1v8b9wbmqbpyf4jpc0v276qzk3hc5fpddcmwvv5k5yfi30nmbh5c"; }; nativeBuildInputs = [ cmake pkgconfig ]; buildInputs = [ boost miniupnpc openssl lmdb unbound readline ]; enableParallelBuilding = true; meta = with lib; { description = "Secure, private blockchain with smart contracts based on Monero"; homepage = "https://dero.io/"; license = licenses.bsd3; maintainers = with maintainers; [ fpletz ]; platforms = platforms.linux; }; }
{ "pile_set_name": "Github" }
from __future__ import print_function from __future__ import absolute_import # -------------------------------------------------------- # Fast R-CNN # Copyright (c) 2015 Microsoft # Licensed under The MIT License [see LICENSE for details] # Written by Ross Girshick # -------------------------------------------------------- import os from datasets.imdb import imdb import datasets.ds_utils as ds_utils import xml.etree.ElementTree as ET import numpy as np import scipy.sparse import gzip import PIL import json from .vg_eval import vg_eval from model.utils.config import cfg import pickle import pdb try: xrange # Python 2 except NameError: xrange = range # Python 3 class vg(imdb): def __init__(self, version, image_set, ): imdb.__init__(self, 'vg_' + version + '_' + image_set) self._version = version self._image_set = image_set self._data_path = os.path.join(cfg.DATA_DIR, 'genome') self._img_path = os.path.join(cfg.DATA_DIR, 'vg') # VG specific config options self.config = {'cleanup' : False} # Load classes self._classes = ['__background__'] self._class_to_ind = {} self._class_to_ind[self._classes[0]] = 0 with open(os.path.join(self._data_path, self._version, 'objects_vocab.txt')) as f: count = 1 for object in f.readlines(): names = [n.lower().strip() for n in object.split(',')] self._classes.append(names[0]) for n in names: self._class_to_ind[n] = count count += 1 # Load attributes self._attributes = ['__no_attribute__'] self._attribute_to_ind = {} self._attribute_to_ind[self._attributes[0]] = 0 with open(os.path.join(self._data_path, self._version, 'attributes_vocab.txt')) as f: count = 1 for att in f.readlines(): names = [n.lower().strip() for n in att.split(',')] self._attributes.append(names[0]) for n in names: self._attribute_to_ind[n] = count count += 1 # Load relations self._relations = ['__no_relation__'] self._relation_to_ind = {} self._relation_to_ind[self._relations[0]] = 0 with open(os.path.join(self._data_path, self._version, 'relations_vocab.txt')) as f: count = 1 for rel in f.readlines(): names = [n.lower().strip() for n in rel.split(',')] self._relations.append(names[0]) for n in names: self._relation_to_ind[n] = count count += 1 self._image_ext = '.jpg' load_index_from_file = False if os.path.exists(os.path.join(self._data_path, "vg_image_index_{}.p".format(self._image_set))): with open(os.path.join(self._data_path, "vg_image_index_{}.p".format(self._image_set)), 'rb') as fp: self._image_index = pickle.load(fp) load_index_from_file = True load_id_from_file = False if os.path.exists(os.path.join(self._data_path, "vg_id_to_dir_{}.p".format(self._image_set))): with open(os.path.join(self._data_path, "vg_id_to_dir_{}.p".format(self._image_set)), 'rb') as fp: self._id_to_dir = pickle.load(fp) load_id_from_file = True if not load_index_from_file or not load_id_from_file: self._image_index, self._id_to_dir = self._load_image_set_index() with open(os.path.join(self._data_path, "vg_image_index_{}.p".format(self._image_set)), 'wb') as fp: pickle.dump(self._image_index, fp) with open(os.path.join(self._data_path, "vg_id_to_dir_{}.p".format(self._image_set)), 'wb') as fp: pickle.dump(self._id_to_dir, fp) self._roidb_handler = self.gt_roidb def image_path_at(self, i): """ Return the absolute path to image i in the image sequence. """ return self.image_path_from_index(self._image_index[i]) def image_id_at(self, i): """ Return the absolute path to image i in the image sequence. 
""" return i # return self._image_index[i] def image_path_from_index(self, index): """ Construct an image path from the image's "index" identifier. """ folder = self._id_to_dir[index] image_path = os.path.join(self._img_path, folder, str(index) + self._image_ext) assert os.path.exists(image_path), \ 'Path does not exist: {}'.format(image_path) return image_path def _image_split_path(self): if self._image_set == "minitrain": return os.path.join(self._data_path, 'train.txt') if self._image_set == "smalltrain": return os.path.join(self._data_path, 'train.txt') if self._image_set == "minival": return os.path.join(self._data_path, 'val.txt') if self._image_set == "smallval": return os.path.join(self._data_path, 'val.txt') else: return os.path.join(self._data_path, self._image_set+'.txt') def _load_image_set_index(self): """ Load the indexes listed in this dataset's image set file. """ training_split_file = self._image_split_path() assert os.path.exists(training_split_file), \ 'Path does not exist: {}'.format(training_split_file) with open(training_split_file) as f: metadata = f.readlines() if self._image_set == "minitrain": metadata = metadata[:1000] elif self._image_set == "smalltrain": metadata = metadata[:20000] elif self._image_set == "minival": metadata = metadata[:100] elif self._image_set == "smallval": metadata = metadata[:2000] image_index = [] id_to_dir = {} for line in metadata: im_file,ann_file = line.split() image_id = int(ann_file.split('/')[-1].split('.')[0]) filename = self._annotation_path(image_id) if os.path.exists(filename): # Some images have no bboxes after object filtering, so there # is no xml annotation for these. tree = ET.parse(filename) for obj in tree.findall('object'): obj_name = obj.find('name').text.lower().strip() if obj_name in self._class_to_ind: # We have to actually load and check these to make sure they have # at least one object actually in vocab image_index.append(image_id) id_to_dir[image_id] = im_file.split('/')[0] break return image_index, id_to_dir def gt_roidb(self): """ Return the database of ground-truth regions of interest. This function loads/saves from/to a cache file to speed up future calls. """ cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl') if os.path.exists(cache_file): fid = gzip.open(cache_file,'rb') roidb = pickle.load(fid) fid.close() print('{} gt roidb loaded from {}'.format(self.name, cache_file)) return roidb gt_roidb = [self._load_vg_annotation(index) for index in self.image_index] fid = gzip.open(cache_file,'wb') pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL) fid.close() print('wrote gt roidb to {}'.format(cache_file)) return gt_roidb def _get_size(self, index): return PIL.Image.open(self.image_path_from_index(index)).size def _annotation_path(self, index): return os.path.join(self._data_path, 'xml', str(index) + '.xml') def _load_vg_annotation(self, index): """ Load image and bounding boxes info from XML file in the PASCAL VOC format. """ width, height = self._get_size(index) filename = self._annotation_path(index) tree = ET.parse(filename) objs = tree.findall('object') num_objs = len(objs) boxes = np.zeros((num_objs, 4), dtype=np.uint16) gt_classes = np.zeros((num_objs), dtype=np.int32) # Max of 16 attributes are observed in the data gt_attributes = np.zeros((num_objs, 16), dtype=np.int32) overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32) # "Seg" area for pascal is just the box area seg_areas = np.zeros((num_objs), dtype=np.float32) # Load object bounding boxes into a data frame. 
obj_dict = {} ix = 0 for obj in objs: obj_name = obj.find('name').text.lower().strip() if obj_name in self._class_to_ind: bbox = obj.find('bndbox') x1 = max(0,float(bbox.find('xmin').text)) y1 = max(0,float(bbox.find('ymin').text)) x2 = min(width-1,float(bbox.find('xmax').text)) y2 = min(height-1,float(bbox.find('ymax').text)) # If bboxes are not positive, just give whole image coords (there are a few examples) if x2 < x1 or y2 < y1: print('Failed bbox in %s, object %s' % (filename, obj_name)) x1 = 0 y1 = 0 x2 = width-1 y2 = width-1 cls = self._class_to_ind[obj_name] obj_dict[obj.find('object_id').text] = ix atts = obj.findall('attribute') n = 0 for att in atts: att = att.text.lower().strip() if att in self._attribute_to_ind: gt_attributes[ix, n] = self._attribute_to_ind[att] n += 1 if n >= 16: break boxes[ix, :] = [x1, y1, x2, y2] gt_classes[ix] = cls overlaps[ix, cls] = 1.0 seg_areas[ix] = (x2 - x1 + 1) * (y2 - y1 + 1) ix += 1 # clip gt_classes and gt_relations gt_classes = gt_classes[:ix] gt_attributes = gt_attributes[:ix, :] overlaps = scipy.sparse.csr_matrix(overlaps) gt_attributes = scipy.sparse.csr_matrix(gt_attributes) rels = tree.findall('relation') num_rels = len(rels) gt_relations = set() # Avoid duplicates for rel in rels: pred = rel.find('predicate').text if pred: # One is empty pred = pred.lower().strip() if pred in self._relation_to_ind: try: triple = [] triple.append(obj_dict[rel.find('subject_id').text]) triple.append(self._relation_to_ind[pred]) triple.append(obj_dict[rel.find('object_id').text]) gt_relations.add(tuple(triple)) except: pass # Object not in dictionary gt_relations = np.array(list(gt_relations), dtype=np.int32) return {'boxes' : boxes, 'gt_classes': gt_classes, 'gt_attributes' : gt_attributes, 'gt_relations' : gt_relations, 'gt_overlaps' : overlaps, 'width' : width, 'height': height, 'flipped' : False, 'seg_areas' : seg_areas} def evaluate_detections(self, all_boxes, output_dir): self._write_voc_results_file(self.classes, all_boxes, output_dir) self._do_python_eval(output_dir) if self.config['cleanup']: for cls in self._classes: if cls == '__background__': continue filename = self._get_vg_results_file_template(output_dir).format(cls) os.remove(filename) def evaluate_attributes(self, all_boxes, output_dir): self._write_voc_results_file(self.attributes, all_boxes, output_dir) self._do_python_eval(output_dir, eval_attributes = True) if self.config['cleanup']: for cls in self._attributes: if cls == '__no_attribute__': continue filename = self._get_vg_results_file_template(output_dir).format(cls) os.remove(filename) def _get_vg_results_file_template(self, output_dir): filename = 'detections_' + self._image_set + '_{:s}.txt' path = os.path.join(output_dir, filename) return path def _write_voc_results_file(self, classes, all_boxes, output_dir): for cls_ind, cls in enumerate(classes): if cls == '__background__': continue print('Writing "{}" vg results file'.format(cls)) filename = self._get_vg_results_file_template(output_dir).format(cls) with open(filename, 'wt') as f: for im_ind, index in enumerate(self.image_index): dets = all_boxes[cls_ind][im_ind] if dets == []: continue # the VOCdevkit expects 1-based indices for k in xrange(dets.shape[0]): f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'. 
format(str(index), dets[k, -1], dets[k, 0] + 1, dets[k, 1] + 1, dets[k, 2] + 1, dets[k, 3] + 1)) def _do_python_eval(self, output_dir, pickle=True, eval_attributes = False): # We re-use parts of the pascal voc python code for visual genome aps = [] nposs = [] thresh = [] # The PASCAL VOC metric changed in 2010 use_07_metric = False print('VOC07 metric? ' + ('Yes' if use_07_metric else 'No')) if not os.path.isdir(output_dir): os.mkdir(output_dir) # Load ground truth gt_roidb = self.gt_roidb() if eval_attributes: classes = self._attributes else: classes = self._classes for i, cls in enumerate(classes): if cls == '__background__' or cls == '__no_attribute__': continue filename = self._get_vg_results_file_template(output_dir).format(cls) rec, prec, ap, scores, npos = vg_eval( filename, gt_roidb, self.image_index, i, ovthresh=0.5, use_07_metric=use_07_metric, eval_attributes=eval_attributes) # Determine per class detection thresholds that maximise f score if npos > 1: f = np.nan_to_num((prec*rec)/(prec+rec)) thresh += [scores[np.argmax(f)]] else: thresh += [0] aps += [ap] nposs += [float(npos)] print('AP for {} = {:.4f} (npos={:,})'.format(cls, ap, npos)) if pickle: with open(os.path.join(output_dir, cls + '_pr.pkl'), 'wb') as f: pickle.dump({'rec': rec, 'prec': prec, 'ap': ap, 'scores': scores, 'npos':npos}, f) # Set thresh to mean for classes with poor results thresh = np.array(thresh) avg_thresh = np.mean(thresh[thresh!=0]) thresh[thresh==0] = avg_thresh if eval_attributes: filename = 'attribute_thresholds_' + self._image_set + '.txt' else: filename = 'object_thresholds_' + self._image_set + '.txt' path = os.path.join(output_dir, filename) with open(path, 'wt') as f: for i, cls in enumerate(classes[1:]): f.write('{:s} {:.3f}\n'.format(cls, thresh[i])) weights = np.array(nposs) weights /= weights.sum() print('Mean AP = {:.4f}'.format(np.mean(aps))) print('Weighted Mean AP = {:.4f}'.format(np.average(aps, weights=weights))) print('Mean Detection Threshold = {:.3f}'.format(avg_thresh)) print('~~~~~~~~') print('Results:') for ap,npos in zip(aps,nposs): print('{:.3f}\t{:.3f}'.format(ap,npos)) print('{:.3f}'.format(np.mean(aps))) print('~~~~~~~~') print('') print('--------------------------------------------------------------') print('Results computed with the **unofficial** PASCAL VOC Python eval code.') print('--------------------------------------------------------------') if __name__ == '__main__': d = vg('val') res = d.roidb from IPython import embed; embed()
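The per-class threshold logic in `_do_python_eval` above picks, for each class, the detection score at which the (unnormalised) F-measure of the precision/recall curve peaks. Below is a minimal, self-contained sketch of just that selection step, using made-up toy arrays in place of the `vg_eval` outputs — the variable names mirror the code above, but the numbers are illustrative only and not from the dataset.

```python
import numpy as np

# Toy stand-ins for vg_eval() outputs: precision/recall at each score cutoff,
# with detection scores sorted in descending order.
prec = np.array([1.00, 0.90, 0.75, 0.60])
rec = np.array([0.10, 0.40, 0.70, 0.90])
scores = np.array([0.95, 0.80, 0.60, 0.40])

# prec*rec/(prec+rec) is F1/2, so its argmax matches F1's argmax;
# nan_to_num guards against 0/0 where precision and recall are both zero.
f = np.nan_to_num((prec * rec) / (prec + rec))
per_class_threshold = scores[np.argmax(f)]
print(per_class_threshold)  # 0.6 for these toy numbers
```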
{ "pile_set_name": "Github" }
---
layout: documentation
title: TZ06 - ZWave
---

{% include base.html %}

# TZ06 In Wall Dual Relay(1 Way) Switch Module 2x 1.5kW

This describes the Z-Wave device *TZ06*, manufactured by *TKB Home* with the thing type UID of ```tkb_tz06_00_000```.

The device is in the category of *Wall Switch*, defined as any device attached to the wall that controls the binary status of something, for example a light switch.

![TZ06 product image](https://opensmarthouse.org/zwavedatabase/156/image/)

The TZ06 supports routing. This allows the device to communicate using other routing enabled devices as intermediate routers. This device is also able to participate in the routing of data between other devices in the mesh network.

## Overview

This in-wall switch module is a Z-Wave Plus™ enabled transceiver and is fully compatible with any Z-Wave enabled network. Its compact design lets the module hide easily inside a wall box, keeping the installation unobtrusive. The module can switch a load on and off in many applications; the main one is light control.

### Inclusion Information

In the front casing there is an on/off button with an LED indicator below it, which toggles the switch on and off and carries out inclusion, exclusion, reset or association.

When power is first applied, the LED flashes on and off repeatedly at 0.5 second intervals. This indicates that the device has not been assigned a node ID and has started auto-inclusion.

Pressing the Include button of the TZ06 three times within 2 seconds will enter inclusion mode.

### Exclusion Information

Pressing the Include button of the TZ06 three times within 2 seconds will enter exclusion mode.

## Channels

The following table summarises the channels available for the TZ06 -:

| Channel Name | Channel ID | Channel Type | Category | Item Type |
|--------------|------------|--------------|----------|-----------|
| Switch | switch_binary | switch_binary | Switch | Switch |
| Alarm | alarm_general | alarm_general | Alarm | Switch |
| Switch 1 | switch_binary1 | switch_binary | Switch | Switch |
| Switch 2 | switch_binary2 | switch_binary | Switch | Switch |
| Switch 3 | switch_binary3 | switch_binary | Switch | Switch |

### Switch

Switch the power on and off.

The ```switch_binary``` channel is of type ```switch_binary``` and supports the ```Switch``` item and is in the ```Switch``` category.

### Alarm

Indicates if an alarm is triggered.

The ```alarm_general``` channel is of type ```alarm_general``` and supports the ```Switch``` item and is in the ```Alarm``` category. This is a read only channel so will only be updated following state changes from the device.

The following state translation is provided for this channel to the ```Switch``` item type -:

| Value | Label |
|-------|-------|
| OFF | OK |
| ON | Alarm |

### Switch 1

Switch the power on and off.

The ```switch_binary1``` channel is of type ```switch_binary``` and supports the ```Switch``` item and is in the ```Switch``` category.

### Switch 2

Switch the power on and off.

The ```switch_binary2``` channel is of type ```switch_binary``` and supports the ```Switch``` item and is in the ```Switch``` category.

### Switch 3

Switch the power on and off.

The ```switch_binary3``` channel is of type ```switch_binary``` and supports the ```Switch``` item and is in the ```Switch``` category.

## Device Configuration

The following table provides a summary of the 6 configuration parameters available in the TZ06.
Detailed information on each parameter can be found in the sections below.

| Param | Name | Description |
|-------|-------|-------------|
| 1 | Selected Relay | Selected Relay (Endpoint) |
| 2 | External Switch Mode | Change the External Switch (S1 & S2) Mode |
| 3 | Restore Switch State | Restore Switch State after AC power loss |
| 4 | Auto OFF Timer | Auto OFF Timer Countdown |
| 5 | RF Command Mode | RF Command Mode |
| 6 | Existence of Endpoint 3 | Existence of Endpoint 3 |
|  | Switch All Mode | Set the mode for the switch when receiving SWITCH ALL commands |

### Parameter 1: Selected Relay

Selected Relay (Endpoint)

If the controller is not using the Multi_Channel command class to access the endpoints of the PAN06, you may configure which endpoint reacts to the Basic command class:

Relay 1: only Relay 1 can be controlled and configured  
Relay 2: only Relay 2 can be controlled and configured  
Relay 1 & 2: both relays can be controlled and configured

The following option values may be configured -:

| Value | Description |
|--------|-------------|
| 1 | Relay 1 |
| 2 | Relay 2 |
| 3 | Relay 1&2 |

The manufacturer defined default value is ```3``` (Relay 1&2).

This parameter has the configuration ID ```config_1_1``` and is of type ```INTEGER```.

### Parameter 2: External Switch Mode

Change the External Switch (S1 & S2) Mode

The manual switches S1 and S2 can be set to Edge mode, Pulse mode or Edge-Toggle mode; the default is Edge mode.

The following option values may be configured -:

| Value | Description |
|--------|-------------|
| 1 | Edge mode |
| 2 | Pulse mode |
| 3 | Edge-Toggle mode |

The manufacturer defined default value is ```1``` (Edge mode).

This parameter has the configuration ID ```config_2_1``` and is of type ```INTEGER```.

### Parameter 3: Restore Switch State

Restore Switch State after AC power loss

Whenever AC power returns after a loss, the TZ06 restores the switch state, which can be SWITCH OFF, LAST SWITCH STATE or SWITCH ON. The default setting is LAST SWITCH STATE.

The following option values may be configured -:

| Value | Description |
|--------|-------------|
| 0 | Switch OFF |
| 1 | Last Switch State |
| 2 | Switch ON |

The manufacturer defined default value is ```1``` (Last Switch State).

This parameter has the configuration ID ```config_3_1``` and is of type ```INTEGER```.

### Parameter 4: Auto OFF Timer

Auto OFF Timer Countdown

Whenever the TZ06 switches to ON, the auto OFF timer begins to count down. When it reaches zero, the switch turns OFF automatically. If the timer is set to 0, the auto OFF function is disabled. The default setting is 0.

Values in the range 0 to 32767 may be set.

The manufacturer defined default value is ```0```.

This parameter has the configuration ID ```config_4_2``` and is of type ```INTEGER```.

### Parameter 5: RF Command Mode

RF Command Mode

Whenever a Switch OFF command (BASIC\_SET, BINARY\_SWITCH\_SET or SWITCH\_ALL\_OFF) is received, it can be interpreted as one of four commands:

5-0: Switch OFF: it switches to the OFF state. This is the default setting.  
5-1: Ignore: the Switch OFF command is ignored.  
5-2: Switch Toggle: it switches to the inverse of the current state.  
5-3: Switch ON: it switches to the ON state.

The following option values may be configured -:

| Value | Description |
|--------|-------------|
| 0 | Switch OFF |
| 1 | Switch Ignore |
| 2 | Switch Toggle |
| 3 | Switch ON |

The manufacturer defined default value is ```0``` (Switch OFF).
This parameter has the configuration ID ```config_5_1``` and is of type ```INTEGER```.

### Parameter 6: Existence of Endpoint 3

Existence of Endpoint 3

Endpoint 3 of the Multi-Channel command class addresses Relay 1 and Relay 2 together, which may be redundant if you only need to control Relay 1 or Relay 2 individually. When this parameter is set to 2, endpoint 3 of the Multi-Channel command class is disabled. The default value is 1.

The following option values may be configured -:

| Value | Description |
|--------|-------------|
| 1 | Endpoint 3 exists |
| 2 | No Endpoint 3 |

The manufacturer defined default value is ```1``` (Endpoint 3 exists).

This parameter has the configuration ID ```config_6_1``` and is of type ```INTEGER```.

### Switch All Mode

Set the mode for the switch when receiving SWITCH ALL commands.

The following option values may be configured -:

| Value | Description |
|--------|-------------|
| 0 | Exclude from All On and All Off groups |
| 1 | Include in All On group |
| 2 | Include in All Off group |
| 255 | Include in All On and All Off groups |

This parameter has the configuration ID ```switchall_mode``` and is of type ```INTEGER```.

## Association Groups

Association groups allow the device to send unsolicited reports to the controller, or to other devices in the network. Using association groups can allow you to eliminate polling, providing instant feedback of a device state change without unnecessary network traffic.

The TZ06 supports 3 association groups.

### Group 1: Relay 1 + 2

Association group 1 supports 1 node.

### Group 2: Relay 1

Association group 2 supports 1 node.

### Group 3: Relay 2

Association group 3 supports 1 node.

## Technical Information

### Endpoints

#### Endpoint 0

| Command Class | Comment |
|---------------|---------|
| COMMAND_CLASS_NO_OPERATION_V1 | |
| COMMAND_CLASS_BASIC_V1 | |
| COMMAND_CLASS_SWITCH_BINARY_V1 | Linked to BASIC |
| COMMAND_CLASS_SWITCH_ALL_V1 | |
| COMMAND_CLASS_METER_PULSE_V1 | |
| COMMAND_CLASS_MULTI_CHANNEL_V2 | |
| COMMAND_CLASS_CONFIGURATION_V1 | |
| COMMAND_CLASS_ALARM_V1 | |
| COMMAND_CLASS_MANUFACTURER_SPECIFIC_V1 | |
| COMMAND_CLASS_ASSOCIATION_V1 | |
| COMMAND_CLASS_VERSION_V1 | |
| COMMAND_CLASS_LANGUAGE_V1 | |

#### Endpoint 1

| Command Class | Comment |
|---------------|---------|
| COMMAND_CLASS_BASIC_V1 | |
| COMMAND_CLASS_SWITCH_BINARY_V1 | Linked to BASIC |

#### Endpoint 2

| Command Class | Comment |
|---------------|---------|
| COMMAND_CLASS_BASIC_V1 | |
| COMMAND_CLASS_SWITCH_BINARY_V1 | Linked to BASIC |

#### Endpoint 3

| Command Class | Comment |
|---------------|---------|
| COMMAND_CLASS_BASIC_V1 | |
| COMMAND_CLASS_SWITCH_BINARY_V1 | Linked to BASIC |

### Documentation Links

* [User Manual](https://opensmarthouse.org/zwavedatabase/156/TZ71-TZ76.pdf)
* [User Manual](https://opensmarthouse.org/zwavedatabase/156/TZ06-manual.pdf)
* [TZ07 User Manual 21070111](https://opensmarthouse.org/zwavedatabase/156/TZ07-User-Manual-21070111.pdf)

---

Did you spot an error in the above definition or want to improve the content? You can [contribute to the database here](https://opensmarthouse.org/zwavedatabase/156).
{ "pile_set_name": "Github" }
{}
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html> <head> <title>ZIP 309: Blind Off-chain Lightweight Transactions (BOLT)</title> <meta charset="utf-8" /> <meta name="viewport" content="width=device-width, initial-scale=1"><link rel="stylesheet" href="css/style.css"></head> <body> <section> <pre>ZIP: 309 Title: Blind Off-chain Lightweight Transactions (BOLT) Owners: J. Ayo Akinyele &lt;[email protected]&gt; Colleen Swanson &lt;[email protected]&gt; Credits: Ian Miers Matthew Green Status: Reserved Category: Consensus Created: 2019-07-15 Discussions-To: &lt;<a href="https://github.com/zcash/zips/issues/2353">https://github.com/zcash/zips/issues/2353</a>&gt; Pull-Request: &lt;<a href="https://github.com/zcash/zips/pull/216">https://github.com/zcash/zips/pull/216</a>&gt;</pre> </section> </body> </html>
{ "pile_set_name": "Github" }
class Subclass is Range {} // expect runtime error: Class 'Subclass' cannot inherit from built-in class 'Range'.
{ "pile_set_name": "Github" }
// 20.2.2.30 Math.sinh(x)
var $export = require('./$.export')
  , expm1   = require('./$.math-expm1')
  , exp     = Math.exp;

// V8 near Chromium 38 has a problem with very small numbers
$export($export.S + $export.F * require('./$.fails')(function(){
  return !Math.sinh(-2e-17) != -2e-17;
}), 'Math', {
  sinh: function sinh(x){
    return Math.abs(x = +x) < 1
      ? (expm1(x) - expm1(-x)) / 2
      : (exp(x - 1) - exp(-x - 1)) * (Math.E / 2);
  }
});
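The polyfill above switches to `expm1` for |x| < 1 because computing `(exp(x) - exp(-x)) / 2` directly loses its significant digits to cancellation when x is tiny (both exponentials are ≈ 1). A rough Python sketch of the same numerical guard, purely for illustration — the function name is mine and not part of core-js:

```python
import math

def sinh_stable(x):
    if abs(x) < 1:
        # expm1 returns exp(x) - 1 without rounding the small term away,
        # so the difference keeps the leading-order x instead of cancelling.
        return (math.expm1(x) - math.expm1(-x)) / 2
    # For larger |x|, mirror the polyfill's upper branch; algebraically this
    # equals (exp(x) - exp(-x)) / 2.
    return (math.exp(x - 1) - math.exp(-x - 1)) * (math.e / 2)

print(sinh_stable(-2e-17), math.sinh(-2e-17))  # both should print -2e-17
```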
{ "pile_set_name": "Github" }
{ "images" : [ { "idiom" : "iphone", "scale" : "1x" }, { "idiom" : "iphone", "filename" : "PlayerBotHit_8_02_100.png", "scale" : "2x" }, { "idiom" : "iphone", "filename" : "PlayerBotHit_8_02_180.png", "scale" : "3x" }, { "idiom" : "ipad", "filename" : "PlayerBotHit_8_02_120.png", "scale" : "1x" }, { "idiom" : "ipad", "filename" : "PlayerBotHit_8_02_240.png", "scale" : "2x" }, { "idiom" : "tv", "filename" : "PlayerBotHit_8_02_169.png", "scale" : "1x" }, { "idiom" : "mac", "filename" : "PlayerBotHit_8_02_225.png", "scale" : "1x" }, { "idiom" : "mac", "scale" : "2x" } ], "info" : { "version" : 1, "author" : "xcode" } }
{ "pile_set_name": "Github" }
/*global SelectBox, gettext, interpolate, quickElement, SelectFilter*/ /* SelectFilter2 - Turns a multiple-select box into a filter interface. Requires jQuery, core.js, and SelectBox.js. */ (function($) { 'use strict'; function findForm(node) { // returns the node of the form containing the given node if (node.tagName.toLowerCase() !== 'form') { return findForm(node.parentNode); } return node; } window.SelectFilter = { init: function(field_id, field_name, is_stacked) { if (field_id.match(/__prefix__/)) { // Don't initialize on empty forms. return; } var from_box = document.getElementById(field_id); from_box.id += '_from'; // change its ID from_box.className = 'filtered'; var ps = from_box.parentNode.getElementsByTagName('p'); for (var i = 0; i < ps.length; i++) { if (ps[i].className.indexOf("info") !== -1) { // Remove <p class="info">, because it just gets in the way. from_box.parentNode.removeChild(ps[i]); } else if (ps[i].className.indexOf("help") !== -1) { // Move help text up to the top so it isn't below the select // boxes or wrapped off on the side to the right of the add // button: from_box.parentNode.insertBefore(ps[i], from_box.parentNode.firstChild); } } // <div class="selector"> or <div class="selector stacked"> var selector_div = quickElement('div', from_box.parentNode); selector_div.className = is_stacked ? 'selector stacked' : 'selector'; // <div class="selector-available"> var selector_available = quickElement('div', selector_div); selector_available.className = 'selector-available'; var title_available = quickElement('h2', selector_available, interpolate(gettext('Available %s') + ' ', [field_name])); quickElement( 'span', title_available, '', 'class', 'help help-tooltip help-icon', 'title', interpolate( gettext( 'This is the list of available %s. You may choose some by ' + 'selecting them in the box below and then clicking the ' + '"Choose" arrow between the two boxes.' 
), [field_name] ) ); var filter_p = quickElement('p', selector_available, '', 'id', field_id + '_filter'); filter_p.className = 'selector-filter'; var search_filter_label = quickElement('label', filter_p, '', 'for', field_id + '_input'); quickElement( 'span', search_filter_label, '', 'class', 'help-tooltip search-label-icon', 'title', interpolate(gettext("Type into this box to filter down the list of available %s."), [field_name]) ); filter_p.appendChild(document.createTextNode(' ')); var filter_input = quickElement('input', filter_p, '', 'type', 'text', 'placeholder', gettext("Filter")); filter_input.id = field_id + '_input'; selector_available.appendChild(from_box); var choose_all = quickElement('a', selector_available, gettext('Choose all'), 'title', interpolate(gettext('Click to choose all %s at once.'), [field_name]), 'href', '#', 'id', field_id + '_add_all_link'); choose_all.className = 'selector-chooseall'; // <ul class="selector-chooser"> var selector_chooser = quickElement('ul', selector_div); selector_chooser.className = 'selector-chooser'; var add_link = quickElement('a', quickElement('li', selector_chooser), gettext('Choose'), 'title', gettext('Choose'), 'href', '#', 'id', field_id + '_add_link'); add_link.className = 'selector-add'; var remove_link = quickElement('a', quickElement('li', selector_chooser), gettext('Remove'), 'title', gettext('Remove'), 'href', '#', 'id', field_id + '_remove_link'); remove_link.className = 'selector-remove'; // <div class="selector-chosen"> var selector_chosen = quickElement('div', selector_div); selector_chosen.className = 'selector-chosen'; var title_chosen = quickElement('h2', selector_chosen, interpolate(gettext('Chosen %s') + ' ', [field_name])); quickElement( 'span', title_chosen, '', 'class', 'help help-tooltip help-icon', 'title', interpolate( gettext( 'This is the list of chosen %s. You may remove some by ' + 'selecting them in the box below and then clicking the ' + '"Remove" arrow between the two boxes.' 
), [field_name] ) ); var to_box = quickElement('select', selector_chosen, '', 'id', field_id + '_to', 'multiple', 'multiple', 'size', from_box.size, 'name', from_box.getAttribute('name')); to_box.className = 'filtered'; var clear_all = quickElement('a', selector_chosen, gettext('Remove all'), 'title', interpolate(gettext('Click to remove all chosen %s at once.'), [field_name]), 'href', '#', 'id', field_id + '_remove_all_link'); clear_all.className = 'selector-clearall'; from_box.setAttribute('name', from_box.getAttribute('name') + '_old'); // Set up the JavaScript event handlers for the select box filter interface var move_selection = function(e, elem, move_func, from, to) { if (elem.className.indexOf('active') !== -1) { move_func(from, to); SelectFilter.refresh_icons(field_id); } e.preventDefault(); }; choose_all.addEventListener('click', function(e) { move_selection(e, this, SelectBox.move_all, field_id + '_from', field_id + '_to'); }); add_link.addEventListener('click', function(e) { move_selection(e, this, SelectBox.move, field_id + '_from', field_id + '_to'); }); remove_link.addEventListener('click', function(e) { move_selection(e, this, SelectBox.move, field_id + '_to', field_id + '_from'); }); clear_all.addEventListener('click', function(e) { move_selection(e, this, SelectBox.move_all, field_id + '_to', field_id + '_from'); }); filter_input.addEventListener('keypress', function(e) { SelectFilter.filter_key_press(e, field_id); }); filter_input.addEventListener('keyup', function(e) { SelectFilter.filter_key_up(e, field_id); }); filter_input.addEventListener('keydown', function(e) { SelectFilter.filter_key_down(e, field_id); }); selector_div.addEventListener('change', function(e) { if (e.target.tagName === 'SELECT') { SelectFilter.refresh_icons(field_id); } }); selector_div.addEventListener('dblclick', function(e) { if (e.target.tagName === 'OPTION') { if (e.target.closest('select').id === field_id + '_to') { SelectBox.move(field_id + '_to', field_id + '_from'); } else { SelectBox.move(field_id + '_from', field_id + '_to'); } SelectFilter.refresh_icons(field_id); } }); findForm(from_box).addEventListener('submit', function() { SelectBox.select_all(field_id + '_to'); }); SelectBox.init(field_id + '_from'); SelectBox.init(field_id + '_to'); // Move selected from_box options to to_box SelectBox.move(field_id + '_from', field_id + '_to'); if (!is_stacked) { // In horizontal mode, give the same height to the two boxes. var j_from_box = $(from_box); var j_to_box = $(to_box); var resize_filters = function() { j_to_box.height($(filter_p).outerHeight() + j_from_box.outerHeight()); }; if (j_from_box.outerHeight() > 0) { resize_filters(); // This fieldset is already open. Resize now. } else { // This fieldset is probably collapsed. Wait for its 'show' event. j_to_box.closest('fieldset').one('show.fieldset', resize_filters); } } // Initial icon refresh SelectFilter.refresh_icons(field_id); }, any_selected: function(field) { var any_selected = false; try { // Temporarily add the required attribute and check validity. // This is much faster in WebKit browsers than the fallback. 
field.attr('required', 'required'); any_selected = field.is(':valid'); field.removeAttr('required'); } catch (e) { // Browsers that don't support :valid (IE < 10) any_selected = field.find('option:selected').length > 0; } return any_selected; }, refresh_icons: function(field_id) { var from = $('#' + field_id + '_from'); var to = $('#' + field_id + '_to'); // Active if at least one item is selected $('#' + field_id + '_add_link').toggleClass('active', SelectFilter.any_selected(from)); $('#' + field_id + '_remove_link').toggleClass('active', SelectFilter.any_selected(to)); // Active if the corresponding box isn't empty $('#' + field_id + '_add_all_link').toggleClass('active', from.find('option').length > 0); $('#' + field_id + '_remove_all_link').toggleClass('active', to.find('option').length > 0); }, filter_key_press: function(event, field_id) { var from = document.getElementById(field_id + '_from'); // don't submit form if user pressed Enter if ((event.which && event.which === 13) || (event.keyCode && event.keyCode === 13)) { from.selectedIndex = 0; SelectBox.move(field_id + '_from', field_id + '_to'); from.selectedIndex = 0; event.preventDefault(); return false; } }, filter_key_up: function(event, field_id) { var from = document.getElementById(field_id + '_from'); var temp = from.selectedIndex; SelectBox.filter(field_id + '_from', document.getElementById(field_id + '_input').value); from.selectedIndex = temp; return true; }, filter_key_down: function(event, field_id) { var from = document.getElementById(field_id + '_from'); // right arrow -- move across if ((event.which && event.which === 39) || (event.keyCode && event.keyCode === 39)) { var old_index = from.selectedIndex; SelectBox.move(field_id + '_from', field_id + '_to'); from.selectedIndex = (old_index === from.length) ? from.length - 1 : old_index; return false; } // down arrow -- wrap around if ((event.which && event.which === 40) || (event.keyCode && event.keyCode === 40)) { from.selectedIndex = (from.length === from.selectedIndex + 1) ? 0 : from.selectedIndex + 1; } // up arrow -- wrap around if ((event.which && event.which === 38) || (event.keyCode && event.keyCode === 38)) { from.selectedIndex = (from.selectedIndex === 0) ? from.length - 1 : from.selectedIndex - 1; } return true; } }; window.addEventListener('load', function(e) { $('select.selectfilter, select.selectfilterstacked').each(function() { var $el = $(this), data = $el.data(); SelectFilter.init($el.attr('id'), data.fieldName, parseInt(data.isStacked, 10)); }); }); })(django.jQuery);
{ "pile_set_name": "Github" }
class NewClass1 { } class NewClass3 { } class NewClass4 { } class NewClass5 { }
{ "pile_set_name": "Github" }
/******************************************************************************* * Copyright (c) 2018 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 *******************************************************************************/ package org.eclipse.hono.adapter.mqtt; import org.eclipse.hono.service.metric.MicrometerBasedMetrics; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import io.micrometer.core.instrument.MeterRegistry; import io.vertx.core.Vertx; /** * Metrics for the MQTT based adapters. */ @Component public class MicrometerBasedMqttAdapterMetrics extends MicrometerBasedMetrics implements MqttAdapterMetrics { /** * Create a new metrics instance for MQTT adapters. * * @param registry The meter registry to use. * @param vertx The Vert.x instance to use. * * @throws NullPointerException if either parameter is {@code null}. */ @Autowired public MicrometerBasedMqttAdapterMetrics(final MeterRegistry registry, final Vertx vertx) { super(registry, vertx); } }
{ "pile_set_name": "Github" }
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("EmpiricalFontSize.UWP")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("EmpiricalFontSize.UWP")] [assembly: AssemblyCopyright("Copyright © 2015")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")] [assembly: ComVisible(false)]
{ "pile_set_name": "Github" }
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef NET_NETWORK_ERROR_LOGGING_NETWORK_ERROR_LOGGING_TEST_UTIL_H_ #define NET_NETWORK_ERROR_LOGGING_NETWORK_ERROR_LOGGING_TEST_UTIL_H_ #include <string> #include <vector> #include "base/callback.h" #include "base/macros.h" #include "net/base/address_list.h" #include "net/base/ip_address.h" #include "net/network_error_logging/network_error_logging_service.h" #include "url/gurl.h" #include "url/origin.h" namespace net { class IPAddress; // A NetworkErrorLoggingService implementation that stashes all NEL headers and // reports so that they can be easily verified in unit tests. class TestNetworkErrorLoggingService : public NetworkErrorLoggingService { public: struct Header { Header() = default; ~Header() = default; // Returns whether the |received_ip_address| field matches any of the // addresses in |address_list|. bool MatchesAddressList(const AddressList& address_list) const; url::Origin origin; IPAddress received_ip_address; std::string value; }; TestNetworkErrorLoggingService(); ~TestNetworkErrorLoggingService() override; const std::vector<Header>& headers() { return headers_; } const std::vector<RequestDetails>& errors() { return errors_; } // NetworkErrorLoggingService implementation void OnHeader(const url::Origin& origin, const IPAddress& received_ip_address, const std::string& value) override; void OnRequest(RequestDetails details) override; void QueueSignedExchangeReport(SignedExchangeReportDetails details) override; void RemoveBrowsingData( const base::RepeatingCallback<bool(const GURL&)>& origin_filter) override; void RemoveAllBrowsingData() override; private: std::vector<Header> headers_; std::vector<RequestDetails> errors_; DISALLOW_COPY_AND_ASSIGN(TestNetworkErrorLoggingService); }; } // namespace net #endif // NET_NETWORK_ERROR_LOGGING_NETWORK_ERROR_LOGGING_TEST_UTIL_H_
{ "pile_set_name": "Github" }
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # All rights reserved. # # This source code is licensed under the BSD-style license found in the # LICENSE file in the root directory of this source tree. #
{ "pile_set_name": "Github" }
#!/usr/bin/env python # # This file is used to test host- and service-downtimes. # import sys from sys import __stdout__ from functools import partial import time import datetime import os import string import re import random import copy import locale import unittest2 as unittest # import the shinken library from the parent directory import __import_shinken ; del __import_shinken import shinken from shinken.objects.config import Config from shinken.objects.command import Command from shinken.objects.module import Module from shinken.objects.schedulingitem import SchedulingItem from shinken.dispatcher import Dispatcher from shinken.log import logger from shinken.modulesctx import modulesctx from shinken.scheduler import Scheduler from shinken.macroresolver import MacroResolver from shinken.external_command import ExternalCommandManager, ExternalCommand from shinken.check import Check from shinken.message import Message from shinken.objects.arbiterlink import ArbiterLink from shinken.objects.schedulerlink import SchedulerLink from shinken.objects.pollerlink import PollerLink from shinken.objects.reactionnerlink import ReactionnerLink from shinken.objects.brokerlink import BrokerLink from shinken.objects.satellitelink import SatelliteLink from shinken.notification import Notification from shinken.modulesmanager import ModulesManager from shinken.basemodule import BaseModule from shinken.brok import Brok from shinken.misc.common import DICT_MODATTR from shinken.daemons.schedulerdaemon import Shinken from shinken.daemons.brokerdaemon import Broker from shinken.daemons.arbiterdaemon import Arbiter from shinken.daemons.receiverdaemon import Receiver from logging import ERROR # Modules are by default on the ../modules myself = os.path.abspath(__file__) global modules_dir modules_dir = os.environ.get('SHINKEN_MODULES_DIR', "modules") class __DUMMY: def add(self, obj): pass logger.load_obj(__DUMMY()) logger.setLevel(ERROR) ############################################################################# def guess_sys_stdout_encoding(): ''' Return the best guessed encoding to be used for printing on sys.stdout. ''' return ( getattr(sys.stdout, 'encoding', None) or getattr(__stdout__, 'encoding', None) or locale.getpreferredencoding() or sys.getdefaultencoding() or 'ascii' ) def safe_print(*args, **kw): """" "print" args to sys.stdout, If some of the args aren't unicode then convert them first to unicode, using keyword argument 'in_encoding' if provided (else default to UTF8) and replacing bad encoded bytes. Write to stdout using 'out_encoding' if provided else best guessed encoding, doing xmlcharrefreplace on errors. """ in_bytes_encoding = kw.pop('in_encoding', 'UTF-8') out_encoding = kw.pop('out_encoding', guess_sys_stdout_encoding()) if kw: raise ValueError('unhandled named/keyword argument(s): %r' % kw) # make_in_data_gen = lambda: ( a if isinstance(a, unicode) else unicode(str(a), in_bytes_encoding, 'replace') for a in args ) possible_codings = ( out_encoding, ) if out_encoding != 'ascii': possible_codings += ( 'ascii', ) for coding in possible_codings: data = u' '.join(make_in_data_gen()).encode(coding, 'xmlcharrefreplace') try: sys.stdout.write(data) break except UnicodeError as err: # there might still have some problem with the underlying sys.stdout. # it might be a StringIO whose content could be decoded/encoded in this same process # and have encode/decode errors because we could have guessed a bad encoding with it. 
# in such case fallback on 'ascii' if coding == 'ascii': raise sys.stderr.write('Error on write to sys.stdout with %s encoding: err=%s\nTrying with ascii' % ( coding, err)) sys.stdout.write(b'\n') ############################################################################# # We overwrite the functions time() and sleep() # This way we can modify sleep() so that it immediately returns although # for a following time() it looks like thee was actually a delay. # This massively speeds up the tests. class TimeHacker(object): def __init__(self): self.my_offset = 0 self.my_starttime = time.time() self.my_oldtime = time.time self.original_time_time = time.time self.original_time_sleep = time.sleep self.in_real_time = True def my_time_time(self): return self.my_oldtime() + self.my_offset def my_time_sleep(self, delay): self.my_offset += delay def time_warp(self, duration): self.my_offset += duration def set_my_time(self): if self.in_real_time: time.time = self.my_time_time time.sleep = self.my_time_sleep self.in_real_time = False # If external processes or time stamps for files are involved, we must # revert the fake timing routines, because these externals cannot be fooled. # They get their times from the operating system. def set_real_time(self): if not self.in_real_time: time.time = self.original_time_time time.sleep = self.original_time_sleep self.in_real_time = True #Time hacking for every test! time_hacker = TimeHacker() time_hacker.set_my_time() class Pluginconf(object): pass class ShinkenTest(unittest.TestCase): def setUp(self): self.setup_with_file('etc/shinken_1r_1h_1s.cfg') def setup_with_file(self, path): time_hacker.set_my_time() self.print_header() # i am arbiter-like self.broks = [] self.me = None self.log = logger self.log.load_obj(self) self.config_files = [path] self.conf = Config() buf = self.conf.read_config(self.config_files) raw_objects = self.conf.read_config_buf(buf) self.conf.create_objects_for_type(raw_objects, 'arbiter') self.conf.create_objects_for_type(raw_objects, 'module') self.conf.early_arbiter_linking() # If we got one arbiter defined here (before default) we should be in a case where # the tester want to load/test a module, so we simulate an arbiter daemon # and the modules loading phase. 
As it has its own modulesmanager, should # not impact scheduler modules ones, especially we are asking for arbiter type :) if len(self.conf.arbiters) == 1: arbdaemon = Arbiter([''],[''], False, False, None, None) # only load if the module_dir is reallyexisting, so was set explicitly # in the test configuration if os.path.exists(getattr(self.conf, 'modules_dir', '')): arbdaemon.modules_dir = self.conf.modules_dir arbdaemon.load_modules_manager() # we request the instances without them being *started* # (for those that are concerned ("external" modules): # we will *start* these instances after we have been daemonized (if requested) for arb in self.conf.arbiters: arbdaemon.modules_manager.set_modules(arb.modules) arbdaemon.do_load_modules() arbdaemon.load_modules_configuration_objects(raw_objects) self.conf.create_objects(raw_objects) self.conf.instance_id = 0 self.conf.instance_name = 'test' # Hack push_flavor, that is set by the dispatcher self.conf.push_flavor = 0 self.conf.load_triggers() #import pdb;pdb.set_trace() self.conf.linkify_templates() #import pdb;pdb.set_trace() self.conf.apply_inheritance() #import pdb;pdb.set_trace() self.conf.explode() #print "Aconf.services has %d elements" % len(self.conf.services) self.conf.apply_implicit_inheritance() self.conf.fill_default() self.conf.remove_templates() self.conf.override_properties() self.conf.linkify() self.conf.apply_dependencies() self.conf.set_initial_state() self.conf.explode_global_conf() self.conf.propagate_timezone_option() self.conf.create_business_rules() self.conf.create_business_rules_dependencies() self.conf.is_correct() if not self.conf.conf_is_correct: print "The conf is not correct, I stop here" self.conf.dump() return self.conf.clean() self.confs = self.conf.cut_into_parts() self.conf.prepare_for_sending() self.conf.show_errors() self.dispatcher = Dispatcher(self.conf, self.me) scheddaemon = Shinken(None, False, False, False, None, None) self.scheddaemon = scheddaemon self.sched = scheddaemon.sched scheddaemon.modules_dir = modules_dir scheddaemon.load_modules_manager() # Remember to clean the logs we just created before launching tests self.clear_logs() m = MacroResolver() m.init(self.conf) self.sched.load_conf(self.conf, in_test=True) e = ExternalCommandManager(self.conf, 'applyer') self.sched.external_command = e e.load_scheduler(self.sched) e2 = ExternalCommandManager(self.conf, 'dispatcher') e2.load_arbiter(self) self.external_command_dispatcher = e2 self.sched.conf.accept_passive_unknown_check_results = False self.sched.schedule() def add(self, b): if isinstance(b, Brok): self.broks.append(b) return if isinstance(b, ExternalCommand): self.sched.run_external_command(b.cmd_line) def fake_check(self, ref, exit_status, output="OK", check_variant=SchedulingItem.default_check_variant, fake_timeout=False): #print "fake", ref now = time.time() ref.schedule(force=True) # now checks are schedule and we get them in # the action queue #check = ref.actions.pop() check = ref.get_checks_in_progress(check_variant)[0] self.sched.add(check) # check is now in sched.checks[] # Allows to force check scheduling without setting its status nor # output. Useful for manual business rules rescheduling, for instance. 
if exit_status is None: return # fake execution check.check_time = now # and lie about when we will launch it because # if not, the schedule call for ref # will not really reschedule it because there # is a valid value in the future ref.next_chk = now - 0.5 check.get_outputs(output, 9000) check.exit_status = exit_status check.execution_time = 0.001 if fake_timeout is True: check.status = "timeout" else: check.status = 'waitconsume' self.sched.waiting_results.append(check) def scheduler_loop(self, count, reflist, do_sleep=False, sleep_time=61, verbose=True): for ref in reflist: if isinstance(ref, dict): obj = ref["item"] else: obj = ref[0] obj.checks_in_progress = [] for loop in range(1, count + 1): if verbose is True: print "processing check", loop for ref in reflist: ext = {} if isinstance(ref, dict): obj = ref["item"] exit_status = ref["exit_status"] output = ref["output"] if "check_variant" in ref: ext["check_variant"] = ref["check_variant"] if "timeout" in ref: ext["fake_timeout"] = ref["timeout"] else: (obj, exit_status, output) = ref self.fake_check(obj, exit_status, output, **ext) self.sched.manage_internal_checks() self.sched.consume_results() self.sched.get_new_actions() self.sched.get_new_broks() self.sched.scatter_master_notifications() self.worker_loop(verbose) for ref in reflist: if isinstance(ref, dict): obj = ref["item"] else: obj = ref[0] obj.checks_in_progress = [] self.sched.update_downtimes_and_comments() #time.sleep(ref.retry_interval * 60 + 1) if do_sleep: time.sleep(sleep_time) def worker_loop(self, verbose=True): self.sched.delete_zombie_checks() self.sched.delete_zombie_actions() checks = self.sched.get_to_run_checks(True, False, worker_name='tester') actions = self.sched.get_to_run_checks(False, True, worker_name='tester') #print "------------ worker loop checks ----------------" #print checks #print "------------ worker loop actions ----------------" if verbose is True: self.show_actions() #print "------------ worker loop new ----------------" for a in actions: a.status = 'inpoller' a.check_time = time.time() a.exit_status = 0 self.sched.put_results(a) if verbose is True: self.show_actions() #print "------------ worker loop end ----------------" def show_logs(self): print "--- logs <<<----------------------------------" if hasattr(self, "sched"): broks = self.sched.broks else: broks = self.broks for brok in broks: if brok.type == 'log': brok.prepare() safe_print("LOG: ", brok.data['log']) print "--- logs >>>----------------------------------" def show_actions(self): print "--- actions <<<----------------------------------" if hasattr(self, "sched"): actions = self.sched.actions else: actions = self.actions for a in sorted(actions.values(), lambda x, y: x.id - y.id): if a.is_a == 'notification': if a.ref.my_type == "host": ref = "host: %s" % a.ref.get_name() else: ref = "host: %s svc: %s" % (a.ref.host.get_name(), a.ref.get_name()) print "NOTIFICATION %d %s %s %s %s" % (a.id, ref, a.type, time.asctime(time.localtime(a.t_to_go)), a.status) elif a.is_a == 'eventhandler': print "EVENTHANDLER:", a print "--- actions >>>----------------------------------" def show_and_clear_logs(self): self.show_logs() self.clear_logs() def show_and_clear_actions(self): self.show_actions() self.clear_actions() def count_logs(self): if hasattr(self, "sched"): broks = self.sched.broks else: broks = self.broks return len([b for b in broks if b.type == 'log']) def count_actions(self): if hasattr(self, "sched"): actions = self.sched.actions else: actions = self.actions return 
len(actions.values()) def clear_logs(self): if hasattr(self, "sched"): broks = self.sched.broks else: broks = self.broks to_del = [] for b in broks: if b.type == 'log': to_del.append(b) for b in to_del: broks.remove(b) def clear_actions(self): if hasattr(self, "sched"): self.sched.actions = {} else: self.actions = {} def assert_log_match(self, index, pattern, no_match=False): # log messages are counted 1...n, so index=1 for the first message if not no_match: self.assertGreaterEqual(self.count_logs(), index) regex = re.compile(pattern) lognum = 1 broks = sorted(self.sched.broks, key=lambda x: x.id) for brok in broks: if brok.type == 'log': brok.prepare() print "%s (%s): %s" % (lognum, brok.id, brok.data['log']) if index == lognum: print brok.data['log'] if re.search(regex, brok.data['log']): return lognum += 1 [b.prepare() for b in self.broks] self.assertTrue(no_match, "%s found a matched log line in broks :\n" "index=%s pattern=%r\n" "broks_logs=[[[\n%s\n]]]" % ( '*HAVE*' if no_match else 'Not', index, pattern, '\n'.join( '\t%s=%s' % (idx, b.strip()) for idx, b in enumerate( (b.data['log'] for b in self.broks if b.type == 'log'), 1) ) )) def _any_log_match(self, pattern, assert_not): regex = re.compile(pattern) broks = getattr(self, 'sched', self).broks broks = sorted(broks, lambda x, y: x.id - y.id) for brok in broks: if brok.type == 'log': brok.prepare() if re.search(regex, brok.data['log']): self.assertTrue(not assert_not, "Found matching log line:\n" "pattern = %r\nbrok log = %r" % (pattern, brok.data['log']) ) return self.assertTrue(assert_not, "No matching log line found:\n" "pattern = %r\n" "broks = %r" % (pattern, broks) ) def assert_any_log_match(self, pattern): self._any_log_match(pattern, assert_not=False) def assert_no_log_match(self, pattern): self._any_log_match(pattern, assert_not=True) def get_log_match(self, pattern): regex = re.compile(pattern) res = [] for brok in self.sched.broks: if brok.type == 'log': if re.search(regex, brok.data['log']): res.append(brok.data['log']) return res def print_header(self): print "\n" + "#" * 80 + "\n" + "#" + " " * 78 + "#" print "#" + string.center(self.id(), 78) + "#" print "#" + " " * 78 + "#\n" + "#" * 80 + "\n" def xtest_conf_is_correct(self): self.print_header() self.assertTrue(self.conf.conf_is_correct) if __name__ == '__main__': unittest.main()
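The `TimeHacker` class near the top of this file monkey-patches `time.time()` and `time.sleep()` so that scheduler tests never actually wait: `sleep()` only advances an offset that the patched `time()` then reports. A stripped-down, standalone sketch of the same trick — the class and names here are illustrative, not part of Shinken:

```python
import time

class FakeClock(object):
    """Monkey-patch time.time/time.sleep so 'waiting' is instantaneous."""

    def __init__(self):
        self.offset = 0.0
        self._real_time = time.time
        self._real_sleep = time.sleep

    def install(self):
        time.time = lambda: self._real_time() + self.offset
        time.sleep = self._advance          # sleeping just moves the clock

    def _advance(self, seconds):
        self.offset += seconds

    def uninstall(self):
        time.time = self._real_time
        time.sleep = self._real_sleep

clock = FakeClock()
clock.install()
start = time.time()
time.sleep(3600)                            # returns immediately
assert time.time() - start >= 3600          # but an hour has "passed"
clock.uninstall()
```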
{ "pile_set_name": "Github" }
/*************************************************************************** finddialoglite.cpp - K Desktop Planetarium ------------------- begin : Wed Jul 29 2016 copyright : (C) 2016 by Artem Fedoskin email : [email protected] ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ #include "detaildialoglite.h" #include "constellationboundarylines.h" #include "deepskyobject.h" #include "kspaths.h" #include "ksutils.h" #include "Options.h" #include "skymapcomposite.h" #include "skymaplite.h" #include "starobject.h" #include "kstarslite/skyobjectlite.h" #include "skyobjects/ksasteroid.h" #include "skyobjects/kscomet.h" #include "skyobjects/ksmoon.h" #include "skyobjects/ksplanetbase.h" #include "skyobjects/supernova.h" #include <QDesktopServices> #include <QTemporaryFile> DetailDialogLite::DetailDialogLite() { setProperty("isLinksOn", true); setProperty("isLogOn", true); } void DetailDialogLite::initialize() { connect(SkyMapLite::Instance(), SIGNAL(objectLiteChanged()), this, SLOT(createGeneralTab())); connect(SkyMapLite::Instance(), SIGNAL(objectLiteChanged()), this, SLOT(createPositionTab())); connect(SkyMapLite::Instance(), SIGNAL(objectLiteChanged()), this, SLOT(createLogTab())); connect(SkyMapLite::Instance(), SIGNAL(objectLiteChanged()), this, SLOT(createLinksTab())); } void DetailDialogLite::createGeneralTab() { SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); // Stuff that should be visible only for specific types of objects setProperty("illumination", ""); // Only shown for the moon setProperty("BVindex", ""); // Only shown for stars setupThumbnail(); //Fill in the data fields //Contents depend on type of object QString objecttyp, str; switch (selectedObject->type()) { case SkyObject::STAR: { StarObject *s = (StarObject *)selectedObject; if (s->getHDIndex()) { setProperty("name", (QString("%1, HD %2").arg(s->longname()).arg(s->getHDIndex()))); } else { setProperty("name", s->longname()); } objecttyp = s->sptype() + ' ' + i18n("star"); setProperty("magnitude", i18nc("number in magnitudes", "%1 mag", QLocale().toString(s->mag(), 'f', 2))); //show to hundredth place if (s->getBVIndex() < 30.) { setProperty("BVindex", QString::number(s->getBVIndex(), 'f', 2)); } //distance if (s->distance() > 2000. || s->distance() < 0.) // parallax < 0.5 mas { setProperty("distance", (QString(i18nc("larger than 2000 parsecs", "> 2000 pc")))); } else if (s->distance() > 50.) 
//show to nearest integer { setProperty("distance", (i18nc("number in parsecs", "%1 pc", QLocale().toString(s->distance(), 'f', 0)))); } else if (s->distance() > 10.0) //show to tenths place { setProperty("distance", (i18nc("number in parsecs", "%1 pc", QLocale().toString(s->distance(), 'f', 1)))); } else //show to hundredths place { setProperty("distance", (i18nc("number in parsecs", "%1 pc", QLocale().toString(s->distance(), 'f', 2)))); } //Note multiplicity/variability in angular size label setProperty("angSize", QString()); if (s->isMultiple() && s->isVariable()) { QString multiple = QString(i18nc("the star is a multiple star", "multiple") + ','); setProperty("angSize", QString(multiple + '\n' + (i18nc("the star is a variable star", "variable")))); } else if (s->isMultiple()) { setProperty("angSize", i18nc("the star is a multiple star", "multiple")); } else if (s->isVariable()) { setProperty("angSize", (i18nc("the star is a variable star", "variable"))); } break; //end of stars case } case SkyObject::ASTEROID: //[fall through to planets] case SkyObject::COMET: //[fall through to planets] case SkyObject::MOON: //[fall through to planets] case SkyObject::PLANET: { KSPlanetBase *ps = (KSPlanetBase *)selectedObject; setProperty("name", ps->longname()); //Type is "G5 star" for Sun if (ps->name() == "Sun") { objecttyp = i18n("G5 star"); } else if (ps->name() == "Moon") { objecttyp = ps->translatedName(); } else if (ps->name() == i18n("Pluto") || ps->name() == "Ceres" || ps->name() == "Eris") // TODO: Check if Ceres / Eris have translations and i18n() them { objecttyp = i18n("Dwarf planet"); } else { objecttyp = ps->typeName(); } //The moon displays illumination fraction and updateMag is called to calculate moon's current magnitude if (selectedObject->name() == "Moon") { setProperty( "illumination", (QString("%1 %").arg(QLocale().toString(((KSMoon *)selectedObject)->illum() * 100., 'f', 0)))); ((KSMoon *)selectedObject)->updateMag(); } // JM: Shouldn't we use the calculated magnitude? Disabling the following /* if(selectedObject->type() == SkyObject::COMET){ Data->Magnitude->setText(i18nc("number in magnitudes", "%1 mag", QLocale().toString( ((KSComet *)selectedObject)->getTotalMagnitudeParameter(), 'f', 2))); //show to hundredth place } else{*/ setProperty("magnitude", (i18nc("number in magnitudes", "%1 mag", QLocale().toString(ps->mag(), 'f', 2)))); //show to hundredth place //} //Distance from Earth. 
The moon requires a unit conversion if (ps->name() == "Moon") { setProperty("distance", (i18nc("distance in kilometers", "%1 km", QLocale().toString(ps->rearth() * AU_KM, 'f', 2)))); } else { setProperty("distance", (i18nc("distance in Astronomical Units", "%1 AU", QLocale().toString(ps->rearth(), 'f', 3)))); } //Angular size; moon and sun in arcmin, others in arcsec if (ps->angSize()) { if (ps->name() == "Sun" || ps->name() == "Moon") { setProperty( "angSize", (i18nc( "angular size in arcminutes", "%1 arcmin", QLocale().toString( ps->angSize(), 'f', 1)))); // Needn't be a plural form because sun / moon will never contract to 1 arcminute } else { setProperty("angSize", i18nc("angular size in arcseconds", "%1 arcsec", QLocale().toString(ps->angSize() * 60.0, 'f', 1))); } } else { setProperty("angSize", "--"); } break; //end of planets/comets/asteroids case } case SkyObject::SUPERNOVA: { Supernova *sup = (Supernova *)selectedObject; objecttyp = i18n("Supernova"); setProperty("name", sup->name()); setProperty("magnitude", (i18nc("number in magnitudes", "%1 mag", QLocale().toString(sup->mag(), 'f', 2)))); setProperty("distance", "---"); break; } default: //deep-sky objects { DeepSkyObject *dso = (DeepSkyObject *)selectedObject; //Show all names recorded for the object QStringList nameList; if (!dso->longname().isEmpty() && dso->longname() != dso->name()) { nameList.append(dso->translatedLongName()); nameList.append(dso->translatedName()); } else { nameList.append(dso->translatedName()); } if (!dso->translatedName2().isEmpty()) { nameList.append(dso->translatedName2()); } if (dso->ugc() != 0) { nameList.append(QString("UGC %1").arg(dso->ugc())); } if (dso->pgc() != 0) { nameList.append(QString("PGC %1").arg(dso->pgc())); } setProperty("name", nameList.join(",")); objecttyp = dso->typeName(); if (dso->type() == SkyObject::RADIO_SOURCE) { //ta->MagLabel->setText(i18nc("integrated flux at a frequency", "Flux(%1):", dso->customCatalog()->fluxFrequency())); //Data->Magnitude->setText(i18nc("integrated flux value", "%1 %2", // QLocale().toString(dso->flux(), 'f', 1), dso->customCatalog()->fluxUnit())); //show to tenths place } else if (dso->mag() > 90.0) { setProperty("magnitude", "--"); } else { setProperty("magnitude", i18nc("number in magnitudes", "%1 mag", QLocale().toString(dso->mag(), 'f', 1))); //show to tenths place } //No distances at this point... 
setProperty("distance", "--"); //Only show decimal place for small angular sizes if (dso->a() > 10.0) { setProperty("angSize", i18nc("angular size in arcminutes", "%1 arcmin", QLocale().toString(dso->a(), 'f', 0))); } else if (dso->a()) { setProperty("angSize", i18nc("angular size in arcminutes", "%1 arcmin", QLocale().toString(dso->a(), 'f', 1))); } else { setProperty("angSize", "--"); } break; } } //Reset advanced properties setProperty("perihilion", ""); setProperty("orbitID", ""); setProperty("NEO", ""); setProperty("diameter", ""); setProperty("rotation", ""); setProperty("earthMOID", ""); setProperty("orbitClass", ""); setProperty("albedo", ""); setProperty("dimensions", ""); setProperty("period", ""); // Add specifics data switch (selectedObject->type()) { case SkyObject::ASTEROID: { KSAsteroid *ast = (KSAsteroid *)selectedObject; // Perihelion str.setNum(ast->getPerihelion()); setProperty("perihelion", QString(str + " AU")); // Earth MOID if (ast->getEarthMOID() == 0) str = ""; else str.setNum(ast->getEarthMOID()).append(" AU"); setProperty("earthMOID", str); // Orbit ID setProperty("orbitID", ast->getOrbitID()); // Orbit Class setProperty("orbitClass", ast->getOrbitClass()); // NEO if (ast->isNEO()) setProperty("NEO", "Yes"); else setProperty("NEO", "No"); // Albedo if (ast->getAlbedo() == 0.0) str = ""; else str.setNum(ast->getAlbedo()); setProperty("albedo", str); // Diameter if (ast->getDiameter() == 0.0) str = ""; else str.setNum(ast->getDiameter()).append(" km"); setProperty("diameter", str); // Dimensions if (ast->getDimensions().isEmpty()) setProperty("dimensions", ""); else setProperty("dimensions", QString(ast->getDimensions() + " km")); // Rotation period if (ast->getRotationPeriod() == 0.0) str = ""; else str.setNum(ast->getRotationPeriod()).append(" h"); setProperty("rotation", str); // Period if (ast->getPeriod() == 0.0) str = ""; else str.setNum(ast->getPeriod()).append(" y"); setProperty("period", str); break; } case SkyObject::COMET: { KSComet *com = (KSComet *)selectedObject; // Perihelion str.setNum(com->getPerihelion()); setProperty("perihelion", QString(str + " AU")); // Earth MOID if (com->getEarthMOID() == 0) str = ""; else str.setNum(com->getEarthMOID()).append(" AU"); setProperty("earthMOID", str); // Orbit ID setProperty("orbitID", com->getOrbitID()); // Orbit Class setProperty("orbitClass", com->getOrbitClass()); // NEO if (com->isNEO()) setProperty("NEO", "Yes"); else setProperty("NEO", "No"); // Albedo if (com->getAlbedo() == 0.0) str = ""; else str.setNum(com->getAlbedo()); setProperty("albedo", str); // Diameter if (com->getDiameter() == 0.0) str = ""; else str.setNum(com->getDiameter()).append(" km"); setProperty("diameter", str); // Dimensions if (com->getDimensions().isEmpty()) setProperty("dimensions", ""); else setProperty("dimensions", QString(com->getDimensions() + " km")); // Rotation period if (com->getRotationPeriod() == 0.0) str = ""; else str.setNum(com->getRotationPeriod()).append(" h"); setProperty("rotation", str); // Period if (com->getPeriod() == 0.0) str = ""; else str.setNum(com->getPeriod()).append(" y"); setProperty("period", str); break; } } //Common to all types: QString cname = KStarsData::Instance()->skyComposite()->constellationBoundary()->constellationName(selectedObject); if (selectedObject->type() != SkyObject::CONSTELLATION) { cname = i18nc("%1 type of sky object (planet, asteroid etc), %2 name of a constellation", "%1 in %2", objecttyp, cname); } setProperty("typeInConstellation", cname); } void 
DetailDialogLite::createPositionTab() { KStarsData *data = KStarsData::Instance(); KStarsDateTime ut = data->ut(); GeoLocation *geo = data->geo(); SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); //Coordinates Section: //Don't use KLocale::formatNumber() for the epoch string, //because we don't want a thousands-place separator! QString sEpoch = QString::number(ut.epoch(), 'f', 1); //Replace the decimal point with localized decimal symbol sEpoch.replace('.', QLocale().decimalPoint()); qDebug() << (selectedObject->deprecess(data->updateNum(), 2451545.0l)).ra0().toHMSString() << (selectedObject->deprecess(data->updateNum(), 2451545.0l)).dec0().toDMSString() << endl; //qDebug() << selectedObject->ra().toHMSString() << selectedObject->dec().toDMSString() << endl; setProperty("RALabel", i18n("RA (%1):", sEpoch)); setProperty("decLabel", i18n("Dec (%1):", sEpoch)); setProperty("RA", selectedObject->ra().toHMSString()); setProperty("dec", selectedObject->dec().toDMSString()); selectedObject->EquatorialToHorizontal(data->lst(), data->geo()->lat()); setProperty("az", selectedObject->az().toDMSString()); dms a; if (Options::useAltAz()) a = selectedObject->alt(); else a = selectedObject->altRefracted(); setProperty("alt", a.toDMSString()); // Display the RA0 and Dec0 for objects that are outside the solar system if (!selectedObject->isSolarSystem()) { setProperty("RA0", selectedObject->ra0().toHMSString()); setProperty("dec0", selectedObject->dec0().toDMSString()); } else { setProperty("RA0", "--"); setProperty("dec0", "--"); } //Hour Angle can be negative, but dms HMS expressions cannot. //Here's a kludgy workaround: dms lst = geo->GSTtoLST(ut.gst()); dms ha(lst.Degrees() - selectedObject->ra().Degrees()); QChar sgn('+'); if (ha.Hours() > 12.0) { ha.setH(24.0 - ha.Hours()); sgn = '-'; } setProperty("HA", QString("%1%2").arg(sgn).arg(ha.toHMSString())); //Airmass is approximated as the secant of the zenith distance, //equivalent to 1./sin(Alt). Beware of Inf at Alt=0! 
if (selectedObject->alt().Degrees() > 0.0) setProperty("airmass", QLocale().toString(selectedObject->airmass(), 'f', 2)); else setProperty("airmass", "--"); //Rise/Set/Transit Section: //Prepare time/position variables QTime rt = selectedObject->riseSetTime(ut, geo, true); //true = use rise time dms raz = selectedObject->riseSetTimeAz(ut, geo, true); //true = use rise time //If transit time is before rise time, use transit time for tomorrow QTime tt = selectedObject->transitTime(ut, geo); dms talt = selectedObject->transitAltitude(ut, geo); if (tt < rt) { tt = selectedObject->transitTime(ut.addDays(1), geo); talt = selectedObject->transitAltitude(ut.addDays(1), geo); } //If set time is before rise time, use set time for tomorrow QTime st = selectedObject->riseSetTime(ut, geo, false); //false = use set time dms saz = selectedObject->riseSetTimeAz(ut, geo, false); //false = use set time if (st < rt) { st = selectedObject->riseSetTime(ut.addDays(1), geo, false); //false = use set time saz = selectedObject->riseSetTimeAz(ut.addDays(1), geo, false); //false = use set time } if (rt.isValid()) { setProperty("timeRise", QString().sprintf("%02d:%02d", rt.hour(), rt.minute())); setProperty("timeSet", QString().sprintf("%02d:%02d", st.hour(), st.minute())); setProperty("azRise", raz.toDMSString()); setProperty("azSet", saz.toDMSString()); } else { if (selectedObject->alt().Degrees() > 0.0) { setProperty("timeRise", i18n("Circumpolar")); setProperty("timeSet", i18n("Circumpolar")); } else { setProperty("timeRise", i18n("Never rises")); setProperty("timeSet", i18n("Never rises")); } setProperty("azRise", i18nc("Not Applicable", "N/A")); setProperty("azSet", i18nc("Not Applicable", "N/A")); } setProperty("timeTransit", QString().sprintf("%02d:%02d", tt.hour(), tt.minute())); setProperty("altTransit", talt.toDMSString()); // Restore the position and other time-dependent parameters selectedObject->recomputeCoords(ut, geo); } void DetailDialogLite::createLogTab() { SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); //Don't create a log tab for an unnamed star if (selectedObject->name() == QString("star")) { setProperty("isLogOn", false); return; } setProperty("isLogOn", true); if (selectedObject->userLog().isEmpty()) { setProperty("userLog", i18n("Record here observation logs and/or data on %1.", selectedObject->translatedName())); } else { setProperty("userLog", selectedObject->userLog()); } /*//Automatically save the log contents when the widget loses focus connect( Log->UserLog, SIGNAL(focusOut()), this, SLOT(saveLogData()) );*/ } void DetailDialogLite::createLinksTab() { SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); //No links for unnamed stars if (selectedObject->name() == QString("star")) { setProperty("isLinksOn", false); return; } setProperty("isLinksOn", true); QStringList newInfoList; foreach (const QString &s, selectedObject->InfoTitle()) newInfoList.append(i18nc("Image/info menu item (should be translated)", s.toLocal8Bit())); setProperty("infoTitleList", newInfoList); QStringList newImageList; foreach (const QString &s, selectedObject->ImageTitle()) newImageList.append(i18nc("Image/info menu item (should be translated)", s.toLocal8Bit())); setProperty("imageTitleList", newImageList); } void DetailDialogLite::updateLocalDatabase(int type, const QString &search_line, const QString &replace_line) { QString TempFileName, file_line; QFile URLFile; QTemporaryFile TempFile; QTextStream *temp_stream = nullptr; QTextStream 
*out_stream = nullptr; bool replace = !replace_line.isEmpty(); if (search_line.isEmpty()) return; TempFile.setAutoRemove(false); TempFile.open(); TempFileName = TempFile.fileName(); switch (type) { // Info Links case 0: // Get name for our local info_url file URLFile.setFileName(KSPaths::writableLocation(QStandardPaths::GenericDataLocation) + "info_url.dat"); break; // Image Links case 1: // Get name for our local info_url file URLFile.setFileName(KSPaths::writableLocation(QStandardPaths::GenericDataLocation) + "image_url.dat"); break; } if (!URLFile.open(QIODevice::ReadWrite)) { qDebug() << "DetailDialog: Failed to open " << URLFile.fileName(); qDebug() << "KStars cannot save to user database"; return; } // Copy URL file to temp file TempFile.write(URLFile.readAll()); //Return pointers to initial positions TempFile.seek(0); //Clear URLFile URLFile.resize(0); // Get streams; temp_stream = new QTextStream(&TempFile); out_stream = new QTextStream(&URLFile); while (!temp_stream->atEnd()) { file_line = temp_stream->readLine(); // If we find a match, either replace, or remove (by skipping). if (file_line == search_line) { if (replace) (*out_stream) << replace_line << endl; else continue; } else (*out_stream) << file_line << endl; } URLFile.close(); delete temp_stream; delete out_stream; } void DetailDialogLite::addLink(const QString &url, const QString &desc, bool isImageLink) { SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); if (url.isEmpty() || desc.isEmpty()) return; //Do nothing if empty url or desc were provided QString entry; QFile file; if (isImageLink) { //Add link to object's ImageList, and descriptive text to its ImageTitle list selectedObject->ImageList().append(url); selectedObject->ImageTitle().append(desc); //Also, update the user's custom image links database //check for user's image-links database. If it doesn't exist, create it. file.setFileName(KSPaths::writableLocation(QStandardPaths::GenericDataLocation) + "image_url.dat"); //determine filename in local user KDE directory tree. if (!file.open(QIODevice::ReadWrite | QIODevice::Append)) { QString message = i18n("Custom image-links file could not be opened.\nLink cannot be recorded for future sessions."); qDebug() << message; return; } else { entry = selectedObject->name() + ':' + desc + ':' + url; QTextStream stream(&file); stream << entry << endl; file.close(); setProperty("imageTitleList", selectedObject->ImageTitle()); } } else { selectedObject->InfoList().append(url); selectedObject->InfoTitle().append(desc); //check for user's image-links database. If it doesn't exist, create it. file.setFileName(KSPaths::writableLocation(QStandardPaths::GenericDataLocation) + "info_url.dat"); //determine filename in local user KDE directory tree. 
if (!file.open(QIODevice::ReadWrite | QIODevice::Append)) { QString message = i18n( "Custom information-links file could not be opened.\nLink cannot be recorded for future sessions."); qDebug() << message; return; } else { entry = selectedObject->name() + ':' + desc + ':' + url; QTextStream stream(&file); stream << entry << endl; file.close(); setProperty("infoTitleList", selectedObject->InfoTitle()); } } } void DetailDialogLite::removeLink(int itemIndex, bool isImage) { SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); QString currentItemURL, currentItemTitle, LineEntry, TempFileName, FileLine; QFile URLFile; QTemporaryFile TempFile; TempFile.setAutoRemove(false); TempFile.open(); TempFileName = TempFile.fileName(); //Check if it is a valid index if (itemIndex < 0) { return; } else if (isImage && itemIndex >= selectedObject->ImageTitle().length()) { return; } else if (!isImage && itemIndex >= selectedObject->InfoTitle().length()) { return; } //if (title.isEmpty() || url.isEmpty()) return; if (!isImage) //Information { currentItemTitle = selectedObject->InfoTitle()[itemIndex]; currentItemURL = selectedObject->InfoList()[itemIndex]; LineEntry = selectedObject->name(); LineEntry += ':'; LineEntry += currentItemTitle; LineEntry += ':'; LineEntry += currentItemURL; } else //Image { currentItemTitle = selectedObject->ImageTitle()[itemIndex]; currentItemURL = selectedObject->ImageList()[itemIndex]; LineEntry = selectedObject->name(); LineEntry += ':'; LineEntry += currentItemTitle; LineEntry += ':'; LineEntry += currentItemURL; } /*if (KMessageBox::warningContinueCancel( 0, i18n("Are you sure you want to remove the %1 link?", currentItemTitle), i18n("Delete Confirmation"),KStandardGuiItem::del())!=KMessageBox::Continue) return;*/ if (isImage) { selectedObject->ImageTitle().removeAt(itemIndex); selectedObject->ImageList().removeAt(itemIndex); } else { selectedObject->InfoTitle().removeAt(itemIndex); selectedObject->InfoList().removeAt(itemIndex); } // Remove link from file updateLocalDatabase(isImage ? 
1 : 0, LineEntry); setProperty("infoTitleList", selectedObject->InfoTitle()); setProperty("imageTitleList", selectedObject->ImageTitle()); } void DetailDialogLite::editLink(int itemIndex, bool isImage, const QString &desc, const QString &url) { SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); if (url.isEmpty() || desc.isEmpty()) return; //Do nothing if empty url or desc were provided QString search_line, replace_line, currentItemTitle, currentItemURL; //Check if it is a valid index if (itemIndex < 0) { return; } else if (isImage && itemIndex >= selectedObject->ImageTitle().length()) { return; } else if (!isImage && itemIndex >= selectedObject->InfoTitle().length()) { return; } if (!isImage) //Information { currentItemTitle = selectedObject->InfoTitle()[itemIndex]; currentItemURL = selectedObject->InfoList()[itemIndex]; search_line = selectedObject->name(); search_line += ':'; search_line += currentItemTitle; search_line += ':'; search_line += currentItemURL; } else //Image { currentItemTitle = selectedObject->ImageTitle()[itemIndex]; currentItemURL = selectedObject->ImageList()[itemIndex]; search_line = selectedObject->name(); search_line += ':'; search_line += currentItemTitle; search_line += ':'; search_line += currentItemURL; } bool go(true); // If nothing changed, skip th action if (url == currentItemURL && desc == currentItemTitle) go = false; if (go) { replace_line = selectedObject->name() + ':' + desc + ':' + url; // Info Link if (!isImage) { selectedObject->InfoTitle().replace(itemIndex, desc); selectedObject->InfoList().replace(itemIndex, url); // Image Links } else { selectedObject->ImageTitle().replace(itemIndex, desc); selectedObject->ImageList().replace(itemIndex, url); } // Update local files updateLocalDatabase(isImage ? 
1 : 0, search_line, replace_line); setProperty("infoTitleList", selectedObject->InfoTitle()); setProperty("imageTitleList", selectedObject->ImageTitle()); } } QString DetailDialogLite::getInfoURL(int index) { SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); QStringList urls = selectedObject->InfoList(); if (index >= 0 && index < urls.size()) { return urls[index]; } else { return ""; } } QString DetailDialogLite::getImageURL(int index) { SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); QStringList urls = selectedObject->ImageList(); if (index >= 0 && index < urls.size()) { return urls[index]; } else { return ""; } } void DetailDialogLite::setupThumbnail() { SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); //No image if object is a star if (selectedObject->type() == SkyObject::STAR || selectedObject->type() == SkyObject::CATALOG_STAR) { /*Thumbnail->scaled( Data->Image->width(), Data->Image->height() ); Thumbnail->fill( Data->DataFrame->palette().color( QPalette::Window ) ); Data->Image->setPixmap( *Thumbnail );*/ setProperty("thumbnail", ""); return; } //Try to load the object's image from disk //If no image found, load "no image" image QFile file; QString fname = "thumb-" + selectedObject->name().toLower().remove(' ') + ".png"; if (KSUtils::openDataFile(file, fname)) { file.close(); setProperty("thumbnail", file.fileName()); } else { setProperty("thumbnail", ""); } } /*void DetailDialogLite::viewResource(int itemIndex, bool isImage) { QString url; if(isImage) { url = getImageURL(itemIndex); } else { url = getInfoURL(itemIndex); } QDesktopServices::openUrl(QUrl(url, QUrl::TolerantMode)); }*/ void DetailDialogLite::saveLogData(const QString &userLog) { SkyObject *selectedObject = SkyMapLite::Instance()->getClickedObjectLite()->getObject(); selectedObject->saveUserLog(userLog); }
{ "pile_set_name": "Github" }
# Copyright 2020 MONAI Consortium # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import warnings from typing import Callable, List, Sequence, Union import numpy as np import torch from monai.networks import one_hot from monai.utils import Average def do_activation(input_data: torch.Tensor, activation: Union[str, Callable] = "softmax") -> torch.Tensor: """ This function is used to do activation for inputs. Args: input_data: the input that to be activated, in the shape [B] or [BN] or [BNHW] or [BNHWD]. activation: can be ``"sigmoid"`` or ``"softmax"``, or a callable function. Defaults to ``"softmax"``. An example for callable function: ``activation = lambda x: torch.log_softmax(x)``. Raises: NotImplementedError: When input an activation name that is not implemented. """ input_ndim = input_data.ndimension() if activation == "softmax": if input_ndim == 1: warnings.warn("input_data has only one channel, softmax ignored.") else: input_data = input_data.float().softmax(dim=1) elif activation == "sigmoid": input_data = input_data.float().sigmoid() elif callable(activation): input_data = activation(input_data) else: raise NotImplementedError("activation can only be sigmoid, softmax or a callable function.") return input_data def do_binarization( input_data: torch.Tensor, bin_mode: str = "threshold", bin_threshold: Union[float, Sequence[float]] = 0.5, ) -> torch.Tensor: """ Args: input_data: the input that to be binarized, in the shape [B] or [BN] or [BNHW] or [BNHWD]. bin_mode: can be ``"threshold"`` or ``"mutually_exclusive"``, or a callable function. - ``"threshold"``, a single threshold or a sequence of thresholds should be set. - ``"mutually_exclusive"``, `input_data` will be converted by a combination of argmax and to_onehot. bin_threshold: the threshold to binarize the input data, can be a single value or a sequence of values that each one of the value represents a threshold for a class. Raises: AssertionError: when `bin_threshold` is a sequence and the input has the shape [B]. AssertionError: when `bin_threshold` is a sequence but the length != the number of classes. AssertionError: when `bin_mode` is ``"mutually_exclusive"`` the input has the shape [B]. AssertionError: when `bin_mode` is ``"mutually_exclusive"`` the input has the shape [B, 1]. """ input_ndim = input_data.ndimension() if bin_mode == "threshold": if isinstance(bin_threshold, Sequence): assert input_ndim > 1, "a sequence of thresholds are used for multi-class tasks." error_hint = "the length of the sequence should be the same as the number of classes." assert input_data.shape[1] == len(bin_threshold), "{}".format(error_hint) for cls_num in range(input_data.shape[1]): input_data[:, cls_num] = (input_data[:, cls_num] > bin_threshold[cls_num]).float() else: input_data = (input_data > bin_threshold).float() elif bin_mode == "mutually_exclusive": assert input_ndim > 1, "mutually_exclusive is used for multi-class tasks." n_classes = input_data.shape[1] assert n_classes > 1, "mutually_exclusive is used for multi-class tasks." 
input_data = torch.argmax(input_data, dim=1, keepdim=True) input_data = one_hot(input_data, num_classes=n_classes) return input_data def cal_confusion_matrix_elements(p: torch.Tensor, t: torch.Tensor) -> List[np.ndarray]: """ This function is used to calculate the number of true positives (tp), true negatives(tn), false positives (fp), false negatives (fn), total positives and total negatives, and return a list of these values. Args: p: predictions, a binarized torch.Tensor that its first dimension represents the batch size. t: ground truth, a binarized torch.Tensor that its first dimension represents the batch size. parameter t and p should have same shapes. Notes: If the input shape is [B], each element in the returned list is an int value. Else, each element in the returned list is an np.ndarray with shape (N,), where each element in this array represents the value for the corresponding class. Raises: AssertionError: when `p` and `t` have different shapes. """ assert p.shape == t.shape, "predictions and targets should have same shapes." with torch.no_grad(): dims = p.ndimension() if dims > 1: # in the form of [BNS], where S is the number of pixels for one sample. batch_size, n_class = p.shape[:2] p = p.view(batch_size, n_class, -1) t = t.view(batch_size, n_class, -1) tp = ((p + t) == 2).float() tn = ((p + t) == 0).float() if dims > 1: tp = tp.sum(dim=[0, 2]) tn = tn.sum(dim=[0, 2]) total_p = t.sum(dim=[0, 2]) total_n = batch_size * t.shape[-1] - total_p else: tp, tn = tp.sum(), tn.sum() total_p = t.sum() total_n = t.shape[-1] - total_p fn = total_p - tp fp = total_n - tn result = [tp, tn, fp, fn, total_p, total_n] result = [l.data.cpu().numpy() for l in result] return result def handle_zero_divide( numerator: Union[np.ndarray, torch.Tensor, float, int], denominator: Union[np.ndarray, torch.Tensor, float, int], zero_division: int = 0, ) -> Union[np.ndarray, torch.Tensor, float]: """ This function is used to handle the division case that the denominator has 0. This function takes sklearn for reference, see: https://github.com/scikit-learn/scikit-learn/blob/0fb307bf3/sklearn/metrics/_classification.py#L1179 """ if isinstance(denominator, (float, int)): if denominator != 0: return numerator / denominator else: return zero_division else: mask = denominator == 0.0 denominator[mask] = 1 result = numerator / denominator if not mask.any(): return result else: result[mask] = zero_division return result def do_calculate_metric( confusion_ele_list: List[np.ndarray], metric_name: str, average: Union[Average, str] = "none", zero_division: int = 0, ): """ Args: confusion_ele_list: the returned result of function ``cal_confusion_matrix_elements``. metric_name: the simplified metric name from function ``check_metric_name_and_unify``. average: type of averaging performed if not binary classification. Defaults to ``"macro"``. - ``"macro"``: calculate metrics for each label, and find their unweighted mean. This does not take label imbalance into account. - ``"weighted"``: calculate metrics for each label, and find their average weighted by support (the number of true instances for each label). - ``"micro"``: calculate metrics globally by considering each element of the label indicator matrix as a label. - ``"none"``: the scores for each class are returned. zero_division: the value to return when there is a zero division, for example, when all predictions and labels are negative. Defaults to 0. 
""" ele_list: List[Union[np.ndarray, int, float]] metric = metric_name div_0 = zero_division # pre-process average average = Average(average) if len(confusion_ele_list[0].shape) == 0: average = Average.NONE # for binary tasks, other average methods are meaningless. ele_list = [int(l) for l in confusion_ele_list] if average == Average.MICRO: ele_list = [int(l.sum()) for l in confusion_ele_list] else: ele_list = confusion_ele_list tp, tn, fp, fn, p, n = ele_list # calculate numerator: Union[np.ndarray, int, float] denominator: Union[np.ndarray, int, float] if metric == "tpr": numerator, denominator = tp, p elif metric == "tnr": numerator, denominator = tn, n elif metric == "ppv": numerator, denominator = tp, (tp + fp) elif metric == "npv": numerator, denominator = tn, (tn + fn) elif metric == "fnr": numerator, denominator = fn, p elif metric == "fpr": numerator, denominator = fp, n elif metric == "fdr": numerator, denominator = fp, (fp + tp) elif metric == "for": numerator, denominator = fn, (fn + tn) elif metric == "pt": tpr = handle_zero_divide(tp, p, div_0) tnr = handle_zero_divide(tn, n, div_0) numerator = np.sqrt(tpr * (1 - tnr)) + tnr - 1 denominator = tpr + tnr - 1 elif metric == "ts": numerator, denominator = tp, (tp + fn + fp) elif metric == "acc": numerator, denominator = (tp + tp), (p + n) elif metric == "ba": tpr = handle_zero_divide(tp, p, div_0) tnr = handle_zero_divide(tn, n, div_0) numerator, denominator = (tpr + tnr), 2 elif metric == "f1": numerator, denominator = tp * 2, (tp * 2 + fn + fp) elif metric == "mcc": numerator = tp * tn - fp * fn denominator = np.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn)) elif metric == "fm": tpr = handle_zero_divide(tp, p, div_0) ppv = handle_zero_divide(tp, (tp + fp), div_0) numerator = np.sqrt(ppv * tpr) denominator = 1 elif metric == "bm": tpr = handle_zero_divide(tp, p, div_0) tnr = handle_zero_divide(tn, n, div_0) numerator = tpr + tnr - 1 denominator = 1 elif metric == "mk": ppv = handle_zero_divide(tp, (tp + fp), div_0) npv = handle_zero_divide(tn, (tn + fn), div_0) numerator = ppv + npv - 1 denominator = 1 else: raise NotImplementedError("the metric is not implemented.") result = handle_zero_divide(numerator, denominator, div_0) if average == Average.MICRO or average == Average.NONE: return result weights = None if average == Average.WEIGHTED: weights = p result = np.average(result, weights=weights) return result def check_metric_name_and_unify(metric_name: str): """ There are many metrics related to confusion matrix, and some of the metrics have more than one names. In addition, some of the names are very long. Therefore, this function is used to simplify the implementation. 
""" metric_name = metric_name.replace(" ", "_") metric_name = metric_name.lower() if metric_name in ["sensitivity", "recall", "hit_rate", "true_positive_rate", "tpr"]: return "tpr" elif metric_name in ["specificity", "selectivity", "true_negative_rate", "tnr"]: return "tnr" elif metric_name in ["precision", "positive_predictive_value", "ppv"]: return "ppv" elif metric_name in ["negative_predictive_value", "npv"]: return "npv" elif metric_name in ["miss_rate", "false_negative_rate", "fnr"]: return "fnr" elif metric_name in ["fall_out", "false_positive_rate", "fpr"]: return "fpr" elif metric_name in ["false_discovery_rate", "fdr"]: return "fdr" elif metric_name in ["false_omission_rate", "for"]: return "for" elif metric_name in ["prevalence_threshold", "pt"]: return "pt" elif metric_name in ["threat_score", "critical_success_index", "ts", "csi"]: return "ts" elif metric_name in ["accuracy", "acc"]: return "acc" elif metric_name in ["balanced_accuracy", "ba"]: return "ba" elif metric_name in ["f1_score", "f1"]: return "f1" elif metric_name in ["matthews_correlation_coefficient", "mcc"]: return "mcc" elif metric_name in ["fowlkes_mallows_index", "fm"]: return "fm" elif metric_name in ["informedness", "bookmaker_informedness", "bm"]: return "bm" elif metric_name in ["markedness", "deltap", "mk"]: return "mk" else: raise NotImplementedError("the metric is not implemented.")
{ "pile_set_name": "Github" }
/// <reference path="main/ambient/es6-shim/es6-shim.d.ts" /> /// <reference path="main/ambient/googlemaps/googlemaps.d.ts" />
{ "pile_set_name": "Github" }
#include <openssl/opensslconf.h> #ifdef OPENSSL_NO_SRP # include <stdio.h> int main(int argc, char *argv[]) { printf("No SRP support\n"); return (0); } #else # include <openssl/srp.h> # include <openssl/rand.h> # include <openssl/err.h> static void showbn(const char *name, const BIGNUM *bn) { fputs(name, stdout); fputs(" = ", stdout); BN_print_fp(stdout, bn); putc('\n', stdout); } # define RANDOM_SIZE 32 /* use 256 bits on each side */ static int run_srp(const char *username, const char *client_pass, const char *server_pass) { int ret = -1; BIGNUM *s = NULL; BIGNUM *v = NULL; BIGNUM *a = NULL; BIGNUM *b = NULL; BIGNUM *u = NULL; BIGNUM *x = NULL; BIGNUM *Apub = NULL; BIGNUM *Bpub = NULL; BIGNUM *Kclient = NULL; BIGNUM *Kserver = NULL; unsigned char rand_tmp[RANDOM_SIZE]; /* use builtin 1024-bit params */ SRP_gN *GN = SRP_get_default_gN("1024"); if (GN == NULL) { fprintf(stderr, "Failed to get SRP parameters\n"); return -1; } /* Set up server's password entry */ if (!SRP_create_verifier_BN(username, server_pass, &s, &v, GN->N, GN->g)) { fprintf(stderr, "Failed to create SRP verifier\n"); return -1; } showbn("N", GN->N); showbn("g", GN->g); showbn("Salt", s); showbn("Verifier", v); /* Server random */ RAND_pseudo_bytes(rand_tmp, sizeof(rand_tmp)); b = BN_bin2bn(rand_tmp, sizeof(rand_tmp), NULL); /* TODO - check b != 0 */ showbn("b", b); /* Server's first message */ Bpub = SRP_Calc_B(b, GN->N, GN->g, v); showbn("B", Bpub); if (!SRP_Verify_B_mod_N(Bpub, GN->N)) { fprintf(stderr, "Invalid B\n"); return -1; } /* Client random */ RAND_pseudo_bytes(rand_tmp, sizeof(rand_tmp)); a = BN_bin2bn(rand_tmp, sizeof(rand_tmp), NULL); /* TODO - check a != 0 */ showbn("a", a); /* Client's response */ Apub = SRP_Calc_A(a, GN->N, GN->g); showbn("A", Apub); if (!SRP_Verify_A_mod_N(Apub, GN->N)) { fprintf(stderr, "Invalid A\n"); return -1; } /* Both sides calculate u */ u = SRP_Calc_u(Apub, Bpub, GN->N); /* Client's key */ x = SRP_Calc_x(s, username, client_pass); Kclient = SRP_Calc_client_key(GN->N, Bpub, GN->g, x, a, u); showbn("Client's key", Kclient); /* Server's key */ Kserver = SRP_Calc_server_key(Apub, v, u, b, GN->N); showbn("Server's key", Kserver); if (BN_cmp(Kclient, Kserver) == 0) { ret = 0; } else { fprintf(stderr, "Keys mismatch\n"); ret = 1; } BN_clear_free(Kclient); BN_clear_free(Kserver); BN_clear_free(x); BN_free(u); BN_free(Apub); BN_clear_free(a); BN_free(Bpub); BN_clear_free(b); BN_free(s); BN_clear_free(v); return ret; } int main(int argc, char **argv) { BIO *bio_err; bio_err = BIO_new_fp(stderr, BIO_NOCLOSE); CRYPTO_malloc_debug_init(); CRYPTO_dbg_set_options(V_CRYPTO_MDEBUG_ALL); CRYPTO_mem_ctrl(CRYPTO_MEM_CHECK_ON); ERR_load_crypto_strings(); /* "Negative" test, expect a mismatch */ if (run_srp("alice", "password1", "password2") == 0) { fprintf(stderr, "Mismatched SRP run failed\n"); return 1; } /* "Positive" test, should pass */ if (run_srp("alice", "password", "password") != 0) { fprintf(stderr, "Plain SRP run failed\n"); return 1; } CRYPTO_cleanup_all_ex_data(); ERR_remove_thread_state(NULL); ERR_free_strings(); CRYPTO_mem_leaks(bio_err); return 0; } #endif
{ "pile_set_name": "Github" }
//
//  BmobBatch.h
//  BmobSDK
//
//  Created by Bmob on 14-4-21.
//  Copyright (c) 2014 Bmob. All rights reserved.
//

#import <Foundation/Foundation.h>

@interface BmobObjectsBatch : NSObject

/**
 *  Create a record; may be called multiple times.
 *
 *  @param className table (class) name
 *  @param para      column names and values to create
 */
-(void)saveBmobObjectWithClassName:(NSString *)className parameters:(NSDictionary*)para;

/**
 *  Update a record; may be called multiple times.
 *
 *  @param className table (class) name
 *  @param objectId  objectId of the row to update
 *  @param para      columns and values to update
 */
-(void)updateBmobObjectWithClassName:(NSString*)className objectId:(NSString*)objectId parameters:(NSDictionary*)para;

/**
 *  Delete a record; may be called multiple times.
 *
 *  @param className table (class) name
 *  @param objectId  objectId of the record to delete
 */
-(void)deleteBmobObjectWithClassName:(NSString *)className objectId:(NSString*)objectId;

/**
 *  Execute the batched operations.
 *
 *  @param block returns whether the batch succeeded and any error information
 */
-(void)batchObjectsInBackgroundWithResultBlock:(void(^)(BOOL isSuccessful,NSError *error))block;

// TODO: add another method

@end
{ "pile_set_name": "Github" }
// Metadata version: v4.0.30319 .assembly extern mscorlib { .publickeytoken = (B7 7A 5C 56 19 34 E0 89 ) // .z\V.4.. .ver 4:0:0:0 } .assembly Issue1918 { .ver 1:0:0:0 } .module Issue1918.exe .imagebase 0x00400000 .file alignment 0x00000200 .stackreserve 0x00100000 .subsystem 0x0003 // WINDOWS_CUI .corflags 0x00020003 // ILONLY 32BITPREFERRED .class private auto ansi beforefieldinit ICSharpCode.Decompiler.Tests.TestCases.ILPretty.Issue1918 extends [mscorlib]System.Object { .method public hidebysig instance void ProblemFunction (valuetype [mscorlib]System.Guid[] '', int32 '' ) cil managed { .maxstack 2 .locals init ( [0] valuetype [mscorlib]System.Guid[], [1] int32, [2] void*, [3] valuetype [mscorlib]System.Guid[] pinned, [4] native uint*, [5] native uint ) IL_0000: ldarg.1 stloc.0 IL_0010: ldarg.2 stloc.1 ldloc.0 dup stloc.3 brfalse.s IL_0026 ldloc.3 ldlen conv.i4 brtrue.s IL_002b IL_0026: ldc.i4.0 conv.u stloc.2 br.s IL_0034 IL_002b: ldloc.3 ldc.i4.0 ldelema [mscorlib]System.Guid conv.u stloc.2 IL_0034: ldloc.2 sizeof [mscorlib]System.UIntPtr sub stloc.s 4 ldloc.s 4 ldind.i stloc.s 5 .try { ldloc.s 4 ldloc.1 conv.i8 call native uint [mscorlib]System.UIntPtr::op_Explicit(uint64) stind.i ldarg.1 leave.s IL_005c } // end .try finally { ldloc.s 4 ldloc.s 5 stind.i endfinally } // end handler IL_005c: ldsfld valuetype [mscorlib]System.Guid[] ICSharpCode.Decompiler.Tests.TestCases.ILPretty.Issue1918::NullVal stloc.3 ret } .field public static valuetype [mscorlib]System.Guid[] NullVal }
{ "pile_set_name": "Github" }
#!/bin/sh
set -e

docker-compose -f docker-compose.yaml up
{ "pile_set_name": "Github" }
using System; using UIKit; using CoreGraphics; using Foundation; using System.Reactive.Linq; using System.Reactive.Subjects; namespace CodeBucket.DialogElements { public class EntryElement : Element { private string _currentValue; /// <summary> /// The value of the EntryElement /// </summary> public string Value { get { return _currentValue; } set { if (string.Equals(_currentValue, value)) return; _currentValue = value; if (entry != null) entry.Text = value; _changedSubject.OnNext(value); } } private string _caption; public string Caption { get { return _caption; } set { if (_caption == value) return; _caption = value; var cell = GetActiveCell(); if (cell != null && cell.TextLabel != null) cell.TextLabel.Text = value ?? string.Empty; } } public UIKeyboardType KeyboardType { get { return keyboardType; } set { keyboardType = value; if (entry != null) entry.KeyboardType = value; } } /// <summary> /// The type of Return Key that is displayed on the /// keyboard, you can change this to use this for /// Done, Return, Save, etc. keys on the keyboard /// </summary> public UIReturnKeyType? ReturnKeyType { get { return returnKeyType; } set { returnKeyType = value; if (entry != null && returnKeyType.HasValue) entry.ReturnKeyType = returnKeyType.Value; } } public UITextAutocapitalizationType AutocapitalizationType { get { return autocapitalizationType; } set { autocapitalizationType = value; if (entry != null) entry.AutocapitalizationType = value; } } public UITextAutocorrectionType AutocorrectionType { get { return autocorrectionType; } set { autocorrectionType = value; if (entry != null) this.autocorrectionType = value; } } public UITextFieldViewMode ClearButtonMode { get { return clearButtonMode; } set { clearButtonMode = value; if (entry != null) entry.ClearButtonMode = value; } } public UITextAlignment TextAlignment { get { return textalignment; } set{ textalignment = value; if (entry != null) { entry.TextAlignment = textalignment; } } } UITextAlignment textalignment = UITextAlignment.Left; UIKeyboardType keyboardType = UIKeyboardType.Default; UIReturnKeyType? returnKeyType = null; UITextAutocapitalizationType autocapitalizationType = UITextAutocapitalizationType.Sentences; UITextAutocorrectionType autocorrectionType = UITextAutocorrectionType.Default; UITextFieldViewMode clearButtonMode = UITextFieldViewMode.Never; bool isPassword, becomeResponder; UITextField entry; string placeholder; private readonly Subject<string> _changedSubject = new Subject<string>(); public event Func<bool> ShouldReturn; public UIFont TitleFont { get; set; } public UIFont EntryFont { get; set; } public UIColor TitleColor { get; set; } public IObservable<string> Changed { get { return _changedSubject.AsObservable(); } } public EntryElement (string caption, string placeholder, string value) : this(caption, placeholder, value, false) { } public EntryElement (string caption, string placeholder, string value, bool isPassword) { TitleFont = UIFont.PreferredBody; EntryFont = UIFont.PreferredBody; TitleColor = StringElement.DefaultTitleColor; Value = value; Caption = caption; this.isPassword = isPassword; this.placeholder = placeholder; } // // Computes the X position for the entry by aligning all the entries in the Section // CGSize ComputeEntryPosition (UITableView tv, UITableViewCell cell) { if (Section.EntryAlignment.Width != 0) return Section.EntryAlignment; // If all EntryElements have a null Caption, align UITextField with the Caption // offset of normal cells (at 10px). 
var max = new CGSize (-15, UIStringDrawing.StringSize ("M", TitleFont).Height); foreach (var e in Section){ var ee = e as EntryElement; if (ee == null) continue; if (ee.Caption != null) { var size = UIStringDrawing.StringSize (ee.Caption, TitleFont); if (size.Width > max.Width) max = size; } } Section.EntryAlignment = new CGSize (25f + (nfloat)Math.Min (max.Width, 160), max.Height); return Section.EntryAlignment; } protected virtual UITextField CreateTextField (CGRect frame) { return new UITextField (frame) { AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleLeftMargin, Placeholder = placeholder ?? "", SecureTextEntry = isPassword, Text = Value ?? "", Tag = 1, TextAlignment = textalignment, ClearButtonMode = ClearButtonMode, Font = EntryFont, }; } static NSString cellkey = new NSString ("EntryElement"); UITableViewCell cell; public override UITableViewCell GetCell (UITableView tv) { if (cell == null) { cell = new UITableViewCell (UITableViewCellStyle.Default, cellkey); cell.SelectionStyle = UITableViewCellSelectionStyle.None; } cell.TextLabel.Text = Caption; var offset = (UIDevice.CurrentDevice.UserInterfaceIdiom == UIUserInterfaceIdiom.Phone) ? 20 : 90; cell.Frame = new CGRect(cell.Frame.X, cell.Frame.Y, tv.Frame.Width-offset, cell.Frame.Height); CGSize size = ComputeEntryPosition (tv, cell); var yOffset = (cell.ContentView.Bounds.Height - size.Height) / 2 - 1; var width = cell.ContentView.Bounds.Width - size.Width; if (textalignment == UITextAlignment.Right) { // Add padding if right aligned width -= 10; } var entryFrame = new CGRect (size.Width, yOffset + 2f, width, size.Height); if (entry == null) { entry = CreateTextField (entryFrame); entry.ValueChanged += (sender, e) => Value = entry.Text; entry.EditingChanged += (sender, e) => Value = entry.Text; entry.Ended += (sender, e) => Value = entry.Text; entry.AllEditingEvents += (sender, e) => Value = entry.Text; entry.ShouldReturn += delegate { if (ShouldReturn != null) return ShouldReturn (); RootElement root = GetRootElement(); EntryElement focus = null; if (root == null) return true; foreach (var s in root) { foreach (var e in s) { if (e == this) { focus = this; } else if (focus != null && e is EntryElement) { focus = e as EntryElement; break; } } if (focus != null && focus != this) break; } if (focus != this) focus.BecomeFirstResponder (true); else focus.ResignFirstResponder (true); return true; }; entry.Started += delegate { EntryElement self = null; if (!returnKeyType.HasValue) { var returnType = UIReturnKeyType.Default; foreach (var e in Section) { if (e == this) self = this; else if (self != null && e is EntryElement) returnType = UIReturnKeyType.Next; } entry.ReturnKeyType = returnType; } else entry.ReturnKeyType = returnKeyType.Value; tv.ScrollToRow (IndexPath, UITableViewScrollPosition.Middle, true); }; cell.ContentView.AddSubview (entry); } else entry.Frame = entryFrame; if (becomeResponder){ entry.BecomeFirstResponder (); becomeResponder = false; } entry.KeyboardType = KeyboardType; entry.AutocapitalizationType = AutocapitalizationType; entry.AutocorrectionType = AutocorrectionType; cell.TextLabel.Text = Caption; cell.TextLabel.Font = TitleFont; cell.TextLabel.TextColor = TitleColor; return cell; } public override void Selected (UITableView tableView, NSIndexPath indexPath) { BecomeFirstResponder(true); base.Selected(tableView, indexPath); } public override bool Matches (string text) { return (Value != null && Value.IndexOf(text, StringComparison.CurrentCultureIgnoreCase) != -1) || base.Matches 
(text); } /// <summary> /// Makes this cell the first responder (get the focus) /// </summary> /// <param name="animated"> /// Whether scrolling to the location of this cell should be animated /// </param> public virtual void BecomeFirstResponder (bool animated) { becomeResponder = true; var tv = GetContainerTableView (); if (tv == null) return; tv.ScrollToRow (IndexPath, UITableViewScrollPosition.Middle, animated); if (entry != null){ entry.BecomeFirstResponder (); becomeResponder = false; } } public virtual void ResignFirstResponder (bool animated) { becomeResponder = false; var tv = GetContainerTableView (); if (tv == null) return; tv.ScrollToRow (IndexPath, UITableViewScrollPosition.Middle, animated); if (entry != null) entry.ResignFirstResponder (); } } }
{ "pile_set_name": "Github" }
// Copyright 2011 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package ssh import ( "bytes" "math/big" "math/rand" "reflect" "testing" "testing/quick" ) var intLengthTests = []struct { val, length int }{ {0, 4 + 0}, {1, 4 + 1}, {127, 4 + 1}, {128, 4 + 2}, {-1, 4 + 1}, } func TestIntLength(t *testing.T) { for _, test := range intLengthTests { v := new(big.Int).SetInt64(int64(test.val)) length := intLength(v) if length != test.length { t.Errorf("For %d, got length %d but expected %d", test.val, length, test.length) } } } type msgAllTypes struct { Bool bool `sshtype:"21"` Array [16]byte Uint64 uint64 Uint32 uint32 Uint8 uint8 String string Strings []string Bytes []byte Int *big.Int Rest []byte `ssh:"rest"` } func (t *msgAllTypes) Generate(rand *rand.Rand, size int) reflect.Value { m := &msgAllTypes{} m.Bool = rand.Intn(2) == 1 randomBytes(m.Array[:], rand) m.Uint64 = uint64(rand.Int63n(1<<63 - 1)) m.Uint32 = uint32(rand.Intn((1 << 31) - 1)) m.Uint8 = uint8(rand.Intn(1 << 8)) m.String = string(m.Array[:]) m.Strings = randomNameList(rand) m.Bytes = m.Array[:] m.Int = randomInt(rand) m.Rest = m.Array[:] return reflect.ValueOf(m) } func TestMarshalUnmarshal(t *testing.T) { rand := rand.New(rand.NewSource(0)) iface := &msgAllTypes{} ty := reflect.ValueOf(iface).Type() n := 100 if testing.Short() { n = 5 } for j := 0; j < n; j++ { v, ok := quick.Value(ty, rand) if !ok { t.Errorf("failed to create value") break } m1 := v.Elem().Interface() m2 := iface marshaled := Marshal(m1) if err := Unmarshal(marshaled, m2); err != nil { t.Errorf("Unmarshal %#v: %s", m1, err) break } if !reflect.DeepEqual(v.Interface(), m2) { t.Errorf("got: %#v\nwant:%#v\n%x", m2, m1, marshaled) break } } } func TestUnmarshalEmptyPacket(t *testing.T) { var b []byte var m channelRequestSuccessMsg if err := Unmarshal(b, &m); err == nil { t.Fatalf("unmarshal of empty slice succeeded") } } func TestUnmarshalUnexpectedPacket(t *testing.T) { type S struct { I uint32 `sshtype:"43"` S string B bool } s := S{11, "hello", true} packet := Marshal(s) packet[0] = 42 roundtrip := S{} err := Unmarshal(packet, &roundtrip) if err == nil { t.Fatal("expected error, not nil") } } func TestMarshalPtr(t *testing.T) { s := struct { S string }{"hello"} m1 := Marshal(s) m2 := Marshal(&s) if !bytes.Equal(m1, m2) { t.Errorf("got %q, want %q for marshaled pointer", m2, m1) } } func TestBareMarshalUnmarshal(t *testing.T) { type S struct { I uint32 S string B bool } s := S{42, "hello", true} packet := Marshal(s) roundtrip := S{} Unmarshal(packet, &roundtrip) if !reflect.DeepEqual(s, roundtrip) { t.Errorf("got %#v, want %#v", roundtrip, s) } } func TestBareMarshal(t *testing.T) { type S2 struct { I uint32 } s := S2{42} packet := Marshal(s) i, rest, ok := parseUint32(packet) if len(rest) > 0 || !ok { t.Errorf("parseInt(%q): parse error", packet) } if i != s.I { t.Errorf("got %d, want %d", i, s.I) } } func TestUnmarshalShortKexInitPacket(t *testing.T) { // This used to panic. 
// Issue 11348 packet := []byte{0x14, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0xff, 0xff, 0xff, 0xff} kim := &kexInitMsg{} if err := Unmarshal(packet, kim); err == nil { t.Error("truncated packet unmarshaled without error") } } func TestMarshalMultiTag(t *testing.T) { var res struct { A uint32 `sshtype:"1|2"` } good1 := struct { A uint32 `sshtype:"1"` }{ 1, } good2 := struct { A uint32 `sshtype:"2"` }{ 1, } if e := Unmarshal(Marshal(good1), &res); e != nil { t.Errorf("error unmarshaling multipart tag: %v", e) } if e := Unmarshal(Marshal(good2), &res); e != nil { t.Errorf("error unmarshaling multipart tag: %v", e) } bad1 := struct { A uint32 `sshtype:"3"` }{ 1, } if e := Unmarshal(Marshal(bad1), &res); e == nil { t.Errorf("bad struct unmarshaled without error") } } func randomBytes(out []byte, rand *rand.Rand) { for i := 0; i < len(out); i++ { out[i] = byte(rand.Int31()) } } func randomNameList(rand *rand.Rand) []string { ret := make([]string, rand.Int31()&15) for i := range ret { s := make([]byte, 1+(rand.Int31()&15)) for j := range s { s[j] = 'a' + uint8(rand.Int31()&15) } ret[i] = string(s) } return ret } func randomInt(rand *rand.Rand) *big.Int { return new(big.Int).SetInt64(int64(int32(rand.Uint32()))) } func (*kexInitMsg) Generate(rand *rand.Rand, size int) reflect.Value { ki := &kexInitMsg{} randomBytes(ki.Cookie[:], rand) ki.KexAlgos = randomNameList(rand) ki.ServerHostKeyAlgos = randomNameList(rand) ki.CiphersClientServer = randomNameList(rand) ki.CiphersServerClient = randomNameList(rand) ki.MACsClientServer = randomNameList(rand) ki.MACsServerClient = randomNameList(rand) ki.CompressionClientServer = randomNameList(rand) ki.CompressionServerClient = randomNameList(rand) ki.LanguagesClientServer = randomNameList(rand) ki.LanguagesServerClient = randomNameList(rand) if rand.Int31()&1 == 1 { ki.FirstKexFollows = true } return reflect.ValueOf(ki) } func (*kexDHInitMsg) Generate(rand *rand.Rand, size int) reflect.Value { dhi := &kexDHInitMsg{} dhi.X = randomInt(rand) return reflect.ValueOf(dhi) } var ( _kexInitMsg = new(kexInitMsg).Generate(rand.New(rand.NewSource(0)), 10).Elem().Interface() _kexDHInitMsg = new(kexDHInitMsg).Generate(rand.New(rand.NewSource(0)), 10).Elem().Interface() _kexInit = Marshal(_kexInitMsg) _kexDHInit = Marshal(_kexDHInitMsg) ) func BenchmarkMarshalKexInitMsg(b *testing.B) { for i := 0; i < b.N; i++ { Marshal(_kexInitMsg) } } func BenchmarkUnmarshalKexInitMsg(b *testing.B) { m := new(kexInitMsg) for i := 0; i < b.N; i++ { Unmarshal(_kexInit, m) } } func BenchmarkMarshalKexDHInitMsg(b *testing.B) { for i := 0; i < b.N; i++ { Marshal(_kexDHInitMsg) } } func BenchmarkUnmarshalKexDHInitMsg(b *testing.B) { m := new(kexDHInitMsg) for i := 0; i < b.N; i++ { Unmarshal(_kexDHInit, m) } }
{ "pile_set_name": "Github" }
AVR (Audio Visual Research) sound format
----------------------------------------

version 1.0
- Atari ST/STE format
- developed by 2-BIT systems (Microdeal)
- source : ST mag #42, page 26, by Sebastien Mougey
- 0xnnnn are hexadecimal values

offset  type   length  name           comments
--------------------------------------------------------------------------------
0       char   4       ID             format ID == "2BIT"
4       char   8       name           sample name (unused space filled with 0)
12      short  1       mono/stereo    0=mono, -1 (0xffff)=stereo
                                      With stereo, samples are alternated, the
                                      first voice is the left :
                                      (LRLRLRLRLRLRLRLRLR...)
14      short  1       resolution     8, 12 or 16 (bits)
16      short  1       signed or not  0=unsigned, -1 (0xffff)=signed
18      short  1       loop or not    0=no loop, -1 (0xffff)=loop on
20      short  1       MIDI note      0xffnn, where 0<=nn<=127
                                      0xffff means "no MIDI note defined"
22      byte   1       Replay speed   Frequency in the Replay software
                                      0=5.485 kHz, 1=8.084 kHz, 2=10.971 kHz,
                                      3=16.168 kHz, 4=21.942 kHz, 5=32.336 kHz,
                                      6=43.885 kHz, 7=47.261 kHz,
                                      -1 (0xff)=no defined frequency
23      byte   3       sample rate    in Hertz
26      long   1       size           in bytes (2*bytes in stereo)
30      long   1       loop begin     0 for no loop
34      long   1       loop size      equal to 'size' for no loop
38      byte   26      reserved       filled with 0
64      byte   64      user data
128     bytes  ?       sample data    (12 bits samples are coded on 16 bits :
                                      0000 xxxx xxxx xxxx)
-------------------------------------------------------------------------------

Example:
--------
0       "2BIT"
4       "lovebeat"
12      0x0000          mono
14      0x0010          16 bits
16      0xffff          signed
18      0xffff          loop on
20      0xffff          no MIDI note
22      0xf0            Replay freq
23      0x007441        freq = 29.761 kHz
26      0x00012624      size = 75300 samples
30      0x000001d1      loop begin = 465
34      0x000119f0      loop end = 72176
38      0000 00000000   "AVR by P. Segerdahl "
64      Converted with "Zero-X" written by Peter Segerdahl, 1994 Sweden
128     0x0000 0x0001 0xfff6 0xfff7 ... 0x24CC0 0xFFB3 0xFFE7 0x0087 0x0065

file size = 128 bytes header + 75300*16 bits = 0x24cc8 bytes
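
A minimal Python sketch of a header reader follows, derived only from the table
above. The struct layout, the big-endian byte order (assumed because the format
targets the Atari ST), and all function and field names are illustrative
assumptions, not part of the original specification.

# avr_header.py -- illustrative sketch only
import struct

# 128-byte fixed header, big-endian, fields in the order of the table above
AVR_HEADER = struct.Struct(">4s8shhhhHB3sIII26s64s")

def read_avr_header(path):
    """Parse the fixed 128-byte AVR header of the file at `path`."""
    with open(path, "rb") as f:
        raw = f.read(AVR_HEADER.size)
    if len(raw) < AVR_HEADER.size:
        raise ValueError("file too short for an AVR header")
    (magic, name, stereo, resolution, signed, loop, midi, replay,
     rate3, size, loop_begin, loop_size, _reserved, user) = AVR_HEADER.unpack(raw)
    if magic != b"2BIT":
        raise ValueError("not an AVR ('2BIT') file")
    return {
        "name": name.rstrip(b"\x00").decode("ascii", "replace"),
        "stereo": stereo == -1,
        "bits": resolution,
        "signed": signed == -1,
        "loop": loop == -1,
        "midi_note": None if midi == 0xFFFF else midi & 0xFF,   # 0xffnn -> nn
        "replay_speed": None if replay == 0xFF else replay,
        "sample_rate": int.from_bytes(rate3, "big"),            # 3-byte rate in Hz
        "size": size,
        "loop_begin": loop_begin,
        "loop_size": loop_size,
        "user_data": user,
    }

On the example file above this would report sample_rate = 29761 (0x007441) and
size = 75300 (0x00012624), matching the listed values.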
{ "pile_set_name": "Github" }
// -*- C++ -*- // Copyright (C) 2005-2013 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the terms // of the GNU General Public License as published by the Free Software // Foundation; either version 3, or (at your option) any later // version. // This library is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // General Public License for more details. // Under Section 7 of GPL version 3, you are granted additional // permissions described in the GCC Runtime Library Exception, version // 3.1, as published by the Free Software Foundation. // You should have received a copy of the GNU General Public License and // a copy of the GCC Runtime Library Exception along with this program; // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see // <http://www.gnu.org/licenses/>. // Copyright (C) 2004 Ami Tavory and Vladimir Dreizin, IBM-HRL. // Permission to use, copy, modify, sell, and distribute this software // is hereby granted without fee, provided that the above copyright // notice appears in all copies, and that both that copyright notice // and this permission notice appear in supporting documentation. None // of the above authors, nor IBM Haifa Research Laboratories, make any // representation about the suitability of this software for any // purpose. It is provided "as is" without express or implied // warranty. /** * @file rb_tree_map_/erase_fn_imps.hpp * Contains an implementation for rb_tree_. */ PB_DS_CLASS_T_DEC inline bool PB_DS_CLASS_C_DEC:: erase(key_const_reference r_key) { point_iterator it = this->find(r_key); if (it == base_type::end()) return false; erase(it); return true; } PB_DS_CLASS_T_DEC inline typename PB_DS_CLASS_C_DEC::iterator PB_DS_CLASS_C_DEC:: erase(iterator it) { PB_DS_ASSERT_VALID((*this)) if (it == base_type::end()) return it; iterator ret_it = it; ++ret_it; erase_node(it.m_p_nd); PB_DS_ASSERT_VALID((*this)) return ret_it; } PB_DS_CLASS_T_DEC inline typename PB_DS_CLASS_C_DEC::reverse_iterator PB_DS_CLASS_C_DEC:: erase(reverse_iterator it) { PB_DS_ASSERT_VALID((*this)) if (it.m_p_nd == base_type::m_p_head) return it; reverse_iterator ret_it = it; ++ret_it; erase_node(it.m_p_nd); PB_DS_ASSERT_VALID((*this)) return ret_it; } PB_DS_CLASS_T_DEC template<typename Pred> inline typename PB_DS_CLASS_C_DEC::size_type PB_DS_CLASS_C_DEC:: erase_if(Pred pred) { PB_DS_ASSERT_VALID((*this)) size_type num_ersd = 0; iterator it = base_type::begin(); while (it != base_type::end()) { if (pred(*it)) { ++num_ersd; it = erase(it); } else ++it; } PB_DS_ASSERT_VALID((*this)) return num_ersd; } PB_DS_CLASS_T_DEC void PB_DS_CLASS_C_DEC:: erase_node(node_pointer p_nd) { remove_node(p_nd); base_type::actual_erase_node(p_nd); PB_DS_ASSERT_VALID((*this)) } PB_DS_CLASS_T_DEC void PB_DS_CLASS_C_DEC:: remove_node(node_pointer p_z) { this->update_min_max_for_erased_node(p_z); node_pointer p_y = p_z; node_pointer p_x = 0; node_pointer p_new_x_parent = 0; if (p_y->m_p_left == 0) p_x = p_y->m_p_right; else if (p_y->m_p_right == 0) p_x = p_y->m_p_left; else { p_y = p_y->m_p_right; while (p_y->m_p_left != 0) p_y = p_y->m_p_left; p_x = p_y->m_p_right; } if (p_y == p_z) { p_new_x_parent = p_y->m_p_parent; if (p_x != 0) p_x->m_p_parent = p_y->m_p_parent; if (base_type::m_p_head->m_p_parent == p_z) base_type::m_p_head->m_p_parent = p_x; 
else if (p_z->m_p_parent->m_p_left == p_z) { p_y->m_p_left = p_z->m_p_parent; p_z->m_p_parent->m_p_left = p_x; } else { p_y->m_p_left = 0; p_z->m_p_parent->m_p_right = p_x; } } else { p_z->m_p_left->m_p_parent = p_y; p_y->m_p_left = p_z->m_p_left; if (p_y != p_z->m_p_right) { p_new_x_parent = p_y->m_p_parent; if (p_x != 0) p_x->m_p_parent = p_y->m_p_parent; p_y->m_p_parent->m_p_left = p_x; p_y->m_p_right = p_z->m_p_right; p_z->m_p_right->m_p_parent = p_y; } else p_new_x_parent = p_y; if (base_type::m_p_head->m_p_parent == p_z) base_type::m_p_head->m_p_parent = p_y; else if (p_z->m_p_parent->m_p_left == p_z) p_z->m_p_parent->m_p_left = p_y; else p_z->m_p_parent->m_p_right = p_y; p_y->m_p_parent = p_z->m_p_parent; std::swap(p_y->m_red, p_z->m_red); p_y = p_z; } this->update_to_top(p_new_x_parent, (node_update* )this); if (p_y->m_red) return; remove_fixup(p_x, p_new_x_parent); } PB_DS_CLASS_T_DEC void PB_DS_CLASS_C_DEC:: remove_fixup(node_pointer p_x, node_pointer p_new_x_parent) { _GLIBCXX_DEBUG_ASSERT(p_x == 0 || p_x->m_p_parent == p_new_x_parent); while (p_x != base_type::m_p_head->m_p_parent && is_effectively_black(p_x)) if (p_x == p_new_x_parent->m_p_left) { node_pointer p_w = p_new_x_parent->m_p_right; if (p_w->m_red) { p_w->m_red = false; p_new_x_parent->m_red = true; base_type::rotate_left(p_new_x_parent); p_w = p_new_x_parent->m_p_right; } if (is_effectively_black(p_w->m_p_left) && is_effectively_black(p_w->m_p_right)) { p_w->m_red = true; p_x = p_new_x_parent; p_new_x_parent = p_new_x_parent->m_p_parent; } else { if (is_effectively_black(p_w->m_p_right)) { if (p_w->m_p_left != 0) p_w->m_p_left->m_red = false; p_w->m_red = true; base_type::rotate_right(p_w); p_w = p_new_x_parent->m_p_right; } p_w->m_red = p_new_x_parent->m_red; p_new_x_parent->m_red = false; if (p_w->m_p_right != 0) p_w->m_p_right->m_red = false; base_type::rotate_left(p_new_x_parent); this->update_to_top(p_new_x_parent, (node_update* )this); break; } } else { node_pointer p_w = p_new_x_parent->m_p_left; if (p_w->m_red == true) { p_w->m_red = false; p_new_x_parent->m_red = true; base_type::rotate_right(p_new_x_parent); p_w = p_new_x_parent->m_p_left; } if (is_effectively_black(p_w->m_p_right) && is_effectively_black(p_w->m_p_left)) { p_w->m_red = true; p_x = p_new_x_parent; p_new_x_parent = p_new_x_parent->m_p_parent; } else { if (is_effectively_black(p_w->m_p_left)) { if (p_w->m_p_right != 0) p_w->m_p_right->m_red = false; p_w->m_red = true; base_type::rotate_left(p_w); p_w = p_new_x_parent->m_p_left; } p_w->m_red = p_new_x_parent->m_red; p_new_x_parent->m_red = false; if (p_w->m_p_left != 0) p_w->m_p_left->m_red = false; base_type::rotate_right(p_new_x_parent); this->update_to_top(p_new_x_parent, (node_update* )this); break; } } if (p_x != 0) p_x->m_red = false; }
{ "pile_set_name": "Github" }
#-- encoding: UTF-8 #-- copyright # OpenProject is an open source project management software. # Copyright (C) 2012-2020 the OpenProject GmbH # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License version 3. # # OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows: # Copyright (C) 2006-2017 Jean-Philippe Lang # Copyright (C) 2010-2013 the ChiliProject Team # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # See docs/COPYRIGHT.rdoc for more details. #++ module ::UserConsentHelper def consent_param? params[:consent_check].present? end def user_consent_required? # Ensure consent is enabled and a text is provided Setting.consent_required? && consent_configured? end ## # Gets consent instructions for the given user. # # @param user [User] The user to get instructions for. # @param locale [String] ISO-639-1 code for the desired locale (e.g. de, en, fr). # `I18n.locale` is set for each request individually depending # among other things on the user's Accept-Language headers. # @return [String] Instructions in the respective language. def user_consent_instructions(user, locale: I18n.locale) all = Setting.consent_info all.fetch(locale) { all.values.first } end def consent_checkbox_label(locale: I18n.locale) I18n.t('consent.checkbox_label', locale: locale) end def consent_configured? if Setting.consent_info.count == 0 Rails.logger.error 'Instance is configured to require consent, but no consent_info has been set.' false else true end end end
{ "pile_set_name": "Github" }
//ERROR: match
import java.util.ArrayList;
//ERROR: match
import java.util.List;

class Foo {
}
{ "pile_set_name": "Github" }
45
{ "pile_set_name": "Github" }
using Chloe.DbExpressions;
using System.Linq;

namespace Chloe.SqlServer.MethodHandlers
{
    class EndsWith_Handler : IMethodHandler
    {
        public bool CanProcess(DbMethodCallExpression exp)
        {
            if (exp.Method != PublicConstants.MethodInfo_String_EndsWith)
                return false;

            return true;
        }
        public void Process(DbMethodCallExpression exp, SqlGenerator generator)
        {
            // Translate string.EndsWith(arg) into SQL: <object> LIKE '%' + <arg>
            exp.Object.Accept(generator);
            generator.SqlBuilder.Append(" LIKE '%' + ");
            exp.Arguments.First().Accept(generator);
        }
    }
}
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!-- Copyright 2013 The Android Open Source Project Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <resources> <!-- Activity themes --> <style name="Theme.Base" parent="android:Theme.Material.Light"> </style> </resources>
{ "pile_set_name": "Github" }
/* ---------------------------------------------------------------------------- */ /* Atmel Microcontroller Software Support */ /* SAM Software Package License */ /* ---------------------------------------------------------------------------- */ /* Copyright (c) 2014, Atmel Corporation */ /* */ /* All rights reserved. */ /* */ /* Redistribution and use in source and binary forms, with or without */ /* modification, are permitted provided that the following condition is met: */ /* */ /* - Redistributions of source code must retain the above copyright notice, */ /* this list of conditions and the disclaimer below. */ /* */ /* Atmel's name may not be used to endorse or promote products derived from */ /* this software without specific prior written permission. */ /* */ /* DISCLAIMER: THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR */ /* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */ /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE */ /* DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR ANY DIRECT, INDIRECT, */ /* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT */ /* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, */ /* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */ /* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING */ /* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, */ /* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /* ---------------------------------------------------------------------------- */ #ifndef _SAM_ACC_COMPONENT_ #define _SAM_ACC_COMPONENT_ /* ============================================================================= */ /** SOFTWARE API DEFINITION FOR Analog Comparator Controller */ /* ============================================================================= */ /** \addtogroup SAM_ACC Analog Comparator Controller */ /*@{*/ #if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) /** \brief Acc hardware registers */ typedef struct { __O uint32_t ACC_CR; /**< \brief (Acc Offset: 0x00) Control Register */ __IO uint32_t ACC_MR; /**< \brief (Acc Offset: 0x04) Mode Register */ __I uint32_t Reserved1[7]; __O uint32_t ACC_IER; /**< \brief (Acc Offset: 0x24) Interrupt Enable Register */ __O uint32_t ACC_IDR; /**< \brief (Acc Offset: 0x28) Interrupt Disable Register */ __I uint32_t ACC_IMR; /**< \brief (Acc Offset: 0x2C) Interrupt Mask Register */ __I uint32_t ACC_ISR; /**< \brief (Acc Offset: 0x30) Interrupt Status Register */ __I uint32_t Reserved2[24]; __IO uint32_t ACC_ACR; /**< \brief (Acc Offset: 0x94) Analog Control Register */ __I uint32_t Reserved3[19]; __IO uint32_t ACC_WPMR; /**< \brief (Acc Offset: 0xE4) Write Protection Mode Register */ __I uint32_t ACC_WPSR; /**< \brief (Acc Offset: 0xE8) Write Protection Status Register */ __I uint32_t Reserved4[4]; __I uint32_t ACC_VER; /**< \brief (Acc Offset: 0xFC) Version Register */ } Acc; #endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */ /* -------- ACC_CR : (ACC Offset: 0x00) Control Register -------- */ #define ACC_CR_SWRST (0x1u << 0) /**< \brief (ACC_CR) Software Reset */ /* -------- ACC_MR : (ACC Offset: 0x04) Mode Register -------- */ #define ACC_MR_SELMINUS_Pos 0 #define ACC_MR_SELMINUS_Msk (0x7u << ACC_MR_SELMINUS_Pos) /**< \brief (ACC_MR) Selection for Minus Comparator Input */ #define ACC_MR_SELMINUS_TS (0x0u << 0) /**< \brief (ACC_MR) Select TS */ #define 
ACC_MR_SELMINUS_ADVREF (0x1u << 0) /**< \brief (ACC_MR) Select ADVREF */ #define ACC_MR_SELMINUS_DAC0 (0x2u << 0) /**< \brief (ACC_MR) Select DAC0 */ #define ACC_MR_SELMINUS_DAC1 (0x3u << 0) /**< \brief (ACC_MR) Select DAC1 */ #define ACC_MR_SELMINUS_AD0 (0x4u << 0) /**< \brief (ACC_MR) Select AD0 */ #define ACC_MR_SELMINUS_AD1 (0x5u << 0) /**< \brief (ACC_MR) Select AD1 */ #define ACC_MR_SELMINUS_AD2 (0x6u << 0) /**< \brief (ACC_MR) Select AD2 */ #define ACC_MR_SELMINUS_AD3 (0x7u << 0) /**< \brief (ACC_MR) Select AD3 */ #define ACC_MR_SELPLUS_Pos 4 #define ACC_MR_SELPLUS_Msk (0x7u << ACC_MR_SELPLUS_Pos) /**< \brief (ACC_MR) Selection For Plus Comparator Input */ #define ACC_MR_SELPLUS_AD0 (0x0u << 4) /**< \brief (ACC_MR) Select AD0 */ #define ACC_MR_SELPLUS_AD1 (0x1u << 4) /**< \brief (ACC_MR) Select AD1 */ #define ACC_MR_SELPLUS_AD2 (0x2u << 4) /**< \brief (ACC_MR) Select AD2 */ #define ACC_MR_SELPLUS_AD3 (0x3u << 4) /**< \brief (ACC_MR) Select AD3 */ #define ACC_MR_SELPLUS_AD4 (0x4u << 4) /**< \brief (ACC_MR) Select AD4 */ #define ACC_MR_SELPLUS_AD5 (0x5u << 4) /**< \brief (ACC_MR) Select AD5 */ #define ACC_MR_SELPLUS_AD6 (0x6u << 4) /**< \brief (ACC_MR) Select AD6 */ #define ACC_MR_SELPLUS_AD7 (0x7u << 4) /**< \brief (ACC_MR) Select AD7 */ #define ACC_MR_ACEN (0x1u << 8) /**< \brief (ACC_MR) Analog Comparator Enable */ #define ACC_MR_ACEN_DIS (0x0u << 8) /**< \brief (ACC_MR) Analog comparator disabled. */ #define ACC_MR_ACEN_EN (0x1u << 8) /**< \brief (ACC_MR) Analog comparator enabled. */ #define ACC_MR_EDGETYP_Pos 9 #define ACC_MR_EDGETYP_Msk (0x3u << ACC_MR_EDGETYP_Pos) /**< \brief (ACC_MR) Edge Type */ #define ACC_MR_EDGETYP_RISING (0x0u << 9) /**< \brief (ACC_MR) Only rising edge of comparator output */ #define ACC_MR_EDGETYP_FALLING (0x1u << 9) /**< \brief (ACC_MR) Falling edge of comparator output */ #define ACC_MR_EDGETYP_ANY (0x2u << 9) /**< \brief (ACC_MR) Any edge of comparator output */ #define ACC_MR_INV (0x1u << 12) /**< \brief (ACC_MR) Invert Comparator Output */ #define ACC_MR_INV_DIS (0x0u << 12) /**< \brief (ACC_MR) Analog comparator output is directly processed. */ #define ACC_MR_INV_EN (0x1u << 12) /**< \brief (ACC_MR) Analog comparator output is inverted prior to being processed. */ #define ACC_MR_SELFS (0x1u << 13) /**< \brief (ACC_MR) Selection Of Fault Source */ #define ACC_MR_SELFS_CF (0x0u << 13) /**< \brief (ACC_MR) The CF flag is used to drive the FAULT output. */ #define ACC_MR_SELFS_OUTPUT (0x1u << 13) /**< \brief (ACC_MR) The output of the analog comparator flag is used to drive the FAULT output. */ #define ACC_MR_FE (0x1u << 14) /**< \brief (ACC_MR) Fault Enable */ #define ACC_MR_FE_DIS (0x0u << 14) /**< \brief (ACC_MR) The FAULT output is tied to 0. */ #define ACC_MR_FE_EN (0x1u << 14) /**< \brief (ACC_MR) The FAULT output is driven by the signal defined by SELFS. 
*/ /* -------- ACC_IER : (ACC Offset: 0x24) Interrupt Enable Register -------- */ #define ACC_IER_CE (0x1u << 0) /**< \brief (ACC_IER) Comparison Edge */ /* -------- ACC_IDR : (ACC Offset: 0x28) Interrupt Disable Register -------- */ #define ACC_IDR_CE (0x1u << 0) /**< \brief (ACC_IDR) Comparison Edge */ /* -------- ACC_IMR : (ACC Offset: 0x2C) Interrupt Mask Register -------- */ #define ACC_IMR_CE (0x1u << 0) /**< \brief (ACC_IMR) Comparison Edge */ /* -------- ACC_ISR : (ACC Offset: 0x30) Interrupt Status Register -------- */ #define ACC_ISR_CE (0x1u << 0) /**< \brief (ACC_ISR) Comparison Edge */ #define ACC_ISR_SCO (0x1u << 1) /**< \brief (ACC_ISR) Synchronized Comparator Output */ #define ACC_ISR_MASK (0x1u << 31) /**< \brief (ACC_ISR) Flag Mask */ /* -------- ACC_ACR : (ACC Offset: 0x94) Analog Control Register -------- */ #define ACC_ACR_ISEL (0x1u << 0) /**< \brief (ACC_ACR) Current Selection */ #define ACC_ACR_ISEL_LOPW (0x0u << 0) /**< \brief (ACC_ACR) Low-power option. */ #define ACC_ACR_ISEL_HISP (0x1u << 0) /**< \brief (ACC_ACR) High-speed option. */ #define ACC_ACR_HYST_Pos 1 #define ACC_ACR_HYST_Msk (0x3u << ACC_ACR_HYST_Pos) /**< \brief (ACC_ACR) Hysteresis Selection */ #define ACC_ACR_HYST(value) ((ACC_ACR_HYST_Msk & ((value) << ACC_ACR_HYST_Pos))) /* -------- ACC_WPMR : (ACC Offset: 0xE4) Write Protection Mode Register -------- */ #define ACC_WPMR_WPEN (0x1u << 0) /**< \brief (ACC_WPMR) Write Protection Enable */ #define ACC_WPMR_WPKEY_Pos 8 #define ACC_WPMR_WPKEY_Msk (0xffffffu << ACC_WPMR_WPKEY_Pos) /**< \brief (ACC_WPMR) Write Protection Key */ #define ACC_WPMR_WPKEY_PASSWD (0x414343u << 8) /**< \brief (ACC_WPMR) Writing any other value in this field aborts the write operation of the WPEN bit.Always reads as 0. */ /* -------- ACC_WPSR : (ACC Offset: 0xE8) Write Protection Status Register -------- */ #define ACC_WPSR_WPVS (0x1u << 0) /**< \brief (ACC_WPSR) Write Protection Violation Status */ /* -------- ACC_VER : (ACC Offset: 0xFC) Version Register -------- */ #define ACC_VER_VERSION_Pos 0 #define ACC_VER_VERSION_Msk (0xfffu << ACC_VER_VERSION_Pos) /**< \brief (ACC_VER) Version of the Hardware Module */ #define ACC_VER_MFN_Pos 16 #define ACC_VER_MFN_Msk (0x7u << ACC_VER_MFN_Pos) /**< \brief (ACC_VER) Metal Fix Number */ /*@}*/ #endif /* _SAM_ACC_COMPONENT_ */
{ "pile_set_name": "Github" }
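The bit-field macros above are intended to be OR-ed together when programming the comparator registers. A minimal C sketch of one possible configuration; the base address and the surrounding clock/interrupt setup are assumptions for illustration, not part of the header:

/* Sketch only: real projects take the ACC instance pointer from the device
 * header; the address below is hypothetical. */
#define ACC ((Acc *)0x40040000u)

static void acc_configure(void)
{
    /* Reset the comparator logic first. */
    ACC->ACC_CR = ACC_CR_SWRST;

    /* Compare AD0 (plus input) against DAC0 (minus input), enable the
     * comparator and trigger on any output edge. */
    ACC->ACC_MR = ACC_MR_SELPLUS_AD0
                | ACC_MR_SELMINUS_DAC0
                | ACC_MR_ACEN_EN
                | ACC_MR_EDGETYP_ANY;

    /* High-speed analog option with hysteresis level 1. */
    ACC->ACC_ACR = ACC_ACR_ISEL_HISP | ACC_ACR_HYST(1);

    /* Enable the comparison-edge interrupt. */
    ACC->ACC_IER = ACC_IER_CE;
}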
# Copyright (C) Dominik Picheta. All rights reserved. # BSD-3-Clause License. Look at license.txt for more info. import osproc, streams, unittest, strutils, os, sequtils, future var rootDir = getCurrentDir().parentDir() var exePath = rootDir / "bin" / addFileExt("choosenim", ExeExt) var nimbleDir = rootDir / "tests" / "nimbleDir" var choosenimDir = rootDir / "tests" / "choosenimDir" template cd*(dir: string, body: untyped) = ## Sets the current dir to ``dir``, executes ``body`` and restores the ## previous working dir. let lastDir = getCurrentDir() setCurrentDir(dir) body setCurrentDir(lastDir) template beginTest() = # Clear custom dirs. removeDir(nimbleDir) createDir(nimbleDir) removeDir(choosenimDir) createDir(choosenimDir) proc outputReader(stream: Stream, missedEscape: var bool): string = result = "" template handleEscape: untyped {.dirty.} = missedEscape = false result.add('\27') let escape = stream.readStr(1) result.add(escape) if escape[0] == '[': result.add(stream.readStr(2)) return # TODO: This would be much easier to implement if `peek` was supported. if missedEscape: handleEscape() while true: let c = stream.readStr(1) if c.len() == 0: return case c[0] of '\c', '\l': result.add(c[0]) return of '\27': if result.len > 0: missedEscape = true return handleEscape() else: result.add(c[0]) proc exec(args: varargs[string], exe=exePath, yes=true, liveOutput=false, global=false): tuple[output: string, exitCode: int] = var quotedArgs: seq[string] = @[exe] if yes: quotedArgs.add("-y") quotedArgs.add(@args) if not global: quotedArgs.add("--nimbleDir:" & nimbleDir) if exe != "nimble": quotedArgs.add("--chooseNimDir:" & choosenimDir) quotedArgs.add("--noColor") for i in 0..quotedArgs.len-1: if " " in quotedArgs[i]: quotedArgs[i] = "\"" & quotedArgs[i] & "\"" if not liveOutput: result = execCmdEx(quotedArgs.join(" ")) else: result.output = "" let process = startProcess(quotedArgs.join(" "), options={poEvalCommand, poStdErrToStdOut}) var missedEscape = false while true: if not process.outputStream.atEnd: let line = process.outputStream.outputReader(missedEscape) result.output.add(line) stdout.write(line) if line.len() != 0 and line[0] != '\27': stdout.flushFile() else: result.exitCode = process.peekExitCode() if result.exitCode != -1: break process.close() proc processOutput(output: string): seq[string] = output.strip.splitLines().filter((x: string) => (x.len > 0)) proc inLines(lines: seq[string], word: string): bool = for i in lines: if word.normalize in i.normalize: return true proc hasLine(lines: seq[string], line: string): bool = for i in lines: if i.normalize.strip() == line.normalize(): return true test "can compile choosenim": cd "..": let (_, exitCode) = exec("build", exe="nimble", global=true, liveOutput=true) check exitCode == QuitSuccess test "refuses invalid path": beginTest() block: let (output, exitCode) = exec(getTempDir() / "blahblah") check exitCode == QuitFailure check inLines(output.processOutput, "invalid") check inLines(output.processOutput, "version") check inLines(output.processOutput, "path") block: let (output, exitCode) = exec(getTempDir()) check exitCode == QuitFailure check inLines(output.processOutput, "no") check inLines(output.processOutput, "binary") check inLines(output.processOutput, "found") test "fails on bad flag": beginTest() let (output, exitCode) = exec("--qwetqsdweqwe") check exitCode == QuitFailure check inLines(output.processOutput, "unknown") check inLines(output.processOutput, "flag") test "can choose v0.16.0": beginTest() block: let (output, 
exitCode) = exec("0.16.0", liveOutput=true) check exitCode == QuitSuccess check inLines(output.processOutput, "building") check inLines(output.processOutput, "downloading") when defined(windows): check inLines(output.processOutput, "already built") else: check inLines(output.processOutput, "building tools") check hasLine(output.processOutput, "switched to nim 0.16.0") block: let (output, exitCode) = exec("0.16.0") check exitCode == QuitSuccess check hasLine(output.processOutput, "info: version 0.16.0 already selected") block: let (output, exitCode) = exec("--version", exe=nimbleDir / "bin" / "nimble") check exitCode == QuitSuccess check inLines(output.processOutput, "v0.8.2") when defined(linux): test "linux binary install": beginTest() block: let (output, exitCode) = exec("1.0.0", liveOutput=true) check exitCode == QuitSuccess check inLines(output.processOutput, "downloading") check inLines(output.processOutput, "already built") check hasLine(output.processOutput, "switched to nim 1.0.0") check not dirExists(choosenimDir / "toolchains" / "nim-1.0.0" / "c_code") test "can update devel with git": beginTest() block: let (output, exitCode) = exec(@["devel", "--latest"], liveOutput=true) check exitCode == QuitSuccess check inLines(output.processOutput, "extracting") check inLines(output.processOutput, "setting") check inLines(output.processOutput, "latest changes") check inLines(output.processOutput, "building") block: let (output, exitCode) = exec(@["update", "devel", "--latest"], liveOutput=true) check exitCode == QuitSuccess check not inLines(output.processOutput, "extracting") check not inLines(output.processOutput, "setting") check inLines(output.processOutput, "updating") check inLines(output.processOutput, "latest changes") check inLines(output.processOutput, "building") test "can install and update nightlies": beginTest() block: # Install nightly let (output, exitCode) = exec("devel", liveOutput=true) # Travis runs into Github API limit if not inLines(output.processOutput, "unavailable"): check exitCode == QuitSuccess check inLines(output.processOutput, "devel from") check inLines(output.processOutput, "setting") when not defined(macosx): if not inLines(output.processOutput, "recent nightly"): check inLines(output.processOutput, "already built") check inLines(output.processOutput, "to Nim #devel") block: # Update nightly let (output, exitCode) = exec(@["update", "devel"], liveOutput=true) # Travis runs into Github API limit if not inLines(output.processOutput, "unavailable"): check exitCode == QuitSuccess check inLines(output.processOutput, "updating") check inLines(output.processOutput, "devel from") check inLines(output.processOutput, "setting") when not defined(macosx): if not inLines(output.processOutput, "recent nightly"): check inLines(output.processOutput, "already built") block: # Update to devel latest let (output, exitCode) = exec(@["update", "devel", "--latest"], liveOutput=true) check exitCode == QuitSuccess when not defined(macosx): check not inLines(output.processOutput, "extracting") check not inLines(output.processOutput, "setting") check inLines(output.processOutput, "updating") check inLines(output.processOutput, "latest changes") check inLines(output.processOutput, "building") test "can update self": # updateSelf() doesn't use options --choosenimDir and --nimbleDir. It's used getAppDir(). # This will rewrite $project/bin dir, it's dangerous. # So, this test copy bin/choosenim to test/choosenimDir/choosenim, and use it. 
beginTest() let testExePath = choosenimDir / extractFilename(exePath) copyFileWithPermissions(exePath, testExePath) block : let (output, exitCode) = exec(["update", "self", "--debug", "--force"], exe=testExePath, liveOutput=true) check exitCode == QuitSuccess check inLines(output.processOutput, "Info: Updated choosenim to version")
{ "pile_set_name": "Github" }
# ========================================================================= # This makefile was generated by # Bakefile 0.2.9 (http://www.bakefile.org) # Do not modify, all changes will be overwritten! # ========================================================================= include ../../build/msw/config.gcc # ------------------------------------------------------------------------- # Do not modify the rest of this file! # ------------------------------------------------------------------------- ### Variables: ### CPPDEPS = -MT$@ [email protected] -MD -MP WX_RELEASE_NODOT = 30 COMPILER_PREFIX = gcc OBJS = \ $(COMPILER_PREFIX)$(COMPILER_VERSION)_$(PORTNAME)$(WXUNIVNAME)$(WXUNICODEFLAG)$(WXDEBUGFLAG)$(WXDLLFLAG)$(CFG) LIBDIRNAME = \ .\..\..\lib\$(COMPILER_PREFIX)$(COMPILER_VERSION)_$(LIBTYPE_SUFFIX)$(CFG) SETUPHDIR = \ $(LIBDIRNAME)\$(PORTNAME)$(WXUNIVNAME)$(WXUNICODEFLAG)$(WXDEBUGFLAG) PROPGRID_CXXFLAGS = $(__DEBUGINFO) $(__OPTIMIZEFLAG_2) $(__THREADSFLAG) \ $(GCCFLAGS) -DHAVE_W32API_H -D__WXMSW__ $(__WXUNIV_DEFINE_p) \ $(__DEBUG_DEFINE_p) $(__NDEBUG_DEFINE_p) $(__EXCEPTIONS_DEFINE_p) \ $(__RTTI_DEFINE_p) $(__THREAD_DEFINE_p) $(__UNICODE_DEFINE_p) \ $(__MSLU_DEFINE_p) -I$(SETUPHDIR) -I.\..\..\include \ $(____CAIRO_INCLUDEDIR_FILENAMES_p) -W -Wall -I. $(__DLLFLAG_p) \ -I.\..\..\samples -DNOPCH $(__RTTIFLAG_5) $(__EXCEPTIONSFLAG_6) \ -Wno-ctor-dtor-privacy $(CPPFLAGS) $(CXXFLAGS) PROPGRID_OBJECTS = \ $(OBJS)\propgrid_propgrid.o \ $(OBJS)\propgrid_propgrid_minimal.o \ $(OBJS)\propgrid_sampleprops.o \ $(OBJS)\propgrid_tests.o \ $(OBJS)\propgrid_sample_rc.o ### Conditionally set variables: ### ifeq ($(GCC_VERSION),2.95) GCCFLAGS = -fvtable-thunks endif ifeq ($(USE_GUI),0) PORTNAME = base endif ifeq ($(USE_GUI),1) PORTNAME = msw$(TOOLKIT_VERSION) endif ifeq ($(OFFICIAL_BUILD),1) COMPILER_VERSION = ERROR-COMPILER-VERSION-MUST-BE-SET-FOR-OFFICIAL-BUILD endif ifeq ($(BUILD),debug) WXDEBUGFLAG = d endif ifeq ($(UNICODE),1) WXUNICODEFLAG = u endif ifeq ($(WXUNIV),1) WXUNIVNAME = univ endif ifeq ($(SHARED),1) WXDLLFLAG = dll endif ifeq ($(SHARED),0) LIBTYPE_SUFFIX = lib endif ifeq ($(SHARED),1) LIBTYPE_SUFFIX = dll endif ifeq ($(MONOLITHIC),0) EXTRALIBS_FOR_BASE = endif ifeq ($(MONOLITHIC),1) EXTRALIBS_FOR_BASE = endif ifeq ($(BUILD),debug) __OPTIMIZEFLAG_2 = -O0 endif ifeq ($(BUILD),release) __OPTIMIZEFLAG_2 = -O2 endif ifeq ($(USE_RTTI),0) __RTTIFLAG_5 = -fno-rtti endif ifeq ($(USE_RTTI),1) __RTTIFLAG_5 = endif ifeq ($(USE_EXCEPTIONS),0) __EXCEPTIONSFLAG_6 = -fno-exceptions endif ifeq ($(USE_EXCEPTIONS),1) __EXCEPTIONSFLAG_6 = endif ifeq ($(WXUNIV),1) __WXUNIV_DEFINE_p = -D__WXUNIVERSAL__ endif ifeq ($(WXUNIV),1) __WXUNIV_DEFINE_p_1 = --define __WXUNIVERSAL__ endif ifeq ($(DEBUG_FLAG),0) __DEBUG_DEFINE_p = -DwxDEBUG_LEVEL=0 endif ifeq ($(DEBUG_FLAG),0) __DEBUG_DEFINE_p_1 = --define wxDEBUG_LEVEL=0 endif ifeq ($(BUILD),release) __NDEBUG_DEFINE_p = -DNDEBUG endif ifeq ($(BUILD),release) __NDEBUG_DEFINE_p_1 = --define NDEBUG endif ifeq ($(USE_EXCEPTIONS),0) __EXCEPTIONS_DEFINE_p = -DwxNO_EXCEPTIONS endif ifeq ($(USE_EXCEPTIONS),0) __EXCEPTIONS_DEFINE_p_1 = --define wxNO_EXCEPTIONS endif ifeq ($(USE_RTTI),0) __RTTI_DEFINE_p = -DwxNO_RTTI endif ifeq ($(USE_RTTI),0) __RTTI_DEFINE_p_1 = --define wxNO_RTTI endif ifeq ($(USE_THREADS),0) __THREAD_DEFINE_p = -DwxNO_THREADS endif ifeq ($(USE_THREADS),0) __THREAD_DEFINE_p_1 = --define wxNO_THREADS endif ifeq ($(UNICODE),0) __UNICODE_DEFINE_p = -DwxUSE_UNICODE=0 endif ifeq ($(UNICODE),1) __UNICODE_DEFINE_p = -D_UNICODE endif ifeq ($(UNICODE),0) 
__UNICODE_DEFINE_p_1 = --define wxUSE_UNICODE=0 endif ifeq ($(UNICODE),1) __UNICODE_DEFINE_p_1 = --define _UNICODE endif ifeq ($(MSLU),1) __MSLU_DEFINE_p = -DwxUSE_UNICODE_MSLU=1 endif ifeq ($(MSLU),1) __MSLU_DEFINE_p_1 = --define wxUSE_UNICODE_MSLU=1 endif ifeq ($(USE_CAIRO),1) ____CAIRO_INCLUDEDIR_FILENAMES_p = -I$(CAIRO_ROOT)\include\cairo endif ifeq ($(USE_CAIRO),1) __CAIRO_INCLUDEDIR_p = --include-dir $(CAIRO_ROOT)/include/cairo endif ifeq ($(SHARED),1) __DLLFLAG_p = -DWXUSINGDLL endif ifeq ($(SHARED),1) __DLLFLAG_p_1 = --define WXUSINGDLL endif ifeq ($(MONOLITHIC),0) __WXLIB_PROPGRID_p = \ -lwx$(PORTNAME)$(WXUNIVNAME)$(WX_RELEASE_NODOT)$(WXUNICODEFLAG)$(WXDEBUGFLAG)$(WX_LIB_FLAVOUR)_propgrid endif ifeq ($(MONOLITHIC),0) __WXLIB_ADV_p = \ -lwx$(PORTNAME)$(WXUNIVNAME)$(WX_RELEASE_NODOT)$(WXUNICODEFLAG)$(WXDEBUGFLAG)$(WX_LIB_FLAVOUR)_adv endif ifeq ($(MONOLITHIC),0) __WXLIB_CORE_p = \ -lwx$(PORTNAME)$(WXUNIVNAME)$(WX_RELEASE_NODOT)$(WXUNICODEFLAG)$(WXDEBUGFLAG)$(WX_LIB_FLAVOUR)_core endif ifeq ($(MONOLITHIC),0) __WXLIB_BASE_p = \ -lwxbase$(WX_RELEASE_NODOT)$(WXUNICODEFLAG)$(WXDEBUGFLAG)$(WX_LIB_FLAVOUR) endif ifeq ($(MONOLITHIC),1) __WXLIB_MONO_p = \ -lwx$(PORTNAME)$(WXUNIVNAME)$(WX_RELEASE_NODOT)$(WXUNICODEFLAG)$(WXDEBUGFLAG)$(WX_LIB_FLAVOUR) endif ifeq ($(MONOLITHIC),1) ifeq ($(USE_STC),1) __LIB_SCINTILLA_IF_MONO_p = -lwxscintilla$(WXDEBUGFLAG) endif endif ifeq ($(USE_GUI),1) __LIB_TIFF_p = -lwxtiff$(WXDEBUGFLAG) endif ifeq ($(USE_GUI),1) __LIB_JPEG_p = -lwxjpeg$(WXDEBUGFLAG) endif ifeq ($(USE_GUI),1) __LIB_PNG_p = -lwxpng$(WXDEBUGFLAG) endif ifeq ($(MSLU),1) __UNICOWS_LIB_p = -lunicows endif ifeq ($(USE_CAIRO),1) __CAIRO_LIB_p = -lcairo endif ifeq ($(USE_CAIRO),1) ____CAIRO_LIBDIR_FILENAMES_p = -L$(CAIRO_ROOT)\lib endif ifeq ($(BUILD),debug) ifeq ($(DEBUG_INFO),default) __DEBUGINFO = -g endif endif ifeq ($(BUILD),release) ifeq ($(DEBUG_INFO),default) __DEBUGINFO = endif endif ifeq ($(DEBUG_INFO),0) __DEBUGINFO = endif ifeq ($(DEBUG_INFO),1) __DEBUGINFO = -g endif ifeq ($(USE_THREADS),0) __THREADSFLAG = endif ifeq ($(USE_THREADS),1) __THREADSFLAG = -mthreads endif all: $(OBJS) $(OBJS): -if not exist $(OBJS) mkdir $(OBJS) ### Targets: ### all: $(OBJS)\propgrid.exe clean: -if exist $(OBJS)\*.o del $(OBJS)\*.o -if exist $(OBJS)\*.d del $(OBJS)\*.d -if exist $(OBJS)\propgrid.exe del $(OBJS)\propgrid.exe $(OBJS)\propgrid.exe: $(PROPGRID_OBJECTS) $(OBJS)\propgrid_sample_rc.o $(CXX) -o $@ $(PROPGRID_OBJECTS) $(__DEBUGINFO) $(__THREADSFLAG) -L$(LIBDIRNAME) -Wl,--subsystem,windows -mwindows $(____CAIRO_LIBDIR_FILENAMES_p) $(LDFLAGS) $(__WXLIB_PROPGRID_p) $(__WXLIB_ADV_p) $(__WXLIB_CORE_p) $(__WXLIB_BASE_p) $(__WXLIB_MONO_p) $(__LIB_SCINTILLA_IF_MONO_p) $(__LIB_TIFF_p) $(__LIB_JPEG_p) $(__LIB_PNG_p) -lwxzlib$(WXDEBUGFLAG) -lwxregex$(WXUNICODEFLAG)$(WXDEBUGFLAG) -lwxexpat$(WXDEBUGFLAG) $(EXTRALIBS_FOR_BASE) $(__UNICOWS_LIB_p) $(__CAIRO_LIB_p) -lkernel32 -luser32 -lgdi32 -lcomdlg32 -lwinspool -lwinmm -lshell32 -lcomctl32 -lole32 -loleaut32 -luuid -lrpcrt4 -ladvapi32 -lwsock32 -lwininet $(OBJS)\propgrid_propgrid.o: ./propgrid.cpp $(CXX) -c -o $@ $(PROPGRID_CXXFLAGS) $(CPPDEPS) $< $(OBJS)\propgrid_propgrid_minimal.o: ./propgrid_minimal.cpp $(CXX) -c -o $@ $(PROPGRID_CXXFLAGS) $(CPPDEPS) $< $(OBJS)\propgrid_sampleprops.o: ./sampleprops.cpp $(CXX) -c -o $@ $(PROPGRID_CXXFLAGS) $(CPPDEPS) $< $(OBJS)\propgrid_tests.o: ./tests.cpp $(CXX) -c -o $@ $(PROPGRID_CXXFLAGS) $(CPPDEPS) $< $(OBJS)\propgrid_sample_rc.o: ./../sample.rc $(WINDRES) -i$< -o$@ --define __WXMSW__ $(__WXUNIV_DEFINE_p_1) 
$(__DEBUG_DEFINE_p_1) $(__NDEBUG_DEFINE_p_1) $(__EXCEPTIONS_DEFINE_p_1) $(__RTTI_DEFINE_p_1) $(__THREAD_DEFINE_p_1) $(__UNICODE_DEFINE_p_1) $(__MSLU_DEFINE_p_1) --include-dir $(SETUPHDIR) --include-dir ./../../include $(__CAIRO_INCLUDEDIR_p) --include-dir . $(__DLLFLAG_p_1) --include-dir ./../../samples --define NOPCH .PHONY: all clean SHELL := $(COMSPEC) # Dependencies tracking: -include $(OBJS)/*.d
{ "pile_set_name": "Github" }
/*---------------------------------------------------------- This Source Code Form is subject to the terms of the Mozilla Public License, v.2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. ----------------------------------------------------------*/ using ScriptEngine.Machine.Contexts; using System; using System.Linq; using ScriptEngine.Machine; namespace ScriptEngine.HostedScript.Library { [GlobalContext(Category = "Операции со строками")] public class StringOperations : GlobalContextBase<StringOperations> { readonly int STRTEMPLATE_ID; const string STRTEMPLATE_NAME_RU = "СтрШаблон"; const string STRTEMPLATE_NAME_EN = "StrTemplate"; public StringOperations() { STRTEMPLATE_ID = this.Methods.Count; } /// <summary> /// Получает строку на языке, заданном во втором параметре (коды языков в соответствии с ISO 639-1) /// или на текущем языке системы. /// </summary> /// <param name="src">Строка на нескольких языках</param> /// <param name="lang">Код языка (если не указан, возвращает вариант для текущего языка системы, /// если вариант не найден, то возвращает вариант для английского языка, если не задан вариант для английского языка, /// то возвращает первый вариант из списка)</param> [ContextMethod("НСтр", "NStr")] public string NStr(string src, string lang = null) { return Locale.NStr(src, lang); } /// <summary> /// Определяет, что строка начинается с указанной подстроки. /// </summary> /// <param name="inputString">Строка, начало которой проверяется на совпадение с подстрокой поиска.</param> /// <param name="searchString">Строка, содержащая предполагаемое начало строки. В случае если переданное значение является пустой строкой генерируется исключительная ситуация.</param> [ContextMethod("СтрНачинаетсяС", "StrStartsWith")] public bool StrStartsWith(string inputString, string searchString) { bool result = false; if(!string.IsNullOrEmpty(inputString)) { if (!string.IsNullOrEmpty(searchString)) { result = inputString.StartsWith(searchString); } else throw new RuntimeException("Ошибка при вызове метода контекста (СтрНачинаетсяС): Недопустимое значение параметра (параметр номер '2')"); } return result; } /// <summary> /// Определяет, заканчивается ли строка указанной подстрокой. /// </summary> /// <param name="inputString">Строка, окончание которой проверяется на совпадение с подстрокой поиска.</param> /// <param name="searchString">Строка, содержащая предполагаемое окончание строки. В случае если переданное значение является пустой строкой генерируется исключительная ситуация.</param> [ContextMethod("СтрЗаканчиваетсяНа", "StrEndsWith")] public bool StrEndsWith(string inputString, string searchString) { bool result = false; if(!string.IsNullOrEmpty(inputString)) { if (!string.IsNullOrEmpty(searchString)) { result = inputString.EndsWith(searchString); } else throw new RuntimeException("Ошибка при вызове метода контекста (СтрЗаканчиваетсяНа): Недопустимое значение параметра (параметр номер '2')"); } return result; } /// <summary> /// Разделяет строку на части по указанным символам-разделителям. /// </summary> /// <param name="inputString">Разделяемая строка.</param> /// <param name="stringDelimiter">Строка символов, каждый из которых является индивидуальным разделителем.</param> /// <param name="includeEmpty">Указывает необходимость включать в результат пустые строки, которые могут образоваться в результате разделения исходной строки. Значение по умолчанию: Истина. 
</param> [ContextMethod("СтрРазделить", "StrSplit")] public ArrayImpl StrSplit(string inputString, string stringDelimiter, bool? includeEmpty = true) { string[] arrParsed; if (includeEmpty == null) includeEmpty = true; if(!string.IsNullOrEmpty(inputString)) { arrParsed = inputString.Split(stringDelimiter?.ToCharArray(), (bool) includeEmpty ? StringSplitOptions.None : StringSplitOptions.RemoveEmptyEntries); } else { arrParsed = (bool) includeEmpty ? new string[] { string.Empty } : new string[0]; } return new ArrayImpl(arrParsed.Select(x => ValueFactory.Create(x))); } /// <summary> /// Соединяет массив переданных строк в одну строку с указанным разделителем /// </summary> /// <param name="input">Массив - соединяемые строки</param> /// <param name="delimiter">Разделитель. Если не указан, строки объединяются слитно</param> [ContextMethod("СтрСоединить", "StrConcat")] public string StrConcat(ArrayImpl input, string delimiter = null) { var strings = input.Select(x => x.AsString()); return String.Join(delimiter, strings); } /// <summary> /// Сравнивает строки без учета регистра. /// </summary> /// <param name="first"></param> /// <param name="second"></param> /// <returns>-1 первая строка больше, 1 - вторая строка больше. 0 - строки равны</returns> [ContextMethod("СтрСравнить", "StrCompare")] public int StrCompare(string first, string second) { return String.Compare(first, second, true); } /// <summary> /// Находит вхождение искомой строки как подстроки в исходной строке /// </summary> /// <param name="haystack">Строка, в которой ищем</param> /// <param name="needle">Строка, которую надо найти</param> /// <param name="direction">значение перечисления НаправлениеПоиска (с конца/с начала)</param> /// <param name="startPos">Начальная позиция, с которой начинать поиск</param> /// <param name="occurance">Указывает номер вхождения искомой подстроки в исходной строке</param> /// <returns>Позицию искомой строки в исходной строке. Возвращает 0, если подстрока не найдена.</returns> [ContextMethod("СтрНайти", "StrFind")] public int StrFind(string haystack, string needle, SearchDirection direction = SearchDirection.FromBegin, int startPos = 0, int occurance = 0) { int len = haystack.Length; if (len == 0 || needle.Length == 0) return 0; bool fromBegin = direction == SearchDirection.FromBegin; if(startPos == 0) { startPos = fromBegin ? 
1 : len; } if (startPos < 1 || startPos > len) throw RuntimeException.InvalidArgumentValue(); if (occurance == 0) occurance = 1; int startIndex = startPos - 1; int foundTimes = 0; int index = len + 1; if(fromBegin) { while(foundTimes < occurance && index >= 0) { index = haystack.IndexOf(needle, startIndex, StringComparison.Ordinal); if (index >= 0) { startIndex = index + 1; foundTimes++; } if (startIndex >= len) break; } } else { while(foundTimes < occurance && index >= 0) { index = haystack.LastIndexOf(needle, startIndex, StringComparison.Ordinal); if (index >= 0) { startIndex = index - 1; foundTimes++; } if (startIndex < 0) break; } } if (foundTimes == occurance) return index + 1; else return 0; } #region IRuntimeContextInstance overrides public override int FindMethod(string name) { if (string.Compare(name, STRTEMPLATE_NAME_RU, true) == 0 || string.Compare(name, STRTEMPLATE_NAME_EN, true) == 0) return STRTEMPLATE_ID; else return base.FindMethod(name); } public override int GetMethodsCount() { return base.GetMethodsCount() + 1; } private static MethodInfo CreateStrTemplateMethodInfo() { var strTemplateMethodInfo = new MethodInfo(); strTemplateMethodInfo.IsFunction = true; strTemplateMethodInfo.Name = STRTEMPLATE_NAME_RU; strTemplateMethodInfo.Alias = STRTEMPLATE_NAME_EN; strTemplateMethodInfo.Params = new ParameterDefinition[11]; strTemplateMethodInfo.IsExport = true; strTemplateMethodInfo.Params[0] = new ParameterDefinition() { IsByValue = true }; for (int i = 1; i < strTemplateMethodInfo.Params.Length; i++) { strTemplateMethodInfo.Params[i] = new ParameterDefinition() { IsByValue = true, HasDefaultValue = true }; } return strTemplateMethodInfo; } public override MethodInfo GetMethodInfo(int methodNumber) { if (methodNumber == STRTEMPLATE_ID) return CreateStrTemplateMethodInfo(); else return base.GetMethodInfo(methodNumber); } public override void CallAsProcedure(int methodNumber, IValue[] arguments) { if (methodNumber == STRTEMPLATE_ID) CallStrTemplate(arguments); else base.CallAsProcedure(methodNumber, arguments); } public override void CallAsFunction(int methodNumber, IValue[] arguments, out IValue retValue) { if (methodNumber == STRTEMPLATE_ID) retValue = CallStrTemplate(arguments); else base.CallAsFunction(methodNumber, arguments, out retValue); } #endregion private IValue CallStrTemplate(IValue[] arguments) { var srcFormat = arguments[0].AsString(); if (srcFormat == String.Empty) return ValueFactory.Create(""); var re = new System.Text.RegularExpressions.Regex(@"(%%)|(%\d+)|(%\D)"); int matchCount = 0; int passedArgsCount = arguments.Skip(1).Count(x => x.DataType != DataType.NotAValidValue && x.DataType != DataType.Undefined); var result = re.Replace(srcFormat, (m) => { if (m.Groups[1].Success) return "%"; if(m.Groups[2].Success) { matchCount++; var number = int.Parse(m.Groups[2].Value.Substring(1)); if (number < 1 || number > 11) throw new RuntimeException("Ошибка при вызове метода контекста (СтрШаблон): Ошибка синтаксиса шаблона в позиции " + (m.Index + 1)); if (arguments[number] != null && arguments[number].DataType != DataType.NotAValidValue) return arguments[number].AsString(); else return ""; } throw new RuntimeException("Ошибка при вызове метода контекста (СтрШаблон): Ошибка синтаксиса шаблона в позиции " + (m.Index + 1)); }); if (passedArgsCount > matchCount) throw RuntimeException.TooManyArgumentsPassed(); return ValueFactory.Create(result); } public static IAttachableContext CreateInstance() { return new StringOperations(); } } [EnumerationType("НаправлениеПоиска", 
"SearchDirection")] public enum SearchDirection { [EnumItem("СНачала")] FromBegin, [EnumItem("СКонца")] FromEnd } }
{ "pile_set_name": "Github" }
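StrFind above returns 1-based positions and accepts a search direction and an occurrence number. A short, test-style sketch of the expected results when the class is driven directly from C# (the console program is illustrative only):

using ScriptEngine.HostedScript.Library;

class StrFindDemo
{
    static void Main()
    {
        var ops = new StringOperations();

        // 1-based position of the first occurrence of "bc" in "abcabc".
        System.Console.WriteLine(ops.StrFind("abcabc", "bc"));                                  // 2

        // Second occurrence, still searching from the beginning.
        System.Console.WriteLine(ops.StrFind("abcabc", "bc", SearchDirection.FromBegin, 0, 2)); // 5

        // Searching from the end finds the last occurrence first.
        System.Console.WriteLine(ops.StrFind("abcabc", "bc", SearchDirection.FromEnd));         // 5

        // Not found: the result is 0 rather than an exception.
        System.Console.WriteLine(ops.StrFind("abcabc", "zz"));                                  // 0
    }
}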
/*===================================================================== QGroundControl Open Source Ground Control Station (c) 2009, 2010 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org> This file is part of the QGROUNDCONTROL project QGROUNDCONTROL is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. QGROUNDCONTROL is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with QGROUNDCONTROL. If not, see <http://www.gnu.org/licenses/>. ======================================================================*/ /** * @file * @brief Implementation of audio output * * @author Lorenz Meier <[email protected]> * */ #include "logging.h" #include "configuration.h" #include "GAudioOutput.h" #include "MG.h" #include <QApplication> #include <QSettings> #include <QTemporaryFile> #ifdef Q_OS_MAC #include <ApplicationServices/ApplicationServices.h> #endif // Speech synthesis is only supported with MSVC compiler #if _MSC_VER2 // Documentation: http://msdn.microsoft.com/en-us/library/ee125082%28v=VS.85%29.aspx #define _ATL_APARTMENT_THREADED #include <atlbase.h> //You may derive a class from CComModule and use it if you want to override something, //but do not change the name of _Module extern CComModule _Module; #include <atlcom.h> #include <sapi.h> //using System; //using System.Speech.Synthesis; #endif #if defined(FLITE_AUDIO_ENABLED) extern "C" { #include <flite/flite.h> cst_voice* register_cmu_us_kal(const char* voxdir); }; #endif /** * This class follows the singleton design pattern * @see http://en.wikipedia.org/wiki/Singleton_pattern * A call to this function thus returns the only instance of this object * the call can occur at any place in the code, no reference to the * GAudioOutput object has to be passed. */ GAudioOutput* GAudioOutput::instance() { static GAudioOutput* _instance = 0; if(_instance == 0) { _instance = new GAudioOutput(); // Set the application as parent to ensure that this object // will be destroyed when the main application exits _instance->setParent(qApp); } return _instance; } #define QGC_GAUDIOOUTPUT_KEY QString("QGC_AUDIOOUTPUT_") GAudioOutput::GAudioOutput(QObject* parent) : QObject(parent), voiceIndex(0), emergency(false), muted(false) { // Load settings QSettings settings; settings.sync(); muted = settings.value(QGC_GAUDIOOUTPUT_KEY+"muted", muted).toBool(); #ifdef FLITE_AUDIO_ENABLED // Remove Phonon Audio for linux and use alsa flite_init(); QLOG_INFO() << "Using Alsa Audio driver"; // Create shared dir tmp_audio // we create new spoken audio files here. we don't delete them as befor. // we save audiofiles like message inside. 
// if new messages will create in code this new messages will saved as audio file on first call // this save time and also it will possible to queue audio messages later because the are not temporary QDir dir(QString("%1/%2").arg( QGC::appDataDirectory() ).arg( "tmp_audio" )); if (!dir.exists()) { QLOG_WARN() << "Create directory tmp_audio"; dir.mkpath("."); }else { QLOG_WARN() << "Dir directory tmp_audio exists"; } #endif #ifdef Q_OS_MAC m_speech_channel = new SpeechChannel; OSErr theErr = NewSpeechChannel(NULL, m_speech_channel); if (theErr != noErr) { QLOG_WARN() << "Creating speech channel failed!"; delete m_speech_channel; m_speech_channel = NULL; } #endif #if _MSC_VER2 ISpVoice * pVoice = NULL; if (FAILED(::CoInitialize(NULL))) { QLOG_WARN() << "Creating COM object for audio output failed!"; } else { HRESULT hr = CoCreateInstance(CLSID_SpVoice, NULL, CLSCTX_ALL, IID_ISpVoice, (void **)&pVoice;); if( SUCCEEDED( hr ) ) { hr = pVoice->Speak(L"Hello world", 0, NULL); pVoice->Release(); pVoice = NULL; } } #endif // Prepare regular emergency signal, will be fired off on calling startEmergency() emergencyTimer = new QTimer(); connect(emergencyTimer, SIGNAL(timeout()), this, SLOT(beep())); switch (voiceIndex) { case 0: selectFemaleVoice(); break; default: selectMaleVoice(); break; } } GAudioOutput::~GAudioOutput() { QLOG_INFO() << "~GAudioOutput()"; #ifdef Q_OS_LINUX // wait until thread is running before terminate AlsaAudio thread AlsaAudio::instance(this)->wait(); #endif #ifdef Q_OS_MAC if(m_speech_channel) { DisposeSpeechChannel(*m_speech_channel); }; delete m_speech_channel; m_speech_channel = NULL; #endif //#ifdef _MSC_VER2 // ::CoUninitialize(); //#endif } void GAudioOutput::mute(bool mute) { if (mute != muted) { this->muted = mute; QSettings settings; settings.setValue(QGC_GAUDIOOUTPUT_KEY+"muted", this->muted); settings.sync(); emit mutedChanged(muted); } } bool GAudioOutput::isMuted() { return this->muted; } bool GAudioOutput::say(QString text, int severity) { if (!muted) { if (text.compare("system %1") == 0) //don't say system %1 [HACK] :( return true; // TODO Add severity filter Q_UNUSED(severity); bool res = false; if (!emergency) { // Speech synthesis is only supported with MSVC compiler #ifdef _MSC_VER2 SpeechSynthesizer synth = new SpeechSynthesizer(); synth.SelectVoice("Microsoft Anna"); synth.SpeakText(text.toStdString().c_str()); res = true; #endif #ifdef FLITE_AUDIO_ENABLED // spokenfilename is the filename created from spoken text QString spokenFilename = text; spokenFilename.replace(QRegExp(" "), "_"); spokenFilename = QGC::appDataDirectory() + "/tmp_audio/" + spokenFilename + ".wav"; // alsadriver is a qthread. tmp. 
files dont work here QFile file( spokenFilename ); if (!file.exists(spokenFilename)){ // if file not exist we create a new one if (file.open(QIODevice::ReadWrite)) { QLOG_INFO() << file.fileName() << " file not exist, create a new one"; cst_voice *v = register_cmu_us_kal(NULL); cst_wave *wav = flite_text_to_wave(text.toStdString().c_str(), v); cst_wave_save(wav, file.fileName().toStdString().c_str(), "riff"); file.close(); AlsaAudio::instance(this)->enqueueFilname(file.fileName()); if(!AlsaAudio::instance(this)->isRunning()) AlsaAudio::instance(this)->start(); res = true; } }else // we open existing file { QLOG_INFO() << file.fileName() << " file exist, playing this file"; AlsaAudio::instance(this)->enqueueFilname(file.fileName()); if(!AlsaAudio::instance(this)->isRunning()) AlsaAudio::instance(this)->start(); res = true; } #endif #ifdef Q_OS_MAC if(m_speech_channel) { SpeakCFString(*m_speech_channel, text.toCFString(), NULL); } res = true; #endif } return res; } else { return false; } } /** * @param text This message will be played after the alert beep */ bool GAudioOutput::alert(QString text) { if (!emergency || !muted) { // Play alert sound beep(); // Say alert message say(text, 2); return true; } else { return false; } } void GAudioOutput::notifyPositive() { if (!muted) { // Use QFile to transform path for all OS QFile f(QGC::shareDirectory()+QString("/files/audio/double_notify.wav")); //m_media->setCurrentSource(Phonon::MediaSource(f.fileName().toStdString().c_str())); //m_media->play(); } } void GAudioOutput::notifyNegative() { if (!muted) { // Use QFile to transform path for all OS QFile f(QGC::shareDirectory()+QString("/files/audio/flat_notify.wav")); //m_media->setCurrentSource(Phonon::MediaSource(f.fileName().toStdString().c_str())); //m_media->play(); } } /** * The emergency sound will be played continously during the emergency. * call stopEmergency() to disable it again. No speech synthesis or other * audio output is available during the emergency. * * @return true if the emergency could be started, false else */ bool GAudioOutput::startEmergency() { if (!emergency) { emergency = true; // Beep immediately and then start timer if (!muted) beep(); emergencyTimer->start(1500); QTimer::singleShot(5000, this, SLOT(stopEmergency())); } return true; } /** * Stops the continous emergency sound. Use startEmergency() to start * the emergency sound. 
* * @return true if the emergency could be stopped, false else */ bool GAudioOutput::stopEmergency() { if (emergency) { emergency = false; emergencyTimer->stop(); } return true; } void GAudioOutput::beep() { if (!muted) { // Use QFile to transform path for all OS QFile f(QGC::shareDirectory()+QString("/files/audio/alert.wav")); #ifdef Q_OS_LINUX AlsaAudio::instance(this)->enqueueFilname(f.fileName()); if(!AlsaAudio::instance(this)->isRunning()) AlsaAudio::instance(this)->start(); #endif } } void GAudioOutput::selectFemaleVoice() { #ifdef Q_OS_LINUX //this->voice = register_cmu_us_slt(NULL); #endif } void GAudioOutput::selectMaleVoice() { #ifdef Q_OS_LINUX //this->voice = register_cmu_us_rms(NULL); #endif } /* void GAudioOutput::selectNeutralVoice() { #ifdef Q_OS_LINUX this->voice = register_cmu_us_awb(NULL); #endif }*/ QStringList GAudioOutput::listVoices(void) { QStringList l; #ifdef FLITE_AUDIO_ENABLED cst_voice *voice; const cst_val *v; printf("Voices available: "); for (v=flite_voice_list; v; v=val_cdr(v)) { voice = val_voice(val_car(v)); QString s; s.sprintf("%s",voice->name); printf("%s",voice->name); l.append(s); } printf("\n"); #endif return l; }
{ "pile_set_name": "Github" }
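GAudioOutput follows the singleton pattern, so callers go through instance() rather than constructing it. A hedged C++ usage sketch, assuming a running Qt application and an audio backend enabled at build time:

#include <QApplication>
#include "GAudioOutput.h"

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    // The singleton parents itself to qApp, so no manual cleanup is needed.
    GAudioOutput *audio = GAudioOutput::instance();

    audio->say("Battery low", 2); // spoken message with a severity hint
    audio->alert("Link lost");    // beep first, then speak
    audio->startEmergency();      // repeating alarm; stops itself after 5 s

    return app.exec();
}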
<?php /** * Copyright © Magento, Inc. All rights reserved. * See COPYING.txt for license details. */ declare(strict_types=1); namespace Magento\Sales\Test\Unit\Observer\Frontend; use Magento\Customer\Helper\Address; use Magento\Customer\Model\Address\AbstractAddress; use Magento\Framework\Event\Observer; use Magento\Sales\Model\Order; use Magento\Sales\Observer\Frontend\AddVatRequestParamsOrderComment; use PHPUnit\Framework\MockObject\MockObject; use PHPUnit\Framework\TestCase; /** * Tests Magento\Sales\Observer\Frontend\AddVatRequestParamsOrderComment */ class AddVatRequestParamsOrderCommentTest extends TestCase { /** * @var Address|MockObject */ protected $customerAddressHelperMock; /** * @var AddVatRequestParamsOrderComment */ protected $observer; protected function setUp(): void { $this->customerAddressHelperMock = $this->getMockBuilder(Address::class) ->disableOriginalConstructor() ->getMock(); $this->observer = new AddVatRequestParamsOrderComment( $this->customerAddressHelperMock ); } /** * @param string $configAddressType * @param string|int $vatRequestId * @param string|int $vatRequestDate * @param string $orderHistoryComment * @dataProvider addVatRequestParamsOrderCommentDataProvider */ public function testAddVatRequestParamsOrderComment( $configAddressType, $vatRequestId, $vatRequestDate, $orderHistoryComment ) { $this->customerAddressHelperMock->expects($this->once()) ->method('getTaxCalculationAddressType') ->willReturn($configAddressType); $orderAddressMock = $this->createPartialMock( \Magento\Sales\Model\Order\Address::class, ['getVatRequestId', 'getVatRequestDate'] ); $orderAddressMock->expects($this->any()) ->method('getVatRequestId') ->willReturn($vatRequestId); $orderAddressMock->expects($this->any()) ->method('getVatRequestDate') ->willReturn($vatRequestDate); $orderMock = $this->getMockBuilder(Order::class) ->disableOriginalConstructor() ->setMethods(['getShippingAddress', 'addStatusHistoryComment', 'getBillingAddress']) ->getMock(); $orderMock->expects($this->any()) ->method('getShippingAddress') ->willReturn($orderAddressMock); if ($orderHistoryComment === null) { $orderMock->expects($this->never()) ->method('addStatusHistoryComment'); } else { $orderMock->expects($this->once()) ->method('addStatusHistoryComment') ->with($orderHistoryComment, false); } $observer = $this->getMockBuilder(Observer::class) ->addMethods(['getOrder']) ->disableOriginalConstructor() ->getMock(); $observer->expects($this->once()) ->method('getOrder') ->willReturn($orderMock); $this->assertNull($this->observer->execute($observer)); } /** * @return array */ public function addVatRequestParamsOrderCommentDataProvider() { return [ [ AbstractAddress::TYPE_SHIPPING, 'vatRequestId', 'vatRequestDate', 'VAT Request Identifier: vatRequestId<br />VAT Request Date: vatRequestDate', ], [ AbstractAddress::TYPE_SHIPPING, 1, 'vatRequestDate', null, ], [ AbstractAddress::TYPE_SHIPPING, 'vatRequestId', 1, null, ], [ null, 'vatRequestId', 'vatRequestDate', null, ], [ AbstractAddress::TYPE_BILLING, 'vatRequestId', 'vatRequestDate', null, ], ]; } }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <Project ToolsVersion="15.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <Import Project="..\..\packages\xunit.runner.visualstudio.2.1.0\build\net20\xunit.runner.visualstudio.props" Condition="Exists('..\..\packages\xunit.runner.visualstudio.2.1.0\build\net20\xunit.runner.visualstudio.props')" /> <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" /> <PropertyGroup> <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration> <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform> <ProjectGuid>{4A3AFC58-3C2D-45FF-8E90-8D5F586A3B5E}</ProjectGuid> <OutputType>Library</OutputType> <AppDesignerFolder>Properties</AppDesignerFolder> <RootNamespace>Microsoft.Azure.Mobile.Server.Quickstart.Test</RootNamespace> <AssemblyName>Microsoft.Azure.Mobile.Server.Quickstart.Test</AssemblyName> <TargetFrameworkVersion>v4.6</TargetFrameworkVersion> <FileAlignment>512</FileAlignment> <NuGetPackageImportStamp> </NuGetPackageImportStamp> <TargetFrameworkProfile /> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' "> <DebugSymbols>true</DebugSymbols> <DebugType>full</DebugType> <Optimize>false</Optimize> <OutputPath>..\..\bin\Debug\</OutputPath> <DefineConstants>DEBUG;TRACE</DefineConstants> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> <CodeAnalysisRuleSet>..\Test.ruleset</CodeAnalysisRuleSet> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' "> <DebugType>pdbonly</DebugType> <Optimize>true</Optimize> <OutputPath>..\..\bin\Release\</OutputPath> <DefineConstants>TRACE</DefineConstants> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> <CodeAnalysisRuleSet>..\Test.ruleset</CodeAnalysisRuleSet> </PropertyGroup> <Import Project="..\..\..\Microsoft.Azure.Mobile.Build.props" Condition="Exists('..\..\..\Microsoft.Azure.Mobile.Build.props')" /> <ItemGroup> <Reference Include="Castle.Core, Version=3.3.0.0, Culture=neutral, PublicKeyToken=407dd0808d44fbdc, processorArchitecture=MSIL"> <HintPath>..\..\packages\Castle.Core.3.3.3\lib\net45\Castle.Core.dll</HintPath> <Private>True</Private> </Reference> <Reference Include="EntityFramework, Version=6.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\EntityFramework.6.1.3\lib\net45\EntityFramework.dll</HintPath> </Reference> <Reference Include="EntityFramework.SqlServer, Version=6.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\EntityFramework.6.1.3\lib\net45\EntityFramework.SqlServer.dll</HintPath> </Reference> <Reference Include="Microsoft.Azure.NotificationHubs, Version=2.16.0.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <HintPath>..\..\packages\Microsoft.Azure.NotificationHubs.1.0.7\lib\net45-full\Microsoft.Azure.NotificationHubs.dll</HintPath> <Private>True</Private> </Reference> <Reference Include="Microsoft.Data.Edm, Version=5.8.4.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <HintPath>..\..\packages\Microsoft.Data.Edm.5.8.4\lib\net40\Microsoft.Data.Edm.dll</HintPath> </Reference> <Reference Include="Microsoft.Data.OData, Version=5.8.4.0, 
Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <HintPath>..\..\packages\Microsoft.Data.OData.5.8.4\lib\net40\Microsoft.Data.OData.dll</HintPath> </Reference> <Reference Include="Microsoft.Owin, Version=3.0.1.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\Microsoft.Owin.3.0.1\lib\net45\Microsoft.Owin.dll</HintPath> </Reference> <Reference Include="Microsoft.Owin.Hosting, Version=3.0.1.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\Microsoft.Owin.Hosting.3.0.1\lib\net45\Microsoft.Owin.Hosting.dll</HintPath> </Reference> <Reference Include="Microsoft.Owin.Security, Version=3.0.1.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\Microsoft.Owin.Security.3.0.1\lib\net45\Microsoft.Owin.Security.dll</HintPath> </Reference> <Reference Include="Microsoft.Owin.Security.OAuth, Version=3.0.1.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\Microsoft.Owin.Security.OAuth.3.0.1\lib\net45\Microsoft.Owin.Security.OAuth.dll</HintPath> </Reference> <Reference Include="Microsoft.Owin.Testing, Version=3.0.1.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\Microsoft.Owin.Testing.3.0.1\lib\net45\Microsoft.Owin.Testing.dll</HintPath> </Reference> <Reference Include="Microsoft.WindowsAzure.Configuration, Version=3.0.0.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <HintPath>..\..\packages\Microsoft.WindowsAzure.ConfigurationManager.3.2.3\lib\net40\Microsoft.WindowsAzure.Configuration.dll</HintPath> <Private>True</Private> </Reference> <Reference Include="Moq, Version=4.5.29.0, Culture=neutral, PublicKeyToken=69f491c39445e920, processorArchitecture=MSIL"> <HintPath>..\..\packages\Moq.4.5.29\lib\net45\Moq.dll</HintPath> <Private>True</Private> </Reference> <Reference Include="Newtonsoft.Json, Version=9.0.0.0, Culture=neutral, PublicKeyToken=30ad4fe6b2a6aeed, processorArchitecture=MSIL"> <HintPath>..\..\packages\Newtonsoft.Json.9.0.1\lib\net45\Newtonsoft.Json.dll</HintPath> <Private>True</Private> </Reference> <Reference Include="Owin"> <HintPath>..\..\packages\Owin.1.0\lib\net40\Owin.dll</HintPath> </Reference> <Reference Include="System" /> <Reference Include="System.ComponentModel.DataAnnotations" /> <Reference Include="System.Core" /> <Reference Include="System.IdentityModel" /> <Reference Include="System.IdentityModel.Tokens.Jwt, Version=4.0.30826.1200, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <HintPath>..\..\packages\System.IdentityModel.Tokens.Jwt.4.0.3.308261200\lib\net45\System.IdentityModel.Tokens.Jwt.dll</HintPath> <Private>True</Private> </Reference> <Reference Include="System.Net.Http" /> <Reference Include="System.Net.Http.Formatting, Version=5.2.3.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\Microsoft.AspNet.WebApi.Client.5.2.3\lib\net45\System.Net.Http.Formatting.dll</HintPath> </Reference> <Reference Include="System.Runtime.Serialization" /> <Reference Include="System.ServiceModel" /> <Reference 
Include="System.Spatial, Version=5.8.4.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <HintPath>..\..\packages\System.Spatial.5.8.4\lib\net40\System.Spatial.dll</HintPath> </Reference> <Reference Include="System.Web.Http, Version=5.2.3.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\Microsoft.AspNet.WebApi.Core.5.2.3\lib\net45\System.Web.Http.dll</HintPath> </Reference> <Reference Include="System.Web.Http.Owin, Version=5.2.3.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\Microsoft.AspNet.WebApi.Owin.5.2.3\lib\net45\System.Web.Http.Owin.dll</HintPath> </Reference> <Reference Include="System.Web.Http.Tracing, Version=5.2.3.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>..\..\packages\Microsoft.AspNet.WebApi.Tracing.5.2.3\lib\net45\System.Web.Http.Tracing.dll</HintPath> </Reference> <Reference Include="System.Xml.Linq" /> <Reference Include="System.Data.DataSetExtensions" /> <Reference Include="Microsoft.CSharp" /> <Reference Include="System.Data" /> <Reference Include="System.Xml" /> <Reference Include="xunit.abstractions, Version=2.0.0.0, Culture=neutral, PublicKeyToken=8d05b1bb7a6fdb6c, processorArchitecture=MSIL"> <HintPath>..\..\packages\xunit.abstractions.2.0.0\lib\net35\xunit.abstractions.dll</HintPath> </Reference> <Reference Include="xunit.assert, Version=2.1.0.3179, Culture=neutral, PublicKeyToken=8d05b1bb7a6fdb6c, processorArchitecture=MSIL"> <HintPath>..\..\packages\xunit.assert.2.1.0\lib\dotnet\xunit.assert.dll</HintPath> <Private>True</Private> </Reference> <Reference Include="xunit.core, Version=2.1.0.3179, Culture=neutral, PublicKeyToken=8d05b1bb7a6fdb6c, processorArchitecture=MSIL"> <HintPath>..\..\packages\xunit.extensibility.core.2.1.0\lib\dotnet\xunit.core.dll</HintPath> <Private>True</Private> </Reference> <Reference Include="xunit.execution.desktop, Version=2.1.0.3179, Culture=neutral, PublicKeyToken=8d05b1bb7a6fdb6c, processorArchitecture=MSIL"> <HintPath>..\..\packages\xunit.extensibility.execution.2.1.0\lib\net45\xunit.execution.desktop.dll</HintPath> <Private>True</Private> </Reference> </ItemGroup> <ItemGroup> <Compile Include="..\Common\CommonTestAssemblyInfo.cs"> <Link>Properties\CommonTestAssemblyInfo.cs</Link> </Compile> <Compile Include="Config\MobileAppConfigSetupTests.cs" /> <Compile Include="Config\MobileAppAppBuilderExtensionTests.cs" /> <Compile Include="Properties\AssemblyInfo.cs" /> <Compile Include="TestControllers\SecuredController.cs" /> <Compile Include="TestControllers\TestApiController.cs" /> <Compile Include="TestControllers\TestTableController.cs" /> </ItemGroup> <ItemGroup> <None Include="app.config" /> <None Include="packages.config"> <SubType>Designer</SubType> </None> </ItemGroup> <ItemGroup> <ProjectReference Include="..\..\src\Microsoft.Azure.Mobile.Server.Authentication\Microsoft.Azure.Mobile.Server.Authentication.csproj"> <Project>{61ff8937-4ebf-4e16-b3e5-9a1dba6833e3}</Project> <Name>Microsoft.Azure.Mobile.Server.Authentication</Name> </ProjectReference> <ProjectReference Include="..\..\src\Microsoft.Azure.Mobile.Server.CrossDomain\Microsoft.Azure.Mobile.Server.CrossDomain.csproj"> <Project>{aeeb858a-400b-4126-b9e3-eb65332417dc}</Project> <Name>Microsoft.Azure.Mobile.Server.CrossDomain</Name> </ProjectReference> 
<ProjectReference Include="..\..\src\Microsoft.Azure.Mobile.Server.Entity\Microsoft.Azure.Mobile.Server.Entity.csproj"> <Project>{0e66ac7a-8139-46de-b705-6529c6a18afc}</Project> <Name>Microsoft.Azure.Mobile.Server.Entity</Name> </ProjectReference> <ProjectReference Include="..\..\src\Microsoft.Azure.Mobile.Server.Login\Microsoft.Azure.Mobile.Server.Login.csproj"> <Project>{4f8392ee-59ff-41a0-8e69-568352fa463f}</Project> <Name>Microsoft.Azure.Mobile.Server.Login</Name> </ProjectReference> <ProjectReference Include="..\..\src\Microsoft.Azure.Mobile.Server.Notifications\Microsoft.Azure.Mobile.Server.Notifications.csproj"> <Project>{72076e66-f0b1-40b8-8bdc-3f0e0c622486}</Project> <Name>Microsoft.Azure.Mobile.Server.Notifications</Name> </ProjectReference> <ProjectReference Include="..\..\src\Microsoft.Azure.Mobile.Server.Quickstart\Microsoft.Azure.Mobile.Server.Quickstart.csproj"> <Project>{f72d0710-4dd6-4751-8908-78f207229904}</Project> <Name>Microsoft.Azure.Mobile.Server.Quickstart</Name> </ProjectReference> <ProjectReference Include="..\..\src\Microsoft.Azure.Mobile.Server.Tables\Microsoft.Azure.Mobile.Server.Tables.csproj"> <Project>{726cca9e-f04e-495e-bc44-866d206d221f}</Project> <Name>Microsoft.Azure.Mobile.Server.Tables</Name> </ProjectReference> <ProjectReference Include="..\..\src\Microsoft.Azure.Mobile.Server\Microsoft.Azure.Mobile.Server.csproj"> <Project>{a74dccee-a868-4c6a-91a4-b956fe44e5d7}</Project> <Name>Microsoft.Azure.Mobile.Server</Name> </ProjectReference> <ProjectReference Include="..\TestUtilities\TestUtilities.csproj"> <Project>{acee6dfc-1963-43a6-b9a3-2aba2127ac2d}</Project> <Name>TestUtilities</Name> </ProjectReference> </ItemGroup> <ItemGroup> <Service Include="{82A7F48D-3B50-4B1E-B82E-3ADA8210C358}" /> </ItemGroup> <ItemGroup> <CodeAnalysisDictionary Include="..\..\CustomDictionary.xml"> <Link>CustomDictionary.xml</Link> </CodeAnalysisDictionary> </ItemGroup> <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" /> <Target Name="EnsureNuGetPackageBuildImports" BeforeTargets="PrepareForBuild"> <PropertyGroup> <ErrorText>This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.</ErrorText> </PropertyGroup> <Error Condition="!Exists('..\..\packages\xunit.runner.visualstudio.2.1.0\build\net20\xunit.runner.visualstudio.props')" Text="$([System.String]::Format('$(ErrorText)', '..\..\packages\xunit.runner.visualstudio.2.1.0\build\net20\xunit.runner.visualstudio.props'))" /> <Error Condition="!Exists('..\..\packages\StyleCop.MSBuild.4.7.55.0\build\StyleCop.MSBuild.Targets')" Text="$([System.String]::Format('$(ErrorText)', '..\..\packages\StyleCop.MSBuild.4.7.55.0\build\StyleCop.MSBuild.Targets'))" /> </Target> <Import Project="..\..\packages\StyleCop.MSBuild.4.7.55.0\build\StyleCop.MSBuild.Targets" Condition="Exists('..\..\packages\StyleCop.MSBuild.4.7.55.0\build\StyleCop.MSBuild.Targets')" /> <!-- To modify your build process, add your task inside one of the targets below and uncomment it. Other similar extension points exist, see Microsoft.Common.targets. <Target Name="BeforeBuild"> </Target> <Target Name="AfterBuild"> </Target> --> </Project>
/* * Harbour source code formatter (command-line wrapper) * * Copyright 2009 Alexander S.Kresin <[email protected]> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; see the file LICENSE.txt. If not, write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA 02110-1301 USA (or visit https://www.gnu.org/licenses/). * * As a special exception, the Harbour Project gives permission for * additional uses of the text contained in its release of Harbour. * * The exception is that, if you link the Harbour libraries with other * files to produce an executable, this does not by itself cause the * resulting executable to be covered by the GNU General Public License. * Your use of that executable is in no way restricted on account of * linking the Harbour library code into it. * * This exception does not however invalidate any other reasons why * the executable file might be covered by the GNU General Public License. * * This exception applies only to the code released by the Harbour * Project under the name Harbour. If you copy code from other * Harbour Project or Free Software Foundation releases into a copy of * Harbour, as the General Public License permits, the exception does * not apply to the code that you add in this way. To avoid misleading * anyone as to the status of such modified files, you must delete * this exception notice from them. * * If you write modifications of your own for Harbour, it is your choice * whether to permit this exception to apply to your modifications. * If you do not wish that, delete this exception notice. * */ #require "hbformat" #include "directry.ch" #include "hbver.ch" ANNOUNCE HB_GTSYS REQUEST HB_GT_CGI_DEFAULT #define I_( x ) hb_UTF8ToStr( hb_i18n_gettext( x ) ) PROCEDURE Main( ... ) LOCAL oRef, aParams, cFileName, cInitDir, cParam, lRecursive := .F. #if 0 AltD( 2 ) AltD() #endif aParams := hb_AParams() IF Empty( aParams ) .OR. Left( cFileName := ATail( aParams ), 1 ) $ "@-" About() RETURN ENDIF FOR EACH cParam IN aParams IF hb_LeftEq( cParam, "-" ) IF SubStr( cParam, Len( "-" ) + 1 ) == "r" lRecursive := .T. cParam := "#" EXIT ENDIF ENDIF NEXT oRef := HBFormatCode():New( aParams, hb_FNameMerge( hb_DirBase(), "hbformat.ini" ) ) IF oRef:nErr > 0 OutStd( hb_StrFormat( iif( oRef:nLineErr == 0, ; I_( "Initialization error %1$d in parameter: %2$s" ), ; I_( "Initialization error %1$d on line %3$d: %2$s" ) ), oRef:nErr, oRef:cLineErr, oRef:nLineErr ) + hb_eol() ) ENDIF oRef:bCallBack := {| a, i | FCallBack( a, i ) } IF "*" $ cFileName .OR. ; "?" $ cFileName IF SubStr( hb_FNameExt( cFileName ), 2, 1 ) < "a" OutErr( I_( "Wrong mask" ) + hb_eol() ) ELSE cInitDir := hb_FNameDir( cFileName ) IF HB_ISNULL( cInitDir ) cInitDir := "." + hb_ps() ENDIF DirEval( cInitDir, hb_FNameNameExt( cFileName ), lRecursive, {| name | Reformat( oRef, name ) } ) ENDIF ELSE Reformat( oRef, cFileName ) ENDIF RETURN STATIC PROCEDURE FCallBack( aFile, nItem ) IF nItem % Int( Len( aFile ) / 40 ) == 1 OutStd( "." 
) ENDIF RETURN STATIC PROCEDURE Reformat( oRef, cFileName ) LOCAL aFile IF ! Empty( aFile := oRef:File2Array( cFileName ) ) OutStd( hb_StrFormat( I_( "Reformatting %1$s (%2$d lines)" ), cFileName, Len( aFile ) ) + hb_eol() ) OutStd( "<" ) IF oRef:Reformat( aFile ) oRef:Array2File( cFileName, aFile ) OutStd( ">" + hb_eol() ) ELSE OutErr( hb_StrFormat( I_( "Error %1$d on line %2$d: %3$s" ), oRef:nErr, oRef:nLineErr, oRef:cLineErr ) + hb_eol() ) ENDIF ELSE OutErr( hb_StrFormat( I_( "'%1$s' is not found..." ), cFileName ) + hb_eol() ) ENDIF RETURN STATIC PROCEDURE DirEval( cInitDir, cMask, lRecur, bCode ) LOCAL file cInitDir := hb_DirSepAdd( cInitDir ) cMask := iif( cMask == NIL, hb_osFileMask(), cMask ) FOR EACH file IN hb_vfDirectory( cInitDir + cMask, "HSD" ) IF "D" $ file[ F_ATTR ] IF ! "." == file[ F_NAME ] .AND. ; ! ".." == file[ F_NAME ] .AND. lRecur DirEval( cInitDir + file[ F_NAME ], cMask, lRecur, bCode ) ENDIF ELSE IF bCode != NIL Eval( bCode, cInitDir + file[ F_NAME ] ) ENDIF ENDIF NEXT RETURN STATIC PROCEDURE About() OutStd( ; "Harbour Source Formatter " + HBRawVersion() + hb_eol() + ; "Copyright (c) 2010-" + ; "2020" + ", " + ; hb_Version( HB_VERSION_URL_BASE ) + hb_eol() + ; "Copyright (c) 2009, Alexander S.Kresin" + hb_eol() + ; hb_eol() ) OutStd( ; I_( "Syntax: hbformat [options] [@config] <file[s]>" ) + hb_eol() + ; hb_eol() ) RETURN STATIC FUNCTION HBRawVersion() RETURN StrTran( Version(), "Harbour " )
#!/usr/bin/env ruby require 'test/unit' require 'rake' # ==================================================================== class TestPathMap < Test::Unit::TestCase include TestMethods def test_returns_self_with_no_args assert_equal "abc.rb", "abc.rb".pathmap end def test_s_returns_file_separator sep = File::ALT_SEPARATOR || File::SEPARATOR assert_equal sep, "abc.rb".pathmap("%s") assert_equal sep, "".pathmap("%s") assert_equal "a#{sep}b", "a/b".pathmap("%d%s%f") end def test_f_returns_basename assert_equal "abc.rb", "abc.rb".pathmap("%f") assert_equal "abc.rb", "this/is/a/dir/abc.rb".pathmap("%f") assert_equal "abc.rb", "/this/is/a/dir/abc.rb".pathmap("%f") end def test_n_returns_basename_without_extension assert_equal "abc", "abc.rb".pathmap("%n") assert_equal "abc", "abc".pathmap("%n") assert_equal "abc", "this/is/a/dir/abc.rb".pathmap("%n") assert_equal "abc", "/this/is/a/dir/abc.rb".pathmap("%n") assert_equal "abc", "/this/is/a/dir/abc".pathmap("%n") end def test_d_returns_dirname assert_equal ".", "abc.rb".pathmap("%d") assert_equal "/", "/abc".pathmap("%d") assert_equal "this/is/a/dir", "this/is/a/dir/abc.rb".pathmap("%d") assert_equal "/this/is/a/dir", "/this/is/a/dir/abc.rb".pathmap("%d") end def test_9d_returns_partial_dirname assert_equal "this/is", "this/is/a/dir/abc.rb".pathmap("%2d") assert_equal "this", "this/is/a/dir/abc.rb".pathmap("%1d") assert_equal ".", "this/is/a/dir/abc.rb".pathmap("%0d") assert_equal "dir", "this/is/a/dir/abc.rb".pathmap("%-1d") assert_equal "a/dir", "this/is/a/dir/abc.rb".pathmap("%-2d") assert_equal "this/is/a/dir", "this/is/a/dir/abc.rb".pathmap("%100d") assert_equal "this/is/a/dir", "this/is/a/dir/abc.rb".pathmap("%-100d") end def test_x_returns_extension assert_equal "", "abc".pathmap("%x") assert_equal ".rb", "abc.rb".pathmap("%x") assert_equal ".rb", "abc.xyz.rb".pathmap("%x") assert_equal "", ".depends".pathmap("%x") assert_equal "", "dir/.depends".pathmap("%x") end def test_X_returns_everything_but_extension assert_equal "abc", "abc".pathmap("%X") assert_equal "abc", "abc.rb".pathmap("%X") assert_equal "abc.xyz", "abc.xyz.rb".pathmap("%X") assert_equal "ab.xyz", "ab.xyz.rb".pathmap("%X") assert_equal "a.xyz", "a.xyz.rb".pathmap("%X") assert_equal "abc", "abc.rb".pathmap("%X") assert_equal "ab", "ab.rb".pathmap("%X") assert_equal "a", "a.rb".pathmap("%X") assert_equal ".depends", ".depends".pathmap("%X") assert_equal "a/dir/.depends", "a/dir/.depends".pathmap("%X") assert_equal "/.depends", "/.depends".pathmap("%X") end def test_p_returns_entire_pathname assert_equal "abc.rb", "abc.rb".pathmap("%p") assert_equal "this/is/a/dir/abc.rb", "this/is/a/dir/abc.rb".pathmap("%p") assert_equal "/this/is/a/dir/abc.rb", "/this/is/a/dir/abc.rb".pathmap("%p") end def test_dash_returns_empty_string assert_equal "", "abc.rb".pathmap("%-") assert_equal "abc.rb", "abc.rb".pathmap("%X%-%x") end def test_percent_percent_returns_percent assert_equal "a%b", "".pathmap("a%%b") end def test_undefined_percent_causes_error ex = assert_exception(ArgumentError) { "dir/abc.rb".pathmap("%z") } end def test_pattern_returns_substitutions assert_equal "bin/org/osb", "src/org/osb/Xyz.java".pathmap("%{src,bin}d") end def test_pattern_can_use_backreferences assert_equal "dir/hi/is", "dir/this/is".pathmap("%{t(hi)s,\\1}p") end def test_pattern_with_star_replacement_string_uses_block assert_equal "src/ORG/osb", "src/org/osb/Xyz.java".pathmap("%{/org,*}d") { |d| d.upcase } assert_equal "Xyz.java", "src/org/osb/Xyz.java".pathmap("%{.*,*}f") { |f| f.capitalize } end def 
test_pattern_with_no_replacement_nor_block_substitutes_empty_string assert_equal "bc.rb", "abc.rb".pathmap("%{a}f") end def test_pattern_works_with_certain_valid_operators assert_equal "dir/xbc.rb", "dir/abc.rb".pathmap("%{a,x}p") assert_equal "d1r", "dir/abc.rb".pathmap("%{i,1}d") assert_equal "xbc.rb", "dir/abc.rb".pathmap("%{a,x}f") assert_equal ".Rb", "dir/abc.rb".pathmap("%{r,R}x") assert_equal "xbc", "dir/abc.rb".pathmap("%{a,x}n") end def test_multiple_patterns assert_equal "this/is/b/directory/abc.rb", "this/is/a/dir/abc.rb".pathmap("%{a,b;dir,\\0ectory}p") end def test_partial_directory_selection_works_with_patterns assert_equal "this/is/a/long", "this/is/a/really/long/path/ok.rb".pathmap("%{/really/,/}5d") end def test_pattern_with_invalid_operator ex = assert_exception(ArgumentError) do "abc.xyz".pathmap("%{src,bin}z") end assert_match(/unknown.*pathmap.*spec.*z/i, ex.message) end def test_works_with_windows_separators if File::ALT_SEPARATOR assert_equal "abc", 'dir\abc.rb'.pathmap("%n") assert_equal 'this\is\a\dir', 'this\is\a\dir\abc.rb'.pathmap("%d") end end def test_complex_patterns sep = "".pathmap("%s") assert_equal "dir/abc.rb", "dir/abc.rb".pathmap("%d/%n%x") assert_equal "./abc.rb", "abc.rb".pathmap("%d/%n%x") assert_equal "Your file extension is '.rb'", "dir/abc.rb".pathmap("Your file extension is '%x'") assert_equal "bin/org/onstepback/proj/A.class", "src/org/onstepback/proj/A.java".pathmap("%{src,bin}d/%n.class") assert_equal "src_work/bin/org/onstepback/proj/A.class", "src_work/src/org/onstepback/proj/A.java".pathmap('%{\bsrc\b,bin}X.class') assert_equal ".depends.bak", ".depends".pathmap("%X.bak") assert_equal "d#{sep}a/b/c#{sep}file.txt", "a/b/c/d/file.txt".pathmap("%-1d%s%3d%s%f") end end class TestPathMapExplode < Test::Unit::TestCase def setup String.class_eval { public :pathmap_explode } end def teardown String.class_eval { protected :pathmap_explode } end def test_explode assert_equal ['a'], 'a'.pathmap_explode assert_equal ['a', 'b'], 'a/b'.pathmap_explode assert_equal ['a', 'b', 'c'], 'a/b/c'.pathmap_explode assert_equal ['/', 'a'], '/a'.pathmap_explode assert_equal ['/', 'a', 'b'], '/a/b'.pathmap_explode assert_equal ['/', 'a', 'b', 'c'], '/a/b/c'.pathmap_explode if File::ALT_SEPARATOR assert_equal ['c:.', 'a'], 'c:a'.pathmap_explode assert_equal ['c:.', 'a', 'b'], 'c:a/b'.pathmap_explode assert_equal ['c:.', 'a', 'b', 'c'], 'c:a/b/c'.pathmap_explode assert_equal ['c:/', 'a'], 'c:/a'.pathmap_explode assert_equal ['c:/', 'a', 'b'], 'c:/a/b'.pathmap_explode assert_equal ['c:/', 'a', 'b', 'c'], 'c:/a/b/c'.pathmap_explode end end end class TestPathMapPartial < Test::Unit::TestCase def test_pathmap_partial @path = "1/2/file" def @path.call(n) pathmap_partial(n) end assert_equal("1", @path.call(1)) assert_equal("1/2", @path.call(2)) assert_equal("1/2", @path.call(3)) assert_equal(".", @path.call(0)) assert_equal("2", @path.call(-1)) assert_equal("1/2", @path.call(-2)) assert_equal("1/2", @path.call(-3)) end end class TestFileListPathMap < Test::Unit::TestCase def test_file_list_supports_pathmap assert_equal ['a', 'b'], FileList['dir/a.rb', 'dir/b.rb'].pathmap("%n") end end
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ /* This is for styling the menus of the viewsource window */
name: "Inder" designer: "Sorkin Type" license: "OFL" category: "SANS_SERIF" date_added: "2011-12-19" fonts { name: "Inder" style: "normal" weight: 400 filename: "Inder-Regular.ttf" post_script_name: "Inder-Regular" full_name: "Inder" copyright: "Copyright (c) 2010 by Sorkin Type Co ([email protected]) with Reserved Font Name Inder." } subsets: "menu" subsets: "latin" subsets: "latin-ext"
lf lf crlf lf lf
$NetBSD: patch-src_3rdparty_javascriptcore_JavaScriptCore_runtime_JSValue.h,v 1.1 2014/12/30 17:23:47 adam Exp $ --- src/3rdparty/javascriptcore/JavaScriptCore/runtime/JSValue.h.orig 2013-11-27 01:00:27.000000000 +0000 +++ src/3rdparty/javascriptcore/JavaScriptCore/runtime/JSValue.h @@ -491,7 +491,7 @@ namespace JSC { u.asBits.tag = CellTag; else u.asBits.tag = EmptyValueTag; - u.asBits.payload = reinterpret_cast<int32_t>(ptr); + u.asBits.payload = reinterpret_cast<intptr_t>(ptr); #if ENABLE(JSC_ZOMBIES) ASSERT(!isZombie()); #endif @@ -503,7 +503,7 @@ namespace JSC { u.asBits.tag = CellTag; else u.asBits.tag = EmptyValueTag; - u.asBits.payload = reinterpret_cast<int32_t>(const_cast<JSCell*>(ptr)); + u.asBits.payload = reinterpret_cast<intptr_t>(const_cast<JSCell*>(ptr)); #if ENABLE(JSC_ZOMBIES) ASSERT(!isZombie()); #endif
/** * Copyright © Magento, Inc. All rights reserved. * See COPYING.txt for license details. */ // // Styles for emails // _____________________________________________ // See comments in source/_email-base.less for explanation of email styles @import 'source/lib/_lib.less'; // Global lib @import 'source/lib/variables/_email.less'; // Global email variables @import 'source/_theme.less'; // Global variables override @import 'source/_variables.less'; // Local theme variables @import 'source/_email-variables.less'; // Theme variables for emails // Import these file by reference so that only the styles included in the .email-non-inline() and .media-width() // mixins will be output. All other styles will be output via the email-inline.less file. @import (reference) 'source/_email-base.less'; // Contains primary email styles @import (reference) 'source/_email-extend.less'; // Contains theme-specific adjustments to email styles // // Module-specific styles for emails // --------------------------------------------- // Import files using (reference) for the same reason stated above //@magento_import (reference) 'source/_email.less'; // // Media queries collector // --------------------------------------------- @import 'source/lib/_responsive.less'; // It is impossible to apply media queries as inline styles, so all media queries must be output in this file @media-target: 'all'; // // Non-inline styles // --------------------------------------------- // Include all styles that have been indicated as non-inline styles .email-non-inline();
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. import os from setuptools import setup, find_packages def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() NAME = 'recognizers-text-choice' VERSION = '1.0.0.a0' REQUIRES = ['recognizers-text', 'regex', 'grapheme'] setup( name=NAME, version=VERSION, url='https://github.com/Microsoft/Recognizers-Text', author='Microsoft', description='recognizers-text-choice README', keywords=['nlp', 'nlp-entity-extraction', 'entity-extraction', 'parser-library'], long_description=read('README.rst'), license='MIT', packages=find_packages(), install_requires=REQUIRES, classifiers=[ 'Programming Language :: Python :: 3.6', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Development Status :: 3 - Alpha', 'Topic :: Scientific/Engineering :: Artificial Intelligence', ] )
# See the OWNERS docs at https://go.k8s.io/owners reviewers: - deads2k - mbohlool
<?xml version="1.0" ?> <annotation> <folder>widerface</folder> <filename>18--Concerts_18_Concerts_Concerts_18_784.jpg</filename> <source> <database>wider face Database</database> <annotation>PASCAL VOC2007</annotation> <image>flickr</image> <flickrid>-1</flickrid> </source> <owner> <flickrid>yanyu</flickrid> <name>yanyu</name> </owner> <size> <width>1024</width> <height>1301</height> <depth>3</depth> </size> <segmented>0</segmented> <object> <name>face</name> <pose>Unspecified</pose> <truncated>1</truncated> <difficult>0</difficult> <bndbox> <xmin>249</xmin> <ymin>277</ymin> <xmax>615</xmax> <ymax>678</ymax> </bndbox> </object> </annotation>
#include "transfer_file_manager.h"

namespace nim_comp
{

TransferFileManager::TransferFileManager()
{
}

TransferFileManager::~TransferFileManager()
{
}

bool TransferFileManager::AddItem(const std::string& session_box_id, std::string& transfer_file_session_id, MsgBubbleItem* msg_bubble)
{
    auto iterator = transfer_session_list_.find(transfer_file_session_id);
    if (iterator == transfer_session_list_.end())
    {
        TransferFileInfo transfer_file_info;
        transfer_file_info.session_box_ = session_box_id;
        transfer_file_info.bubble_weakflag_ = msg_bubble->GetWeakFlag();
        transfer_file_info.bubble_ = msg_bubble;
        // Insert a new entry if this transfer session does not exist yet
        transfer_session_list_[transfer_file_session_id] = transfer_file_info;
    }
    else
    {
        return false;
    }

    return true;
}

bool TransferFileManager::HasTransferTask()
{
    return transfer_session_list_.size() != 0;
}

bool TransferFileManager::HasTransferTask(const std::string& session_id)
{
    for (auto iterator : transfer_session_list_)
    {
        TransferFileInfo& transfer_file_info = iterator.second;
        if (transfer_file_info.session_box_ == session_id)
        {
            return true;
        }
    }
    return false;
}

void TransferFileManager::RemoveAllTaskBySessionBoxId(const std::string& session_id)
{
    for (auto iterator = transfer_session_list_.begin(); iterator != transfer_session_list_.end(); )
    {
        std::string transfer_file_session_id = iterator->first;
        TransferFileInfo transfer_file_info = iterator->second;
        if (transfer_file_info.session_box_ == session_id)
        {
            iterator = transfer_session_list_.erase(iterator);
            if (transfer_file_info.bubble_->GetSenderId() == session_id)
            {
                nim_p2p::NimP2PDvelopKit::GetInstance()->CancelReceiveFile(const_cast<TransferFileSessionID>(transfer_file_session_id.c_str()));
            }
            else
            {
                nim_p2p::NimP2PDvelopKit::GetInstance()->CancelTransferFile(const_cast<TransferFileSessionID>(transfer_file_session_id.c_str()));
            }
        }
        else
        {
            iterator++;
        }
    }
}

void TransferFileManager::OnStateChangeCallback(const std::string& transfer_file_session_id, enum TransferFileSessionState state)
{
    auto iterator = transfer_session_list_.find(transfer_file_session_id);
    if (iterator != transfer_session_list_.end())
    {
        TransferFileInfo& transfer_file_info = iterator->second;
        if (!transfer_file_info.bubble_weakflag_.expired())
        {
            auto msg_bubble = dynamic_cast<MsgBubbleTransferFile*>(transfer_file_info.bubble_);
            msg_bubble->SetBubbleStatus(state);
        }

        if (IsTransferFileSessionFinalState(state))
        {
            transfer_session_list_.erase(iterator);
        }
    }
}

void TransferFileManager::OnProgressChangeCallback(const std::string& transfer_file_session_id, int transferred, int total)
{
    auto iterator = transfer_session_list_.find(transfer_file_session_id);
    if (iterator != transfer_session_list_.end())
    {
        TransferFileInfo& transfer_file_info = iterator->second;
        if (!transfer_file_info.bubble_weakflag_.expired())
        {
            auto msg_bubble = dynamic_cast<MsgBubbleTransferFile*>(transfer_file_info.bubble_);
            msg_bubble->OnDownloadFileProgressCallback(total, transferred);
        }
    }
}

bool TransferFileManager::IsTransferFileSessionFinalState(TransferFileSessionState state)
{
    switch (state)
    {
    case TransferFileSessionState_Succeeded:
    case TransferFileSessionState_Failed:
    case TransferFileSessionState_CMDTimeout:
    case TransferFileSessionState_ReceiverRejected:
    case TransferFileSessionState_Rejected:
    case TransferFileSessionState_ReceiverCancel:
    case TransferFileSessionState_SenderCancel:
        return true;
    default:
        return false;
    }
}

}
<!--! Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xmlns:py="http://genshi.edgewall.org/" xmlns:xi="http://www.w3.org/2001/XInclude" xmlns:i18n="http://genshi.edgewall.org/i18n" i18n:domain="bhdashboard"> <xi:include href="layout.html" /> <head> <title>$title</title> </head> <body> <xi:include href="bs_btnbar.html" /> <div id="ft" class="row"> <div id="help" class="span8" i18n:msg=""> <span class="label label-info">Note:</span> See <a href="${href.wiki('BloodhoundDashboard')}">BloodhoundDashboard</a> for help on using the dashboard. </div> </div> </body> </html>
//
//  MJRefreshBackStateFooter.m
//  MJRefreshExample
//
//  Created by MJ Lee on 15/6/13.
//  Copyright © 2015 小码哥. All rights reserved.
//

#import "MJRefreshBackStateFooter.h"

@interface MJRefreshBackStateFooter()
{
    /** label that shows the refresh state */
    __unsafe_unretained UILabel *_stateLabel;
}
/** titles for each refresh state */
@property (strong, nonatomic) NSMutableDictionary *stateTitles;
@end

@implementation MJRefreshBackStateFooter
#pragma mark - Lazy loading
- (NSMutableDictionary *)stateTitles
{
    if (!_stateTitles) {
        self.stateTitles = [NSMutableDictionary dictionary];
    }
    return _stateTitles;
}

- (UILabel *)stateLabel
{
    if (!_stateLabel) {
        [self addSubview:_stateLabel = [UILabel label]];
    }
    return _stateLabel;
}

#pragma mark - Public methods
- (void)setTitle:(NSString *)title forState:(MJRefreshState)state
{
    if (title == nil) return;
    self.stateTitles[@(state)] = title;
    self.stateLabel.text = self.stateTitles[@(self.state)];
}

- (NSString *)titleForState:(MJRefreshState)state
{
    return self.stateTitles[@(state)];
}

#pragma mark - Overridden superclass methods
- (void)prepare
{
    [super prepare];

    // Set the default title for each state
    [self setTitle:MJRefreshBackFooterIdleText forState:MJRefreshStateIdle];
    [self setTitle:MJRefreshBackFooterPullingText forState:MJRefreshStatePulling];
    [self setTitle:MJRefreshBackFooterRefreshingText forState:MJRefreshStateRefreshing];
    [self setTitle:MJRefreshBackFooterNoMoreDataText forState:MJRefreshStateNoMoreData];
}

- (void)placeSubviews
{
    [super placeSubviews];

    if (self.stateLabel.constraints.count) return;

    // State label takes up the whole footer
    self.stateLabel.frame = self.bounds;
}

- (void)setState:(MJRefreshState)state
{
    MJRefreshCheckState

    // Update the state title
    self.stateLabel.text = self.stateTitles[@(state)];
}
@end
---
title: "Error Notifications in Airflow"
description: "Managing Error Notifications"
date: 2018-05-21T00:00:00.000Z
slug: "error-notifications-in-airflow"
heroImagePath: null
tags: ["Building DAGs", "Redshift", "IMAP"]
---

# Error Reporting on Airflow

Email notifications are great for monitoring Airflow workflows. They can be sent for failures, successes, and retries.

## Setting Notifications at the DAG level

Notifications set at the DAG level - generally in the `default_args` - filter down to each task in the DAG. By default, `email_on_failure` is set to `True`.

```python
from datetime import datetime

from airflow import DAG

default_args = {
    'owner': 'airflow',
    'start_date': datetime(2018, 1, 30),
    'email': ['[email protected]']
}

with DAG('sample_dag',
         default_args=default_args,
         schedule_interval='@daily',
         catchup=False) as dag:
    ...
```

Any task in this DAG's context will send a failure email to all addresses in the `email` list.

## Different Levels of Notifications

Failure notifications are the most common, but different levels can be set where appropriate. Emails on retries are great for testing whether failures are caused by extraneous factors like load on an external system. If this is the case, consider setting `retry_exponential_backoff` to `True`. [BaseOperator](https://github.com/apache/airflow/blob/60a032f4b829eb41b84c907ff663560d50284989/airflow/models/baseoperator.py#L270)

```python
from datetime import datetime, timedelta

from airflow import DAG

default_args = {
    'owner': 'airflow',
    'start_date': datetime(2018, 1, 30),
    'email': ['[email protected]'],
    'email_on_failure': True,
    'email_on_retry': True,
    'retry_exponential_backoff': True,
    'retry_delay': timedelta(seconds=300),
    'retries': 3
}

with DAG('sample_dag',
         default_args=default_args,
         schedule_interval='@daily',
         catchup=False) as dag:
    ...
```

## Isolating Tasks

For some use cases, it might be helpful to only have failure emails for certain tasks. The BaseOperator that all Airflow Operators inherit from has support for these arguments if you don't want them defined at the DAG level. [BaseOperator](https://github.com/apache/airflow/blob/60a032f4b829eb41b84c907ff663560d50284989/airflow/models/baseoperator.py#L265)

```python
from datetime import datetime

from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator

default_args = {
    'owner': 'airflow',
    'start_date': datetime(2018, 1, 30),
    'email_on_failure': False,
    'email': ['[email protected]'],
    'retries': 1
}

with DAG('sample_dag',
         default_args=default_args,
         schedule_interval='@daily',
         catchup=False) as dag:

    wont_email = DummyOperator(
        task_id='wont_email')

    will_email = DummyOperator(
        task_id='will_email',
        email_on_failure=True)
    ...
```

## Customizing Email Notifications

By default, email notifications have a default format that includes standard information as defined in the [__`email_alert`__](https://github.com/apache/incubator-airflow/blob/master/airflow/models.py#L1949) method of the TaskInstance class.

```python
def email_alert(self, exception):
    task = self.task
    title = "Airflow alert: {self}".format(**locals())
    exception = str(exception).replace('\n', '<br>')
    # For reporting purposes, we report based on 1-indexed,
    # not 0-indexed lists (i.e. Try 1 instead of
    # Try 0 for the first attempt).
    body = (
        "Try {try_number} out of {max_tries}<br>"
        "Exception:<br>{exception}<br>"
        "Log: <a href='{self.log_url}'>Link</a><br>"
        "Host: {self.hostname}<br>"
        "Log file: {self.log_filepath}<br>"
        "Mark success: <a href='{self.mark_success_url}'>Link</a><br>"
    ).format(try_number=self.try_number, max_tries=self.max_tries + 1, **locals())
    send_email(task.email, title, body)
```

This can be modified greatly by simply overriding this method. Try dropping the below into an existing DAG and see what happens.

```python
from airflow.utils.email import send_email
from airflow.hooks import PostgresHook
from airflow.operators.python_operator import PythonOperator


def hello_world(**kwargs):
    ti = kwargs.get('task_instance')
    task = kwargs.get('task')

    def new_email_alert(self, **kwargs):
        title = "TEST MESSAGE: THIS IS A MODIFIED TEST"
        body = ("I've now modified the email alert "
                "to say whatever I want it to say.<br>")
        send_email(task.email, title, body)

    ti.email_alert = new_email_alert

    # intentionally fail the task by calling get_records()
    # without providing positional argument "sql"
    hook = PostgresHook('hook-name')
    return hook.get_records()


t0 = PythonOperator(task_id='hello_world',
                    python_callable=hello_world,
                    provide_context=True,
                    dag=dag)
```

If you want a custom email for another type of operator, you can use `on_failure_callback` and the `send_email` utility provided by Airflow.

```python
from airflow.utils.email import send_email


def failure_email(context):
    task_instance = context['task_instance']
    email_title = "Airflow Task {0} Failed".format(task_instance.task_id)
    email_body = "{0} in {1} failed.".format(task_instance.task_id,
                                             task_instance.dag_id)
    send_email('[email protected]', email_title, email_body)
```

## Setting Up Alerts in Slack

At Astronomer, we drop Airflow notifications in shared Slack channels instead of emails. There are a few ways to accomplish this:

### Adding a Slack Integration

Add this integration: https://slack.com/apps/A0F81496D-email and pick a channel to drop alerts in. The email address generated can be added to the list of emails like any other:

```python
from datetime import datetime

from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator

default_args = {
    'owner': 'airflow',
    'start_date': datetime(2018, 1, 30),
    'email': ['[email protected]']
}

dag = DAG('sample_dag',
          default_args=default_args,
          schedule_interval='@daily',
          catchup=False)

with dag:
    d = DummyOperator(
        task_id='test')
    ...
```

![SlackNotifications](https://assets2.astronomer.io/main/guides/dag_failure_notification.png)

Alternatively, a `SlackOperator` can be used.

```python
t2 = SlackAPIPostOperator(task_id='post_slack_{0}'.format(job['source']),
                          username='ETL',
                          slack_conn_id='slack_conn',
                          text="My job {0} finished".format(job['source']),
                          channel='workflow_status')
```
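To round the guide out, here is a minimal sketch (not from the original guide) of wiring a callback like the `failure_email` function above into a DAG through `on_failure_callback` in `default_args`, so every task reports failures without per-task setup. The DAG id `callback_sample_dag` and the task id `will_call_back` are hypothetical placeholders; the recipient address is the same placeholder used elsewhere in this guide.

```python
from datetime import datetime

from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils.email import send_email


def failure_email(context):
    # Same pattern as above: pull task details out of the context dict Airflow passes in.
    task_instance = context['task_instance']
    send_email('[email protected]',
               "Airflow Task {0} Failed".format(task_instance.task_id),
               "{0} in {1} failed.".format(task_instance.task_id, task_instance.dag_id))


default_args = {
    'owner': 'airflow',
    'start_date': datetime(2018, 1, 30),
    # Every task in this DAG inherits the callback unless it overrides it.
    'on_failure_callback': failure_email
}

with DAG('callback_sample_dag',            # hypothetical DAG id
         default_args=default_args,
         schedule_interval='@daily',
         catchup=False) as dag:

    will_call_back = DummyOperator(task_id='will_call_back')  # hypothetical task
```

Unlike `email_on_failure`, the callback gives you full control over the message format and destination, which is what makes alternatives like the Slack route above possible without the email bridge.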
#!/bin/sh set -e echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" install_framework() { if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then local source="${BUILT_PRODUCTS_DIR}/$1" elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")" elif [ -r "$1" ]; then local source="$1" fi local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" if [ -L "${source}" ]; then echo "Symlinked..." source="$(readlink "${source}")" fi # use filter instead of exclude so missing patterns dont' throw errors echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\"" rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}" local basename basename="$(basename -s .framework "$1")" binary="${destination}/${basename}.framework/${basename}" if ! [ -r "$binary" ]; then binary="${destination}/${basename}" fi # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then strip_invalid_archs "$binary" fi # Resign the code if required by the build settings to avoid unstable apps code_sign_if_enabled "${destination}/$(basename "$1")" # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7. if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then local swift_runtime_libs swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]}) for lib in $swift_runtime_libs; do echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\"" rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}" code_sign_if_enabled "${destination}/${lib}" done fi } # Signs a framework with the provided identity code_sign_if_enabled() { if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then # Use the current code_sign_identitiy echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}" local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'" if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then code_sign_cmd="$code_sign_cmd &" fi echo "$code_sign_cmd" eval "$code_sign_cmd" fi } # Strip invalid architectures strip_invalid_archs() { binary="$1" # Get architectures for current file archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)" stripped="" for arch in $archs; do if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then # Strip non-valid architectures in-place lipo -remove "$arch" -output "$binary" "$binary" || exit 1 stripped="$stripped $arch" fi done if [[ "$stripped" ]]; then echo "Stripped $binary of architectures:$stripped" fi } if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then wait fi
--- name: Feature request about: Suggest an idea for this project labels: --- **Is your feature request related to a problem? Please describe.** A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] **Describe the solution you'd like** A clear and concise description of what you want to happen. **Describe alternatives you've considered** A clear and concise description of any alternative solutions or features you've considered. **Additional context** Add any other context or screenshots about the feature request here.
# # Makefile for WinSock Wrapper (for WinSock 2.0) # # Copyright (c) 2000 Japan Network Information Center. All rights reserved. # # By using this file, you agree to the terms and conditions set forth bellow. # # LICENSE TERMS AND CONDITIONS # # The following License Terms and Conditions apply, unless a different # license is obtained from Japan Network Information Center ("JPNIC"), # a Japanese association, Kokusai-Kougyou-Kanda Bldg 6F, 2-3-4 Uchi-Kanda, # Chiyoda-ku, Tokyo 101-0047, Japan. # # 1. Use, Modification and Redistribution (including distribution of any # modified or derived work) in source and/or binary forms is permitted # under this License Terms and Conditions. # # 2. Redistribution of source code must retain the copyright notices as they # appear in each source code file, this License Terms and Conditions. # # 3. Redistribution in binary form must reproduce the Copyright Notice, # this License Terms and Conditions, in the documentation and/or other # materials provided with the distribution. For the purposes of binary # distribution the "Copyright Notice" refers to the following language: # "Copyright (c) 2000-2002 Japan Network Information Center. All rights reserved." # # 4. The name of JPNIC may not be used to endorse or promote products # derived from this Software without specific prior written approval of # JPNIC. # # 5. Disclaimer/Limitation of Liability: THIS SOFTWARE IS PROVIDED BY JPNIC # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JPNIC BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF # ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. !include <win32.mak> SYSLIBS = $(libcdll) kernel32.lib advapi32.lib user32.lib # # Files to use # HDRS = dlldef.h ..\common\wrapcommon.h SRCS = dllmain.c dllload.obj dllfunc.c dllstub.c OBJS = dllmain.obj dllload.obj dllfunc.obj LIBS = ..\common\wrapcommon.lib ..\..\lib\idnkit.lib ..\..\win\iconv.lib cflags = $(cflags) -I..\..\include # # Targets to Build # TARGETS = ws2_32.dll all : $(TARGETS) ws2_32.dll : ws2_32.def $(OBJS) $(LIBS) $(link) $(dlllflags) /OUT:ws2_32.dll /DEF:ws2_32.def $(OBJS) $(LIBS) $(SYSLIBS) install : $(TARGETS) copy ws2_32.dll ..\bin clean : force -del *.obj -del *.lib -del *.dll -del *.exp # # Dependencies # dllmain.obj : dllmain.c $(HDRS) $(cc) $(cflags) $(cvarsdll) -DDEBUG $*.c dllload.obj : dllload.c $(HDRS) $(cc) $(cflags) $(cvarsdll) -DDEBUG $*.c dllfunc.obj : dllfunc.c dllstub.c $(HDRS) $(cc) $(cflags) $(cvarsdll) -DDEBUG $*.c ws2_32o.lib : ws2_32o.def LIB /DEF:ws2_32o.def /MACHINE:IX86 force: