code (stringlengths 5 to 1.03M) | repo_name (stringlengths 5 to 90) | path (stringlengths 4 to 158) | license (stringclasses, 15 values) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
module Exploration.Logic.ManyValued where
-- | A many-valued logic is an algebraic
-- structure (m, (<+>), not, false), satisfying:
-- (<+>) associative, commutative
-- false neutral to (<+>)
-- not (not x) = x
-- x <+> (not false) = not false
-- not (not x <+> y) <+> y = not (not y <+> x) <+> x
class ManyValued m where
(<+>) :: m -> m -> m
false :: m
not :: m -> m
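-- A hypothetical example instance (not part of the original module): the
-- two-element Boolean algebra satisfies the laws above when (<+>) is
-- disjunction and false is False. 'Prelude.not' is ordinary Boolean negation;
-- the 'not' being defined is the class method declared above.
instance ManyValued Bool where
  (<+>) = (||)
  false = False
  not = Prelude.not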
| SamuelSchlesinger/Exploration | Exploration/Logic/ManyValued.hs | mit | 396 | 0 | 8 | 120 | 52 | 33 | 19 | 5 | 0 |
module Coins where
import Data.Ord
import Data.Set (Set)
import qualified Data.Set as Set
----------------------------------------
type Coin = Integer
data CoinSet = CoinSet {
face :: Coin,
count :: Integer
}
instance Show CoinSet where
show (CoinSet face count) = show face ++ "x" ++ show count
instance Eq CoinSet where
x == y = (face x == face y && count x == count y)
instance Ord CoinSet where
compare = (comparing face) `mappend` (comparing count)
----------------------------------------
type Change = Set CoinSet
-- instance Ord CoinSet where
-- compare = (comparing face) `mappend` (comparing count) | kirhgoff/haskell-sandbox | euler31/Coins.hs | mit | 629 | 0 | 10 | 115 | 182 | 100 | 82 | 15 | 0 |
{-# LANGUAGE
TypeOperators
, FlexibleContexts
, TypeFamilies
, UndecidableInstances #-}
module Calculus.Connectives.Linear where
import Calculus.Expr
import Calculus.Connectives.Simple
import Auxiliary.NameSource
import Data.Functor
import Control.Monad.State
-- Simultaneous conjunction
data SimConj f = SimConj f f
-- Alternative conjunction
data AltConj f = AltConj f f
-- Disjunction (external choice)
data Disj f = Disj f f
-- Linear implication (resource implication)
data ResImpl f = ResImpl f f
-- Unrestricted implication
data ValImpl f = ValImpl f f
-- Unit (trivial goal which requires no resources)
data Unit f = Unit
-- Top (trivial goal which consumes all resources)
data Top f = Top
-- Zero (impossibility)
data Zero f = Zero
-- Modal operation (states that `f` is valid)
data Modal f = Modal f
{-
LinearInput.
-}
type LinearInput = SimConj :+: AltConj :+: Disj :+:
ResImpl :+: ValImpl :+: Unit :+:
Top :+: Zero :+: Modal :+: SimpleInput
type LinearFormula = Formula LinearInput
{-
Functor instances.
-}
instance Functor SimConj where
fmap f (SimConj a b) = SimConj (f a) (f b)
instance Functor AltConj where
fmap f (AltConj a b) = AltConj (f a) (f b)
instance Functor Disj where
fmap f (Disj a b) = Disj (f a) (f b)
instance Functor ResImpl where
fmap f (ResImpl a b) = ResImpl (f a) (f b)
instance Functor ValImpl where
fmap f (ValImpl a b) = ValImpl (f a) (f b)
instance Functor Unit where
fmap _ Unit = Unit
instance Functor Top where
fmap _ Top = Top
instance Functor Zero where
fmap _ Zero = Zero
instance Functor Modal where
fmap f (Modal g) = Modal (f g)
{-
Smart constructors.
-}
(&&:) :: (SimConj :<: f) => Expr f -> Expr f -> Expr f
(&&:) f = inject . SimConj f
($$:) :: (AltConj :<: f) => Expr f -> Expr f -> Expr f
($$:) f = inject . AltConj f
(||:) :: (Disj :<: f) => Expr f -> Expr f -> Expr f
(||:) f = inject . Disj f
(->:) :: (ResImpl :<: f) => Expr f -> Expr f -> Expr f
(->:) f = inject . ResImpl f
(=>:) :: (ValImpl :<: f) => Expr f -> Expr f -> Expr f
(=>:) f = inject . ValImpl f
unit :: (Unit :<: f) => Expr f
unit = inject Unit
top :: (Top :<: f) => Expr f
top = inject Top
zero :: (Zero :<: f) => Expr f
zero = inject Zero
modal :: (Modal :<: f) => Expr f -> Expr f
modal = inject . Modal
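-- A hypothetical sketch (not part of the original module), assuming the usual
-- data-types-a-la-carte (:<:) instances from Calculus.Expr resolve for the
-- right-nested LinearInput sum: a closed goal built only from the connectives
-- defined in this module.
--
-- exampleGoal :: Expr LinearInput
-- exampleGoal = (unit &&: top) ->: modal zero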
{- Lists of synchronous and asynchronous connectives -}
type RightAsyncInput = ResImpl :+: ValImpl :+: AltConj :+: Top :+: ForAll
type RightSyncInput = SimConj :+: Unit :+: Disj :+: Zero :+: Modal :+: Exists
type LeftAsyncInput = RightSyncInput
type LeftSyncInput = RightAsyncInput
{- Render -}
instance Render SimConj where
render (SimConj a b) = renderBinOp "&&" (a, b)
instance Render AltConj where
render (AltConj a b) = renderBinOp "$$" (a, b)
instance Render Disj where
render (Disj a b) = renderBinOp "||" (a, b)
instance Render ResImpl where
render (ResImpl a b) = renderBinOp "->" (a, b)
instance Render ValImpl where
render (ValImpl a b) = renderBinOp "=>" (a, b)
instance Render Unit where
render Unit = return "1"
instance Render Top where
render Top = return "T"
instance Render Zero where
render Zero = return "0"
instance Render Modal where
render (Modal a) =
do ns <- get
return $ "!" ++ pretty ns a
| wowofbob/calculus | Calculus/Connectives/Linear.hs | mit | 3,367 | 0 | 13 | 808 | 1,209 | 635 | 574 | -1 | -1 |
module Jira.API.Authentication.KeyUtils where
import Control.Applicative
import Crypto.Types.PubKey.RSA (PrivateKey (..), PublicKey (..))
import Data.Maybe
import OpenSSL.EVP.PKey (toKeyPair, toPublicKey)
import OpenSSL.PEM (PemPasswordSupply (..),
readPrivateKey, readPublicKey)
import OpenSSL.RSA
openSslRsaPrivateKeyFromPem :: String -> IO RSAKeyPair
openSslRsaPrivateKeyFromPem pemString = fromJust . toKeyPair <$> readPrivateKey pemString PwNone
openSslRsaPublicKeyFromPem :: String -> IO RSAPubKey
openSslRsaPublicKeyFromPem pemString = fromJust . toPublicKey <$> readPublicKey pemString
fromOpenSslPrivateKey :: RSAKeyPair -> PrivateKey
fromOpenSslPrivateKey rsaKey =
let publicKey = fromOpenSslPublicKey rsaKey
in PrivateKey publicKey (rsaD rsaKey) 0 0 0 0 0
fromOpenSslPublicKey :: RSAKey k => k -> PublicKey
fromOpenSslPublicKey rsaKey = PublicKey (rsaSize rsaKey) (rsaN rsaKey) (rsaE rsaKey)
readPemPrivateKey :: String -> IO PrivateKey
readPemPrivateKey = fmap fromOpenSslPrivateKey . openSslRsaPrivateKeyFromPem
readPemPublicKey :: String -> IO PublicKey
readPemPublicKey = fmap fromOpenSslPublicKey . openSslRsaPublicKeyFromPem
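-- | Hypothetical usage sketch (not part of the original module): load a
-- PEM-encoded RSA private key from a file. The file path is the caller's
-- choice and is only an assumption here.
examplePrivateKeyFromFile :: FilePath -> IO PrivateKey
examplePrivateKeyFromFile path = readFile path >>= readPemPrivateKey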
| dsmatter/jira-api | src/Jira/API/Authentication/KeyUtils.hs | mit | 1,275 | 0 | 9 | 254 | 293 | 157 | 136 | 22 | 1 |
Config
{ template = " %StdinReader% } ----==|i|==---- { %battery% %cpu% %memory% %date% "
, commands =
[ Run StdinReader
, Run Battery
[ "--template" , "<acstatus>"
, "--Low" , "20"
, "--High" , "50"
, "--normal" , "green"
, "--low" , "red"
, "--"
, "-o" , "<left>% ~<timeleft>"
, "-O" , "<left>% ..."
, "-i" , ""
] 40
, Run Cpu
[ "--template" , "cpu <total>%"
, "--Low" , "10"
, "--High" , "25"
, "--normal" , "green"
, "--high" , "red"
] 40
, Run Memory
[ "--template" , "mem <usedratio>%"
, "--Low" , "20"
, "--High" , "50"
, "--normal" , "green"
, "--high" , "red"
] 40
, Run Date "%#Z %R %#a %F" "date" 600
]
}
| ysmiraak/home | .xmonad/xmobar.hs | epl-1.0 | 888 | 0 | 9 | 396 | 177 | 108 | 69 | -1 | -1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.UI.GLUT.State
-- Copyright : (c) Sven Panne 2002-2005
-- License : BSD-style (see the file libraries/GLUT/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- GLUT maintains a considerable amount of programmer visible state. Some (but
-- not all) of this state may be directly retrieved.
--
--------------------------------------------------------------------------------
module Graphics.UI.GLUT.State (
-- * State of all windows
windowBorderWidth, windowHeaderHeight,
-- * State of the /current window/
rgba,
BufferDepth, rgbaBufferDepths, colorBufferDepth,
doubleBuffered, stereo,
accumBufferDepths, depthBufferDepth, stencilBufferDepth,
SampleCount, sampleCount, formatID,
fullScreenMode,
-- * GLUT state pertaining to the layers of the /current window/
damaged,
-- * Timing
elapsedTime,
-- * Device information
-- $DeviceInformation
screenSize, screenSizeMM,
hasKeyboard,
ButtonCount, numMouseButtons,
numSpaceballButtons,
DialCount, numDialsAndButtons,
numTabletButtons,
AxisCount, PollRate, joystickInfo,
supportedNumAuxBuffers, supportedSamplesPerPixel,
-- * GLUT information
glutVersion, initState
) where
import Control.Monad
import Data.StateVar
import Foreign.C.Types
import Foreign.Marshal.Alloc
import Foreign.Marshal.Array
import Foreign.Ptr
import Foreign.Storable
import Graphics.Rendering.OpenGL ( GLenum, Size(..) )
import Graphics.UI.GLUT.Overlay
import Graphics.UI.GLUT.QueryUtils
import Graphics.UI.GLUT.Raw
import Graphics.UI.GLUT.Window
--------------------------------------------------------------------------------
-- | Contains 'True' when the current layer of the /current window/ is in RGBA
-- mode, 'False' means color index mode.
rgba :: GettableStateVar Bool
rgba = makeGettableStateVar $ simpleGet i2b glut_WINDOW_RGBA
-- | Bit depth of a buffer
type BufferDepth = Int
-- | Contains the number of red, green, blue, and alpha bits in the color buffer
-- of the /current window\'s/ current layer (0 in color index mode).
rgbaBufferDepths ::
GettableStateVar (BufferDepth, BufferDepth, BufferDepth, BufferDepth)
rgbaBufferDepths = makeGettableStateVar $ do
r <- simpleGet fromIntegral glut_WINDOW_RED_SIZE
g <- simpleGet fromIntegral glut_WINDOW_GREEN_SIZE
b <- simpleGet fromIntegral glut_WINDOW_BLUE_SIZE
a <- simpleGet fromIntegral glut_WINDOW_ALPHA_SIZE
return (r, g, b, a)
-- | Contains the total number of bits in the color buffer of the /current
-- window\'s/ current layer. For an RGBA layer, this is the sum of the red,
-- green, blue, and alpha bits. For an color index layer, this is the number
-- of bits of the color indexes.
colorBufferDepth :: GettableStateVar BufferDepth
colorBufferDepth =
makeGettableStateVar $ simpleGet fromIntegral glut_WINDOW_BUFFER_SIZE
-- | Contains 'True' when the current layer of the /current window/ is double
-- buffered, 'False' otherwise.
doubleBuffered :: GettableStateVar Bool
doubleBuffered = makeGettableStateVar $ simpleGet i2b glut_WINDOW_DOUBLEBUFFER
-- | Contains 'True' when the current layer of the /current window/ is stereo,
-- 'False' otherwise.
stereo :: GettableStateVar Bool
stereo = makeGettableStateVar $ simpleGet i2b glut_WINDOW_STEREO
-- | Contains the number of red, green, blue, and alpha bits in the accumulation
-- buffer of the /current window\'s/ current layer (0 in color index mode).
accumBufferDepths ::
GettableStateVar (BufferDepth, BufferDepth, BufferDepth, BufferDepth)
accumBufferDepths = makeGettableStateVar $ do
r <- simpleGet fromIntegral glut_WINDOW_ACCUM_RED_SIZE
g <- simpleGet fromIntegral glut_WINDOW_ACCUM_GREEN_SIZE
b <- simpleGet fromIntegral glut_WINDOW_ACCUM_BLUE_SIZE
a <- simpleGet fromIntegral glut_WINDOW_ACCUM_ALPHA_SIZE
return (r, g, b, a)
-- | Contains the number of bits in the depth buffer of the /current window\'s/
-- current layer.
depthBufferDepth :: GettableStateVar BufferDepth
depthBufferDepth =
makeGettableStateVar $ simpleGet fromIntegral glut_WINDOW_DEPTH_SIZE
-- | Contains the number of bits in the stencil buffer of the /current
-- window\'s/ current layer.
stencilBufferDepth :: GettableStateVar BufferDepth
stencilBufferDepth =
makeGettableStateVar $ simpleGet fromIntegral glut_WINDOW_STENCIL_SIZE
-- | Number of samples for multisampling
type SampleCount = Int
-- | Contains the number of samples for multisampling for the /current window./
sampleCount :: GettableStateVar SampleCount
sampleCount =
makeGettableStateVar $ simpleGet fromIntegral glut_WINDOW_NUM_SAMPLES
-- | Contains the window system dependent format ID for the current layer of the
-- /current window/. On X11 GLUT implementations, this is the X visual ID. On
-- Win32 GLUT implementations, this is the Win32 Pixel Format Descriptor number.
-- This value is returned for debugging, benchmarking, and testing ease.
formatID :: GettableStateVar Int
formatID = makeGettableStateVar $ simpleGet fromIntegral glut_WINDOW_FORMAT_ID
--------------------------------------------------------------------------------
-- | (/freeglut only/) Contains 'True' if the /current window/ is in full screen
-- mode, 'False' otherwise.
fullScreenMode :: StateVar Bool
fullScreenMode = makeStateVar getFullScreenMode setFullScreenMode
getFullScreenMode :: IO Bool
getFullScreenMode = simpleGet i2b glut_FULL_SCREEN
setFullScreenMode :: Bool -> IO ()
setFullScreenMode newMode = do
oldMode <- getFullScreenMode
unless (newMode == oldMode) fullScreenToggle
--------------------------------------------------------------------------------
-- | Contains the number of milliseconds since
-- 'Graphics.UI.GLUT.Initialization.initialize' was called.
elapsedTime :: GettableStateVar Int
elapsedTime = makeGettableStateVar $ simpleGet fromIntegral glut_ELAPSED_TIME
--------------------------------------------------------------------------------
-- | Contains 'True' if the given plane of the /current window/ has been
-- damaged (by window system activity) since the last display callback was
-- triggered. Calling 'Graphics.UI.GLUT.Window.postRedisplay' or
-- 'Graphics.UI.GLUT.Overlay.postOverlayRedisplay' will not set this 'True'.
damaged :: Layer -> GettableStateVar Bool
damaged l = makeGettableStateVar $ layerGet isDamaged (marshalDamagedLayer l)
where isDamaged d = d /= 0 && d /= -1
marshalDamagedLayer x = case x of
Normal -> glut_NORMAL_DAMAGED
Overlay -> glut_OVERLAY_DAMAGED
--------------------------------------------------------------------------------
-- $DeviceInformation
-- If a device is not available, the following state variables contain
-- 'Nothing', otherwise they return 'Just' the specific device information.
-- Only a screen is always assumed.
--------------------------------------------------------------------------------
-- | The size of the screen in pixels.
screenSize :: GettableStateVar Size
screenSize =
makeGettableStateVar $ do
wpx <- simpleGet fromIntegral glut_SCREEN_WIDTH
hpx <- simpleGet fromIntegral glut_SCREEN_HEIGHT
return $ Size wpx hpx
-- | The size of the screen in millimeters.
screenSizeMM :: GettableStateVar Size
screenSizeMM =
makeGettableStateVar $ do
wmm <- simpleGet fromIntegral glut_SCREEN_WIDTH_MM
hmm <- simpleGet fromIntegral glut_SCREEN_HEIGHT_MM
return $ Size wmm hmm
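-- A hypothetical usage sketch (not part of the original API): read one of
-- these state variables after GLUT has been initialized.
printScreenSize :: IO ()
printScreenSize = do
   Size w h <- get screenSize
   putStrLn ("screen size: " ++ show w ++ "x" ++ show h ++ " px")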
--------------------------------------------------------------------------------
-- | Contains 'True' if a keyboard is present, 'False' otherwise.
hasKeyboard :: GettableStateVar Bool
hasKeyboard = makeGettableStateVar $ deviceGet i2b glut_HAS_KEYBOARD
--------------------------------------------------------------------------------
-- | Number of buttons of an input device
type ButtonCount = Int
-- | Contains 'Just' the number of buttons of an attached mouse or 'Nothing' if
-- there is none.
numMouseButtons :: GettableStateVar (Maybe ButtonCount)
numMouseButtons =
getDeviceInfo glut_HAS_MOUSE $
deviceGet fromIntegral glut_NUM_MOUSE_BUTTONS
--------------------------------------------------------------------------------
-- | Contains 'Just' the number of buttons of the attached Spaceball or 'Nothing'
-- if there is none.
numSpaceballButtons :: GettableStateVar (Maybe ButtonCount)
numSpaceballButtons =
getDeviceInfo glut_HAS_SPACEBALL $
deviceGet fromIntegral glut_NUM_SPACEBALL_BUTTONS
--------------------------------------------------------------------------------
-- | Number of dials of a dial and button box
type DialCount = Int
-- | Contains 'Just' the number of dials and buttons of an attached dial &
-- button box or 'Nothing' if there is none.
numDialsAndButtons :: GettableStateVar (Maybe (DialCount, ButtonCount))
numDialsAndButtons =
getDeviceInfo glut_HAS_DIAL_AND_BUTTON_BOX $ do
d <- deviceGet fromIntegral glut_NUM_DIALS
b <- deviceGet fromIntegral glut_NUM_BUTTON_BOX_BUTTONS
return (d, b)
--------------------------------------------------------------------------------
-- | Contains 'Just' the number of buttons of an attached tablet or 'Nothing' if
-- there is none.
numTabletButtons :: GettableStateVar (Maybe ButtonCount)
numTabletButtons =
getDeviceInfo glut_HAS_TABLET $
deviceGet fromIntegral glut_NUM_TABLET_BUTTONS
--------------------------------------------------------------------------------
-- | Number of axes of a joystick
type AxisCount = Int
-- | The rate at which a joystick is polled (in milliseconds)
type PollRate = Int
-- | Contains 'Just' the number of buttons of an attached joystick, the number
-- of joystick axes, and the rate at which the joystick is polled. Contains
-- 'Nothing' if there is no joystick attached.
joystickInfo :: GettableStateVar (Maybe (ButtonCount, PollRate, AxisCount))
joystickInfo =
getDeviceInfo glut_HAS_JOYSTICK $ do
b <- deviceGet fromIntegral glut_JOYSTICK_BUTTONS
a <- deviceGet fromIntegral glut_JOYSTICK_AXES
r <- deviceGet fromIntegral glut_JOYSTICK_POLL_RATE
return (b, a, r)
-----------------------------------------------------------------------------
-- | (/freeglut only/) Contains a list of the number of auxiliary buffers
-- supported, in increasing order.
supportedNumAuxBuffers :: GettableStateVar [Int]
supportedNumAuxBuffers = getModeValues glut_AUX
-- | (/freeglut only/) Contains a list of the number of samples per pixel
-- supported for multisampling, in increasing order.
supportedSamplesPerPixel :: GettableStateVar [SampleCount]
supportedSamplesPerPixel = getModeValues (fromIntegral glut_MULTISAMPLE)
getModeValues :: Integral a => GLenum -> GettableStateVar [a]
getModeValues what = makeGettableStateVar $
alloca $ \sizeBuffer -> do
valuesBuffer <- glutGetModeValues what sizeBuffer
size <- peek sizeBuffer
fmap (map fromIntegral) $ peekArray (fromIntegral size) valuesBuffer
--------------------------------------------------------------------------------
-- Convenience unmarshalers
i2b :: CInt -> Bool
i2b = (/= 0)
--------------------------------------------------------------------------------
getDeviceInfo :: GLenum -> IO a -> GettableStateVar (Maybe a)
getDeviceInfo dev act =
makeGettableStateVar $ do
hasDevice <- deviceGet i2b dev
if hasDevice then fmap Just act else return Nothing
-----------------------------------------------------------------------------
-- | Contains version of GLUT in the form of
-- @/flavour/ /major/./minor/./patchlevel/@, where @/flavour/@ is one of
-- @GLUT@, @freeglut@ or @OpenGLUT@.
glutVersion :: GettableStateVar String
glutVersion = makeGettableStateVar $ do
let isGLUT = isUnknown "glutSetOption"
isFreeglut = isUnknown "glutSetWindowStayOnTop"
isUnknown = fmap (== nullFunPtr) . getAPIEntryInternal
showVersionPart x = shows (x `mod` 100)
showVersion v = showVersionPart (v `div` 10000) . showChar '.' .
showVersionPart (v `div` 100) . showChar '.' .
showVersionPart v
g <- isGLUT
if g
then return "GLUT 3.7" -- ToDo: just guessing
else do f <- isFreeglut
v <- simpleGet id glut_VERSION
let prefix = if f then "freeglut" else "OpenGLUT"
return $ showString prefix . showChar ' ' . showVersion v $ ""
-----------------------------------------------------------------------------
-- | (/freeglut only/) Contains the thickness of the sizing border around the
-- perimeter of a window that can be resized, in pixels.
windowBorderWidth :: GettableStateVar Int
windowBorderWidth =
makeGettableStateVar (simpleGet fromIntegral glut_WINDOW_BORDER_WIDTH)
-----------------------------------------------------------------------------
-- | (/freeglut only/) Contains the height of the header\/caption area of a
-- window in pixels.
windowHeaderHeight :: GettableStateVar Int
windowHeaderHeight =
makeGettableStateVar (simpleGet fromIntegral glut_WINDOW_HEADER_HEIGHT)
-----------------------------------------------------------------------------
-- | (/freeglut only/) Contains 'True' if GLUT has been initialized
-- with 'Graphics.UI.GLUT.Initialization.initialize' or
-- 'Graphics.UI.GLUT.Initialization.getArgsAndInitialize' and has not yet
-- been de-initialized with 'Graphics.UI.GLUT.Initialization.exit'. Contains
-- 'False' otherwise.
initState :: GettableStateVar Bool
initState = makeGettableStateVar $ simpleGet i2b glut_INIT_STATE
| ducis/haAni | hs/common/Graphics/UI/GLUT/State.hs | gpl-2.0 | 13,668 | 0 | 17 | 2,117 | 1,777 | 969 | 808 | 169 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.IReader
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This module defines a list type and operations on it; it further
-- provides functions which write in and out the list. The goal is to
-- make it easy for the user to store a large number of text buffers
-- and cycle among them, making edits as she goes. The idea is
-- inspired by \"incremental reading\", see
-- <http://en.wikipedia.org/wiki/Incremental_reading>.
module Yi.IReader where
import Control.Exception (SomeException, catch)
import Control.Monad (join, void)
import Data.Binary (Binary, decode, encodeFile)
import qualified Data.ByteString.Char8 as B (ByteString, pack, readFile, unpack)
import qualified Data.ByteString.Lazy.Char8 as BL (fromChunks)
import Data.Default (Default, def)
import Data.Sequence as S (Seq, ViewL (EmptyL, (:<)),
ViewR ((:>)), empty, length,
null, splitAt, viewl, viewr,
(<|), (><), (|>))
import Data.Typeable (Typeable)
import Yi.Buffer.HighLevel (replaceBufferContent, topB)
import Yi.Buffer.Misc (elemsB, getBufferDyn, putBufferDyn)
import Yi.Editor (withCurrentBuffer)
import Yi.Keymap (YiM)
import Yi.Paths (getConfigPath)
import qualified Yi.Rope as R (fromString, toString)
import Yi.Types (YiVariable)
import Yi.Utils (io)
-- | TODO: Why 'B.ByteString'?
type Article = B.ByteString
newtype ArticleDB = ADB { unADB :: Seq Article }
deriving (Typeable, Binary)
instance Default ArticleDB where
def = ADB S.empty
instance YiVariable ArticleDB
-- | Take an 'ArticleDB', and return the first 'Article' and an
-- ArticleDB - *without* that article.
split :: ArticleDB -> (Article, ArticleDB)
split (ADB adb) = case viewl adb of
EmptyL -> (B.pack "", def)
(a :< b) -> (a, ADB b)
-- | Get the first article in the list. We use the list to express
-- relative priority; the first is the most, the last least. We then
-- just cycle through - every article gets equal time.
getLatestArticle :: ArticleDB -> Article
getLatestArticle = fst . split -- we only want the article
-- | We remove the old first article, and we stick it on the end of the
-- list using the presumably modified version.
removeSetLast :: ArticleDB -> Article -> ArticleDB
removeSetLast adb old = ADB (unADB (snd (split adb)) S.|> old)
-- We move the last entry to the entry 'length `div` n' from the
-- beginning; so 'shift 1' would do nothing (e.g. the last index is 50,
-- 50 `div` 1 == 50, so the item would be moved to where it is) 'shift
-- 2' will move it to the middle of the list, though; last index = 50,
-- then 50 `div` 2 will shift the item to index 25, and so on down to
-- 50 `div` 50 - the head of the list/Seq.
shift :: Int -> ArticleDB -> ArticleDB
shift n adb = if n < 2 || lst < 2 then adb else ADB $ (r S.|> lastentry) >< s'
where lst = S.length (unADB adb) - 1
(r,s) = S.splitAt (lst `div` n) (unADB adb)
(s' :> lastentry) = S.viewr s
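-- A small worked example (illustrative only, not part of the original
-- module): for articles [a,b,c,d,e], 'shift 2' computes splitAt (4 `div` 2),
-- giving r = [a,b] and s = [c,d,e], and yields [a,b,e,c,d]; the last article
-- moves to the middle of the sequence.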
-- | Insert a new article with top priority (that is, at the front of the list).
insertArticle :: ArticleDB -> Article -> ArticleDB
insertArticle (ADB adb) new = ADB (new S.<| adb)
-- | Serialize given 'ArticleDB' out.
writeDB :: ArticleDB -> YiM ()
writeDB adb = void $ io . join . fmap (`encodeFile` adb) $ getArticleDbFilename
-- | Read in database from 'getArticleDbFilename' and then parse it
-- into an 'ArticleDB'.
readDB :: YiM ArticleDB
readDB = io $ (getArticleDbFilename >>= r) `catch` returnDefault
where r = fmap (decode . BL.fromChunks . return) . B.readFile
-- We read in with strict bytestrings to guarantee the file is
-- closed, and then we convert it to the lazy bytestring
-- data.binary expects. This is inefficient, but alas...
returnDefault (_ :: SomeException) = return def
-- | Get articles.db database of locations to visit
getArticleDbFilename :: IO FilePath
getArticleDbFilename = getConfigPath "articles.db"
-- | Returns the database as it exists on the disk, and the current Yi
-- buffer contents. Note that the Default typeclass gives us an empty
-- Seq. So first we try the buffer state in the hope we can avoid a
-- very expensive read from disk, and if we find nothing (that is, if
-- we get an empty Seq), only then do we call 'readDB'.
oldDbNewArticle :: YiM (ArticleDB, Article)
oldDbNewArticle = do
saveddb <- withCurrentBuffer getBufferDyn
newarticle <- B.pack . R.toString <$> withCurrentBuffer elemsB
if not $ S.null (unADB saveddb)
then return (saveddb, newarticle)
else readDB >>= \olddb -> return (olddb, newarticle)
-- | Given an 'ArticleDB', dump the scheduled article into the buffer
-- (replacing previous contents).
setDisplayedArticle :: ArticleDB -> YiM ()
setDisplayedArticle newdb = do
let next = getLatestArticle newdb
withCurrentBuffer $ do
replaceBufferContent $ R.fromString (B.unpack next)
topB -- replaceBufferContents moves us to bottom?
putBufferDyn newdb
-- | Go to next one. This ignores the buffer, but it doesn't remove
-- anything from the database. However, the ordering does change.
nextArticle :: YiM ()
nextArticle = do
(oldb,_) <- oldDbNewArticle
-- Ignore buffer, just set the first article last
let newdb = removeSetLast oldb (getLatestArticle oldb)
writeDB newdb
setDisplayedArticle newdb
-- | Delete current article (the article as in the database), and go
-- to next one.
deleteAndNextArticle :: YiM ()
deleteAndNextArticle = do
(oldb,_) <- oldDbNewArticle -- throw away changes
let ndb = ADB $ case viewl (unADB oldb) of -- drop 1st article
EmptyL -> empty
(_ :< b) -> b
writeDB ndb
setDisplayedArticle ndb
-- | The main action. We fetch the old database, we fetch the modified
-- article from the buffer, then we call the function 'updateSetLast'
-- which removes the first article and pushes our modified article to
-- the end of the list.
saveAndNextArticle :: Int -> YiM ()
saveAndNextArticle n = do
(oldb,newa) <- oldDbNewArticle
let newdb = shift n $ removeSetLast oldb newa
writeDB newdb
setDisplayedArticle newdb
-- | Assume the buffer is an entirely new article just imported this
-- second, and save it. We don't want to use 'updateSetLast' since
-- that will erase an article.
saveAsNewArticle :: YiM ()
saveAsNewArticle = do
oldb <- readDB -- make sure we read from disk - we aren't in iread-mode!
(_,newa) <- oldDbNewArticle -- we ignore the fst - the Default is 'empty'
let newdb = insertArticle oldb newa
writeDB newdb
| yi-editor/yi | yi-ireader/src/Yi/IReader.hs | gpl-2.0 | 7,186 | 0 | 15 | 1,738 | 1,296 | 729 | 567 | 94 | 2 |
{- Textures.hs; Mun Hon Cheong ([email protected]) 2005
This module is for loading textures
-}
module Textures where
import Graphics.UI.GLUT
import TGA (readTga)
import Data.Word (Word8)
import Foreign.Marshal.Alloc (free)
-- read a list of images and return a list of textures
-- all images are assumed to be in the TGA image format
getAndCreateTextures :: [String] -> IO [Maybe TextureObject]
getAndCreateTextures fileNames = do
fileNamesExts <- return (map (("tga/" ++) . (++ ".tga")) fileNames)
texData <- mapM readImageC fileNamesExts
texObjs <- mapM createTexture texData
return texObjs
-- read a single texture
getAndCreateTexture :: String -> IO (Maybe TextureObject)
getAndCreateTexture fileName = do
texData <- readImageC ("tga/" ++ fileName ++ ".tga")
texObj <- createTexture texData
return texObj
-- read the image data
readImageC :: String -> IO (Maybe (Size, PixelData Word8))
readImageC path = readTga path
-- creates the texture
createTexture :: (Maybe (Size, PixelData a)) -> IO (Maybe TextureObject)
createTexture (Just ((Size x y), pixels@(PixelData _ _ ptr))) = do
[texName] <- genObjectNames 1 -- generate our texture.
--rowAlignment Unpack $= 1
textureBinding Texture2D $= Just texName -- make our new texture the current texture.
--generateMipmap Texture2D $= Enabled
build2DMipmaps Texture2D RGBA' (fromIntegral x) (fromIntegral y) pixels
textureFilter Texture2D $= ((Linear', Just Nearest), Linear')
--textureWrapMode Texture2D S $= (Repeated, Repeat)
--textureWrapMode Texture2D T $= (Repeated, Repeat)
textureFunction $= Modulate
free ptr
return (Just texName)
createTexture Nothing = return Nothing
| kvelicka/frag | src/Textures.hs | gpl-2.0 | 1,756 | 0 | 13 | 350 | 431 | 220 | 211 | 28 | 1 |
module Language.SMTLib2.Internals.Proof.Verify where
import qualified Language.SMTLib2.Internals.Backend as B
import Language.SMTLib2.Internals.Monad
import Language.SMTLib2.Internals.Embed
import Language.SMTLib2.Internals.Proof
import Language.SMTLib2
import qualified Language.SMTLib2.Internals.Expression as E
import Data.GADT.Compare
import Data.GADT.Show
import Control.Monad.State
import Control.Monad.Except
import qualified Data.Map as Map
verifyZ3Proof :: B.Backend b => B.Proof b -> SMT b ()
verifyZ3Proof pr = do
res <- runExceptT (evalStateT (verifyProof analyzeProof (\name args res -> do
b <- gets backend
verifyZ3Rule (BackendInfo b) name args res) pr) Map.empty)
case res of
Right _ -> return ()
Left err -> error $ "Error in proof: "++err
verifyZ3Rule :: (GetType e,Extract i e,GEq e,Monad m,GShow e)
=> i -> String -> [ProofResult e] -> ProofResult e -> ExceptT String m ()
verifyZ3Rule _ "asserted" [] q = return ()
verifyZ3Rule i "mp" [p,impl] q = case p of
ProofExpr p' -> case q of
ProofExpr q' -> case impl of
ProofExpr (extract i -> Just (Implies (rp ::: rq ::: Nil)))
-> case geq p' rp of
Just Refl -> case geq q' rq of
Just Refl -> return ()
Nothing -> throwError "right hand side of implication doesn't match result"
Nothing -> throwError "left hand side of implication doesn't match argument"
ProofExpr (extract i -> Just (Eq (rp ::: rq ::: Nil)))
-> case geq p' rp of
Just Refl -> case geq q' rq of
Just Refl -> return ()
Nothing -> throwError "right hand side of implication doesn't match result"
Nothing -> throwError "left hand side of implication doesn't match argument"
_ -> throwError "second argument isn't an implication"
_ -> throwError "result type can't be equisatisfiable equality"
_ -> throwError "first argument can't be equisatisfiable equality"
verifyZ3Rule i "reflexivity" [] res = case res of
EquivSat e1 e2 -> case geq e1 e2 of
Just Refl -> return ()
Nothing -> throwError "arguments must be the same"
ProofExpr (extract i -> Just (Eq (x ::: y ::: Nil)))
-> case geq x y of
Just Refl -> return ()
Nothing -> throwError "arguments must be the same"
_ -> throwError "result must be equality"
verifyZ3Rule i "symmetry" [rel] res = case rel of
EquivSat x y -> case res of
EquivSat y' x' -> case geq x x' of
Just Refl -> case geq y y' of
Just Refl -> return ()
Nothing -> throwError "argument mismatch"
Nothing -> throwError "argument mismatch"
_ -> throwError "argument mismatch"
ProofExpr (extract i -> Just (E.App r1 (x ::: y ::: Nil)))
-> case res of
ProofExpr (extract i -> Just (E.App r2 (ry ::: rx ::: Nil)))
-> case geq x rx of
Just Refl -> case geq y ry of
Just Refl -> case geq r1 r2 of
Just Refl -> case r1 of
E.Eq _ _ -> return ()
E.Logic E.And _ -> return ()
E.Logic E.Or _ -> return ()
E.Logic E.XOr _ -> return ()
_ -> throwError "relation is not symmetric"
_ -> throwError "result must be the same relation"
_ -> throwError "argument mismatch"
_ -> throwError "argument mismatch"
_ -> throwError "result must be a relation"
_ -> throwError "argument must be a relation"
--verifyZ3Rule i "transitivity"
verifyZ3Rule i name args res = error $ "Cannot verify rule "++show name++" "++show args++" => "++show res
| hguenther/smtlib2 | Language/SMTLib2/Internals/Proof/Verify.hs | gpl-3.0 | 3,634 | 0 | 24 | 1,002 | 1,187 | 576 | 611 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE Rank2Types #-}
module SNet
( runSNet
, runSNetCustom
, SNet
, SNetIn
, SNetOut
, globIn
, globOut
, dummyIn
, dummyOut
, module SNet.Interfaces
, Variants (..)
, VariantMap (..)
, Pattern (..)
, RecEntry (..)
, Record (Rec)
, RecType (Data)
, CInt (..)
, syncro
, box
, (-->)
, (!!)
, parallel
, (*)
) where
import Prelude hiding ((!!), (*))
import Control.Concurrent
import Control.Exception
import Control.Monad
import Control.Monad.State
import Data.Default
import Foreign.C.Types
import System.IO.Error
import SNet.Interfaces
import SNet.Combinators
import SNet.Network
import SNet.Pattern
import SNet.Record
import SNet.Stream
import SNet.Task
import SNet.Variants
type SNetIn a = MVar a -> Stream -> IO a
type SNetOut a = MonadIO m => MVar a -> m Stream
globIn :: SNetIn a
globIn stop output = do
openStream output
handle eof $
forever $ do
rec <- readLn :: IO (Record Data)
writeStream output rec
where eof e = do
closeStream output
val <- takeMVar stop
if isEOFError e
then return val
else ioError e
globOut :: SNetOut ()
globOut mvar = taskIO () (liftIO . print) (liftIO stop)
where stop = putMVar mvar ()
dummyIn :: [Record Data] -> SNetIn a
dummyIn inputList stop output = do
openStream output
mapM_ (writeStream output) inputList
closeStream output
takeMVar stop
dummyOut :: SNetOut [Record Data]
dummyOut mvar = do
taskIO [] (modify . (:)) (get >>= stop)
where stop = liftIO . putMVar mvar . reverse
runSNetCustom :: SNetIn a -> SNetOut a -> SNet -> IO a
runSNetCustom snetin snetout net = do
stopMVar <- newEmptyMVar
input <- snetout stopMVar >>= spawnSNet net def
snetin stopMVar input
runSNet :: SNet -> IO ()
runSNet = runSNetCustom globIn globOut
| merijn/SNet2.0 | SNet.hs | gpl-3.0 | 1,879 | 3 | 14 | 457 | 645 | 346 | 299 | 82 | 2 |
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Copyright : Herbert Valerio Riedel
-- SPDX-License-Identifier: GPL-3.0-or-later
module CabalEdit
( cabalEditXRev
, cabalSplitAtField
, PkgRev
) where
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS8
import Data.Semigroup
import qualified Distribution.Parsec.Common as C
import qualified Distribution.Parsec.Field as C
import qualified Distribution.Parsec.Parser as C
type PkgRev = Word
-- | Insert or replace existing "x-revision" line
--
-- __NOTE__: This uses 'cabalSplitAtField' and therefore currently
-- supports only simplified (i.e. without use of @{@ @}@ layout
-- tokens) Cabal file grammar
cabalEditXRev :: PkgRev -> ByteString -> ByteString
cabalEditXRev xrev oldcab = pre1 <> mid1 <> xrevLine <> post1
where
(pre0,_,post0) = either (error . show) id $ cabalSplitAtField "x-revision" oldcab
(pre1,mid1,post1) = either (error . show) id $ cabalSplitAtField "version" (pre0 <> post0)
ls = BS8.lines oldcab
xrevLine = "x-revision: " <> BS8.pack (show xrev) <> if isCRLF then "\r\n" else "\n"
-- simple heuristic
isCRLF = case ls of
[] -> False
("":_) -> False
(l1:_) -> BS8.last l1 == '\r'
-- | Split a cabal file into three fragments: before the (first
-- occurence of a field), the field, and the rest after the field.
--
-- This should, in spirit, follow the invariant (assuming there are no parsing failures):
--
-- > cabalSplitAtField someField (pre <> mid <> post) == Right (pre, mid, post)
--
-- If field not found, the middle and trailing fragment will be empty 'ByteString's
--
-- This operation is quite universal; it can be used to remove fields,
-- or insert content before a field or after a field etc
--
-- __NOTE__: This doesn't properly handle layout-mode (i.e. as controlled by the @{@ and @}@ tokens) yet
cabalSplitAtField :: ByteString -- ^ fieldname
-> ByteString -- ^ @.cabal@ description
-> Either String (ByteString,ByteString,ByteString) -- ^ pre, mid, post
cabalSplitAtField fname cab0 = do
fields <- either (Left . show) Right $ C.readFields cab0
case [ (npos, vals) | C.Field (C.Name (C.Position npos _) n) vals <- fields, n == fname ] of
[] -> pure (cab0, BS8.empty, BS8.empty)
(npos, vals):_ -> case vals of
[] -> pure $! go (npos-1) 1
_ -> let C.FieldLine (C.Position pos2 _) _ = last vals
in pure $! go (npos-1) (1+pos2-npos)
where
cab0lines = BS8.lines cab0
-- TODO: detect '{'s in `mid` and fixup k to include the closing '}'
--
-- NB: it's not enough to simply look at the next field's line
-- number, as we don't want 'mid' to include trailing comments
-- which conventionally belong to the next fields; i.e. in case of
-- something like
--
-- name: foo
-- version: 1.2.3
-- x-revision: {
-- 42
-- }
-- -- for reasons this must remain Custom instead of Simple
-- build-type: Custm
-- synopsis: ...
--
-- We want the middle of 'cabalSplitAtField "x-revision"' to stop
-- right after the '}' and *not* include the comment
--
go j k = (BS8.unlines pre, BS8.unlines mid, BS8.unlines post)
where
(pre, midpost) = splitAt j cab0lines
(mid,post) = splitAt k midpost
| hvr/hackage-cli | src/CabalEdit.hs | gpl-3.0 | 3,533 | 0 | 20 | 963 | 666 | 378 | 288 | 37 | 4 |
{-
- Copyright (C) 2013 Alexander Berntsen <[email protected]>
-
- This file is part of bweakfwu.
-
- bweakfwu is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- bweakfwu is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with bweakfwu. If not, see <http://www.gnu.org/licenses/>.
-} module Geometry where
import Graphics.Gloss.Data.Vector (Vector)
-- | 'Width' is the width of an object.
type Width = Float
-- | 'Height' is the height of an object.
type Height = Float
-- | 'Radius' is the radius of an object.
type Radius = Float
-- | 'Normal' is a normal vector.
type Normal = Vector
| plaimi/bweakfwu | src/bweakfwu/Geometry.hs | gpl-3.0 | 1,033 | 0 | 5 | 186 | 45 | 32 | 13 | 6 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
-- | Interface for the RNAcentral REST webservice.
--
module Biobase.RNAlien.RNAcentralHTTP (rnaCentralHTTP,
buildSequenceViaMD5Query,
buildStringViaMD5Query,
getRNACentralEntries,
showRNAcentralAlienEvaluation,
RNAcentralEntryResponse(..),
RNAcentralEntry(..)
) where
import Network.HTTP.Conduit
import qualified Data.ByteString.Lazy.Char8 as L8
--import qualified Data.ByteString.Char8 as BS8
import Network.Socket
import Control.Concurrent
import Data.Text
import Data.Aeson
import GHC.Generics
import qualified Data.Digest.Pure.MD5 as M
import Data.Either
import Biobase.Fasta.Strict
import Biobase.Types.BioSequence
--Datatypes
-- | Data structure for RNAcentral entry response
data RNAcentralEntryResponse = RNAcentralEntryResponse
{
count :: Int,
next :: Maybe Text,
previous :: Maybe Text,
results :: [RNAcentralEntry]
}
deriving (Show, Eq, Generic)
instance ToJSON RNAcentralEntryResponse where
toJSON = genericToJSON defaultOptions
--toEncoding = genericToEncoding defaultOptions
instance FromJSON RNAcentralEntryResponse
data RNAcentralEntry = RNAcentralEntry
{
url :: Text,
rnacentral_id :: Text,
md5 :: Text,
sequence :: Text,
length :: Int,
xrefs :: Text,
publications :: Text
}
deriving (Show, Eq, Generic)
instance ToJSON RNAcentralEntry where
toJSON = genericToJSON defaultOptions
--toEncoding = genericToEncoding defaultOptions
instance FromJSON RNAcentralEntry
-- | Send query and parse return XML
startSession :: String -> IO (Either String RNAcentralEntryResponse)
startSession query' = do
requestXml <- withSocketsDo
$ sendQuery query'
--putStr (L8.unpack requestXml)
let eitherErrorResponse = eitherDecode requestXml :: Either String RNAcentralEntryResponse
return eitherErrorResponse
-- | Send query and return response XML
sendQuery :: String -> IO L8.ByteString
sendQuery query' = do
let address = "http://rnacentral.org/api/v1/rna/"
let request = address ++ query'
--putStrLn request
simpleHttp request
-- | Function for querying the RNAcentral REST interface.
rnaCentralHTTP :: String -> IO (Either String RNAcentralEntryResponse)
rnaCentralHTTP query' =
startSession query'
-- | Function for delayed queries to the RNAcentral REST interface. Enforces the maximum 20 requests per second policy.
delayedRNACentralHTTP :: String -> IO (Either String RNAcentralEntryResponse)
delayedRNACentralHTTP query' = do
threadDelay 55000
startSession query'
getRNACentralEntries :: [String] -> IO [Either String RNAcentralEntryResponse]
getRNACentralEntries queries = do
mapM delayedRNACentralHTTP queries
-- | Build a query from an input sequence
--
-- TODO [chzs] consider using strict bytestring as long as possible.
--
-- TODO [chzs] consider giving useful typelevel names to the types in @Fasta@.
-- One may give a type-level name to the sequence identifier, and an identifier
-- (like @DNA@) to the biosequence type.
buildSequenceViaMD5Query :: Fasta () () -> String
buildSequenceViaMD5Query s = qString
where querySequence = L8.fromStrict . _bioSequence $ _fasta s
querySequenceUreplacedwithT = L8.map bsreplaceUT querySequence
querySequenceU2Twolb = L8.filter ((/= '\n')) querySequenceUreplacedwithT
md5Sequence = M.md5 querySequenceU2Twolb
qString = "?md5=" ++ show md5Sequence
-- Build a query from an input string
buildStringViaMD5Query :: String -> String
buildStringViaMD5Query s = qString
where querySequenceUreplacedwithT = L8.map bsreplaceUT (L8.pack s)
querySequenceU2Twolb = L8.filter ((/= '\n')) querySequenceUreplacedwithT
md5Sequence = M.md5 querySequenceU2Twolb
qString = "?md5=" ++ show md5Sequence
showRNAcentralAlienEvaluation :: [Either String RNAcentralEntryResponse] -> String
showRNAcentralAlienEvaluation responses = output
where resultEntries = Prelude.concatMap results (rights responses)
resulthead = "rnacentral_id\tmd5\tlength\n"
resultentries = Prelude.concatMap showRNAcentralAlienEvaluationLine resultEntries
output = if Prelude.null resultentries then "No matching sequences found in RNAcentral\n" else resulthead ++ resultentries
showRNAcentralAlienEvaluationLine :: RNAcentralEntry -> String
showRNAcentralAlienEvaluationLine entry = unpack (rnacentral_id entry) ++ "\t" ++ unpack (md5 entry) ++ "\t" ++ show (Biobase.RNAlien.RNAcentralHTTP.length entry) ++"\n"
bsreplaceUT :: Char -> Char
bsreplaceUT a
| a == 'U' = 'T'
| otherwise = a
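-- | Hypothetical usage sketch (not part of the original module): look up a
-- sequence string by its MD5 and print the matching RNAcentral identifiers.
exampleMD5Lookup :: String -> IO ()
exampleMD5Lookup inputSequence = do
  response <- rnaCentralHTTP (buildStringViaMD5Query inputSequence)
  putStr (showRNAcentralAlienEvaluation [response])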
| eggzilla/RNAlien | Biobase/RNAlien/RNAcentralHTTP.hs | gpl-3.0 | 4,723 | 0 | 12 | 885 | 893 | 485 | 408 | 87 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Storage.Buckets.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Patches a bucket. Changes to the bucket will be readable immediately
-- after writing, but configuration changes may take time to propagate.
--
-- /See:/ <https://developers.google.com/storage/docs/json_api/ Cloud Storage JSON API Reference> for @storage.buckets.patch@.
module Network.Google.Resource.Storage.Buckets.Patch
(
-- * REST Resource
BucketsPatchResource
-- * Creating a Request
, bucketsPatch
, BucketsPatch
-- * Request Lenses
, bpIfMetagenerationMatch
, bpPredefinedACL
, bpBucket
, bpPayload
, bpPredefinedDefaultObjectACL
, bpUserProject
, bpIfMetagenerationNotMatch
, bpProjection
, bpProvisionalUserProject
) where
import Network.Google.Prelude
import Network.Google.Storage.Types
-- | A resource alias for @storage.buckets.patch@ method which the
-- 'BucketsPatch' request conforms to.
type BucketsPatchResource =
"storage" :>
"v1" :>
"b" :>
Capture "bucket" Text :>
QueryParam "ifMetagenerationMatch" (Textual Int64) :>
QueryParam "predefinedAcl" BucketsPatchPredefinedACL
:>
QueryParam "predefinedDefaultObjectAcl"
BucketsPatchPredefinedDefaultObjectACL
:>
QueryParam "userProject" Text :>
QueryParam "ifMetagenerationNotMatch" (Textual Int64)
:>
QueryParam "projection" BucketsPatchProjection :>
QueryParam "provisionalUserProject" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Bucket :> Patch '[JSON] Bucket
-- | Patches a bucket. Changes to the bucket will be readable immediately
-- after writing, but configuration changes may take time to propagate.
--
-- /See:/ 'bucketsPatch' smart constructor.
data BucketsPatch =
BucketsPatch'
{ _bpIfMetagenerationMatch :: !(Maybe (Textual Int64))
, _bpPredefinedACL :: !(Maybe BucketsPatchPredefinedACL)
, _bpBucket :: !Text
, _bpPayload :: !Bucket
, _bpPredefinedDefaultObjectACL :: !(Maybe BucketsPatchPredefinedDefaultObjectACL)
, _bpUserProject :: !(Maybe Text)
, _bpIfMetagenerationNotMatch :: !(Maybe (Textual Int64))
, _bpProjection :: !(Maybe BucketsPatchProjection)
, _bpProvisionalUserProject :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'BucketsPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bpIfMetagenerationMatch'
--
-- * 'bpPredefinedACL'
--
-- * 'bpBucket'
--
-- * 'bpPayload'
--
-- * 'bpPredefinedDefaultObjectACL'
--
-- * 'bpUserProject'
--
-- * 'bpIfMetagenerationNotMatch'
--
-- * 'bpProjection'
--
-- * 'bpProvisionalUserProject'
bucketsPatch
:: Text -- ^ 'bpBucket'
-> Bucket -- ^ 'bpPayload'
-> BucketsPatch
bucketsPatch pBpBucket_ pBpPayload_ =
BucketsPatch'
{ _bpIfMetagenerationMatch = Nothing
, _bpPredefinedACL = Nothing
, _bpBucket = pBpBucket_
, _bpPayload = pBpPayload_
, _bpPredefinedDefaultObjectACL = Nothing
, _bpUserProject = Nothing
, _bpIfMetagenerationNotMatch = Nothing
, _bpProjection = Nothing
, _bpProvisionalUserProject = Nothing
}
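-- | Hypothetical example (not part of the generated API): a minimal request
-- value for patching a bucket. The bucket name used here is an assumption and
-- the payload is supplied by the caller; optional fields can then be set with
-- the lenses below.
exampleBucketsPatch :: Bucket -> BucketsPatch
exampleBucketsPatch payload = bucketsPatch "example-bucket" payload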
-- | Makes the return of the bucket metadata conditional on whether the
-- bucket\'s current metageneration matches the given value.
bpIfMetagenerationMatch :: Lens' BucketsPatch (Maybe Int64)
bpIfMetagenerationMatch
= lens _bpIfMetagenerationMatch
(\ s a -> s{_bpIfMetagenerationMatch = a})
. mapping _Coerce
-- | Apply a predefined set of access controls to this bucket.
bpPredefinedACL :: Lens' BucketsPatch (Maybe BucketsPatchPredefinedACL)
bpPredefinedACL
= lens _bpPredefinedACL
(\ s a -> s{_bpPredefinedACL = a})
-- | Name of a bucket.
bpBucket :: Lens' BucketsPatch Text
bpBucket = lens _bpBucket (\ s a -> s{_bpBucket = a})
-- | Multipart request metadata.
bpPayload :: Lens' BucketsPatch Bucket
bpPayload
= lens _bpPayload (\ s a -> s{_bpPayload = a})
-- | Apply a predefined set of default object access controls to this bucket.
bpPredefinedDefaultObjectACL :: Lens' BucketsPatch (Maybe BucketsPatchPredefinedDefaultObjectACL)
bpPredefinedDefaultObjectACL
= lens _bpPredefinedDefaultObjectACL
(\ s a -> s{_bpPredefinedDefaultObjectACL = a})
-- | The project to be billed for this request. Required for Requester Pays
-- buckets.
bpUserProject :: Lens' BucketsPatch (Maybe Text)
bpUserProject
= lens _bpUserProject
(\ s a -> s{_bpUserProject = a})
-- | Makes the return of the bucket metadata conditional on whether the
-- bucket\'s current metageneration does not match the given value.
bpIfMetagenerationNotMatch :: Lens' BucketsPatch (Maybe Int64)
bpIfMetagenerationNotMatch
= lens _bpIfMetagenerationNotMatch
(\ s a -> s{_bpIfMetagenerationNotMatch = a})
. mapping _Coerce
-- | Set of properties to return. Defaults to full.
bpProjection :: Lens' BucketsPatch (Maybe BucketsPatchProjection)
bpProjection
= lens _bpProjection (\ s a -> s{_bpProjection = a})
-- | The project to be billed for this request if the target bucket is
-- requester-pays bucket.
bpProvisionalUserProject :: Lens' BucketsPatch (Maybe Text)
bpProvisionalUserProject
= lens _bpProvisionalUserProject
(\ s a -> s{_bpProvisionalUserProject = a})
instance GoogleRequest BucketsPatch where
type Rs BucketsPatch = Bucket
type Scopes BucketsPatch =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/devstorage.full_control"]
requestClient BucketsPatch'{..}
= go _bpBucket _bpIfMetagenerationMatch
_bpPredefinedACL
_bpPredefinedDefaultObjectACL
_bpUserProject
_bpIfMetagenerationNotMatch
_bpProjection
_bpProvisionalUserProject
(Just AltJSON)
_bpPayload
storageService
where go
= buildClient (Proxy :: Proxy BucketsPatchResource)
mempty
| brendanhay/gogol | gogol-storage/gen/Network/Google/Resource/Storage/Buckets/Patch.hs | mpl-2.0 | 6,993 | 0 | 20 | 1,605 | 989 | 571 | 418 | 144 | 1 |
-- | This module exports shortcuts for some of the JVM instructions (which are
-- defined in JVM.Assembler). These functions take Constants, put them into the
-- constant pool and generate the instruction using the index of the constant
-- in the pool.
module JVM.Builder.Instructions where
import Data.Word
import qualified Data.ByteString.Lazy as B
import Codec.Binary.UTF8.String (encodeString)
import Data.String
import JVM.ClassFile
import JVM.Assembler
import JVM.Builder.Monad
nop :: Generator e g => g e ()
nop = i0 NOP
aconst_null :: Generator e g => g e ()
aconst_null = i0 ACONST_NULL
iconst_m1 :: Generator e g => g e ()
iconst_m1 = i0 ICONST_M1
iconst_0 :: Generator e g => g e ()
iconst_0 = i0 ICONST_0
iconst_1 :: Generator e g => g e ()
iconst_1 = i0 ICONST_1
iconst_2 :: Generator e g => g e ()
iconst_2 = i0 ICONST_2
iconst_3 :: Generator e g => g e ()
iconst_3 = i0 ICONST_3
iconst_4 :: Generator e g => g e ()
iconst_4 = i0 ICONST_4
iconst_5 :: Generator e g => g e ()
iconst_5 = i0 ICONST_5
lconst_0 :: Generator e g => g e ()
lconst_0 = i0 LCONST_0
lconst_1 :: Generator e g => g e ()
lconst_1 = i0 LCONST_1
fconst_0 :: Generator e g => g e ()
fconst_0 = i0 FCONST_0
fconst_1 :: Generator e g => g e ()
fconst_1 = i0 FCONST_1
fconst_2 :: Generator e g => g e ()
fconst_2 = i0 FCONST_2
dconst_0 :: Generator e g => g e ()
dconst_0 = i0 DCONST_0
dconst_1 :: Generator e g => g e ()
dconst_1 = i0 DCONST_1
bipush :: Generator e g => Word8 -> g e ()
bipush x = i0 (BIPUSH x)
sipush :: Generator e g => Word16 -> g e ()
sipush x = i0 (SIPUSH x)
ldc1 :: Generator e g => Constant Direct -> g e ()
ldc1 x = i8 LDC1 x
ldc2 :: Generator e g => Constant Direct -> g e ()
ldc2 x = i1 LDC2 x
ldc2w :: Generator e g => Constant Direct -> g e ()
ldc2w x = i1 LDC2W x
iload :: Generator e g => Constant Direct -> g e ()
iload x = i8 ILOAD x
lload :: Generator e g => Constant Direct -> g e ()
lload x = i8 LLOAD x
fload :: Generator e g => Constant Direct -> g e ()
fload x = i8 FLOAD x
dload :: Generator e g => Constant Direct -> g e ()
dload x = i8 DLOAD x
aload :: Generator e g => Constant Direct -> g e ()
aload x = i8 ALOAD x
iload_ :: Generator e g => IMM -> g e ()
iload_ x = i0 (ILOAD_ x)
lload_ :: Generator e g => IMM -> g e ()
lload_ x = i0 (LLOAD_ x)
fload_ :: Generator e g => IMM -> g e ()
fload_ x = i0 (FLOAD_ x)
dload_ :: Generator e g => IMM -> g e ()
dload_ x = i0 (DLOAD_ x)
aload_ :: Generator e g => IMM -> g e ()
aload_ x = i0 (ALOAD_ x)
iaload :: Generator e g => g e ()
iaload = i0 IALOAD
laload :: Generator e g => g e ()
laload = i0 LALOAD
faload :: Generator e g => g e ()
faload = i0 FALOAD
daload :: Generator e g => g e ()
daload = i0 DALOAD
aaload :: Generator e g => g e ()
aaload = i0 AALOAD
caload :: Generator e g => g e ()
caload = i0 CALOAD
saload :: Generator e g => g e ()
saload = i0 SALOAD
istore :: Generator e g => Constant Direct -> g e ()
istore x = i8 ISTORE x
lstore :: Generator e g => Constant Direct -> g e ()
lstore x = i8 LSTORE x
fstore :: Generator e g => Constant Direct -> g e ()
fstore x = i8 FSTORE x
dstore :: Generator e g => Constant Direct -> g e ()
dstore x = i8 DSTORE x
astore :: Generator e g => Constant Direct -> g e ()
astore x = i8 ASTORE x
istore_ :: Generator e g => Word8 -> g e ()
istore_ x = i0 (ISTORE x)
lstore_ :: Generator e g => Word8 -> g e ()
lstore_ x = i0 (LSTORE x)
fstore_ :: Generator e g => Word8 -> g e ()
fstore_ x = i0 (FSTORE x)
dstore_ :: Generator e g => Word8 -> g e ()
dstore_ x = i0 (DSTORE x)
astore_ :: Generator e g => Word8 -> g e ()
astore_ x = i0 (ASTORE x)
iastore :: Generator e g => g e ()
iastore = i0 IASTORE
lastore :: Generator e g => g e ()
lastore = i0 LASTORE
fastore :: Generator e g => g e ()
fastore = i0 FASTORE
dastore :: Generator e g => g e ()
dastore = i0 DASTORE
aastore :: Generator e g => g e ()
aastore = i0 AASTORE
bastore :: Generator e g => g e ()
bastore = i0 BASTORE
castore :: Generator e g => g e ()
castore = i0 CASTORE
sastore :: Generator e g => g e ()
sastore = i0 SASTORE
pop :: Generator e g => g e ()
pop = i0 POP
pop2 :: Generator e g => g e ()
pop2 = i0 POP2
dup :: Generator e g => g e ()
dup = i0 DUP
dup_x1 :: Generator e g => g e ()
dup_x1 = i0 DUP_X1
dup_x2 :: Generator e g => g e ()
dup_x2 = i0 DUP_X2
dup2 :: Generator e g => g e ()
dup2 = i0 DUP2
dup2_x1 :: Generator e g => g e ()
dup2_x1 = i0 DUP2_X1
dup2_x2 :: Generator e g => g e ()
dup2_x2 = i0 DUP2_X2
swap :: Generator e g => g e ()
swap = i0 SWAP
iadd :: Generator e g => g e ()
iadd = i0 IADD
ladd :: Generator e g => g e ()
ladd = i0 LADD
fadd :: Generator e g => g e ()
fadd = i0 FADD
dadd :: Generator e g => g e ()
dadd = i0 DADD
isub :: Generator e g => g e ()
isub = i0 ISUB
lsub :: Generator e g => g e ()
lsub = i0 LSUB
fsub :: Generator e g => g e ()
fsub = i0 FSUB
dsub :: Generator e g => g e ()
dsub = i0 DSUB
imul :: Generator e g => g e ()
imul = i0 IMUL
lmul :: Generator e g => g e ()
lmul = i0 LMUL
fmul :: Generator e g => g e ()
fmul = i0 FMUL
dmul :: Generator e g => g e ()
dmul = i0 DMUL
idiv :: Generator e g => g e ()
idiv = i0 IDIV
ldiv :: Generator e g => g e ()
ldiv = i0 LDIV
fdiv :: Generator e g => g e ()
fdiv = i0 FDIV
ddiv :: Generator e g => g e ()
ddiv = i0 DDIV
irem :: Generator e g => g e ()
irem = i0 IREM
lrem :: Generator e g => g e ()
lrem = i0 LREM
frem :: Generator e g => g e ()
frem = i0 FREM
drem :: Generator e g => g e ()
drem = i0 DREM
ineg :: Generator e g => g e ()
ineg = i0 INEG
lneg :: Generator e g => g e ()
lneg = i0 LNEG
fneg :: Generator e g => g e ()
fneg = i0 FNEG
dneg :: Generator e g => g e ()
dneg = i0 DNEG
ishl :: Generator e g => g e ()
ishl = i0 ISHL
lshl :: Generator e g => g e ()
lshl = i0 LSHL
ishr :: Generator e g => g e ()
ishr = i0 ISHR
lshr :: Generator e g => g e ()
lshr = i0 LSHR
iushr :: Generator e g => g e ()
iushr = i0 IUSHR
lushr :: Generator e g => g e ()
lushr = i0 LUSHR
iand :: Generator e g => g e ()
iand = i0 IAND
land :: Generator e g => g e ()
land = i0 LAND
ior :: Generator e g => g e ()
ior = i0 IOR
lor :: Generator e g => g e ()
lor = i0 LOR
ixor :: Generator e g => g e ()
ixor = i0 IXOR
lxor :: Generator e g => g e ()
lxor = i0 LXOR
iinc :: Generator e g => Word8 -> Word8 -> g e ()
iinc x y = i0 (IINC x y)
i2l :: Generator e g => g e ()
i2l = i0 I2L
i2f :: Generator e g => g e ()
i2f = i0 I2F
i2d :: Generator e g => g e ()
i2d = i0 I2D
l2i :: Generator e g => g e ()
l2i = i0 L2I
l2f :: Generator e g => g e ()
l2f = i0 L2F
l2d :: Generator e g => g e ()
l2d = i0 L2D
f2i :: Generator e g => g e ()
f2i = i0 F2I
f2l :: Generator e g => g e ()
f2l = i0 F2L
f2d :: Generator e g => g e ()
f2d = i0 F2D
d2i :: Generator e g => g e ()
d2i = i0 D2I
d2l :: Generator e g => g e ()
d2l = i0 D2L
d2f :: Generator e g => g e ()
d2f = i0 D2F
i2b :: Generator e g => g e ()
i2b = i0 I2B
i2c :: Generator e g => g e ()
i2c = i0 I2C
i2s :: Generator e g => g e ()
i2s = i0 I2S
lcmp :: Generator e g => g e ()
lcmp = i0 LCMP
-- | Wide instruction
wide :: Generator e g => (Word8 -> Instruction) -> Constant Direct -> g e ()
wide fn c = do
ix <- addToPool c
let ix0 = fromIntegral (ix `div` 0x100) :: Word8
ix1 = fromIntegral (ix `mod` 0x100) :: Word8
i0 (WIDE ix0 $ fn ix1)
new :: Generator e g => B.ByteString -> g e ()
new cls =
i1 NEW (CClass cls)
newArray :: Generator e g => ArrayType -> g e ()
newArray t =
i0 (NEWARRAY $ atype2byte t)
allocNewArray :: Generator e g => B.ByteString -> g e ()
allocNewArray cls =
i1 ANEWARRAY (CClass cls)
invokeVirtual :: Generator e g => B.ByteString -> NameType (Method Direct) -> g e ()
invokeVirtual cls sig =
i1 INVOKEVIRTUAL (CMethod cls sig)
invokeStatic :: Generator e g => B.ByteString -> NameType (Method Direct) -> g e ()
invokeStatic cls sig =
i1 INVOKESTATIC (CMethod cls sig)
invokeSpecial :: Generator e g => B.ByteString -> NameType (Method Direct) -> g e ()
invokeSpecial cls sig =
i1 INVOKESPECIAL (CMethod cls sig)
getStaticField :: Generator e g => B.ByteString -> NameType (Field Direct) -> g e ()
getStaticField cls sig =
i1 GETSTATIC (CField cls sig)
loadString :: Generator e g => String -> g e ()
loadString str =
i8 LDC1 (CString $ fromString $ encodeString $ str)
allocArray :: Generator e g => B.ByteString -> g e ()
allocArray cls =
i1 ANEWARRAY (CClass cls)
| MateVM/hs-java | JVM/Builder/Instructions.hs | lgpl-3.0 | 8,558 | 0 | 12 | 2,295 | 4,424 | 2,134 | 2,290 | 277 | 1 |
module Data.List.Tools (
takeUntil
, dropUntil
, mulLists
, defaultElem
, isIncludedElem
, setAt
, modifyAt
) where
takeUntil :: (a -> Bool) -> [a] -> [a]
dropUntil :: (a -> Bool) -> [a] -> [a]
takeUntil _ [] = []
takeUntil p (x:xs)
| p x = [x]
| otherwise = x : takeUntil p xs
dropUntil _ [] = []
dropUntil p (x:xs)
| p x = xs
| otherwise = dropUntil p xs
mulLists :: [[a]] -> [[a]]
mulLists [] = [[]]
mulLists (xs:xss) = [ x:xs_ | x <- xs, xs_ <- mulLists xss ]
defaultElem :: a -> [a] -> [a]
defaultElem dflt [] = [ dflt ]
defaultElem _ lst = lst
isIncludedElem :: Eq a => [a] -> [a] -> Bool
isIncludedElem lst1 lst2 = and $ map (flip elem lst2) lst1
setAt :: [a] -> Int -> a -> [a]
setAt xs i x = take i xs ++ [x] ++ drop (i + 1) xs
modifyAt :: [a] -> Int -> (a -> a) -> [a]
modifyAt xs i f = take i xs ++ [f $ xs !! i] ++ drop (i + 1) xs
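-- Illustrative examples (added, not from the original source):
--
-- > takeUntil (> 2) [1,2,3,4] == [1,2,3]
-- > setAt [1,2,3] 1 9 == [1,9,3]
-- > modifyAt [1,2,3] 1 (+10) == [1,12,3]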
| YoshikuniJujo/yjtools_haskell | Data/List/Tools.hs | lgpl-3.0 | 910 | 0 | 9 | 262 | 512 | 271 | 241 | 30 | 1 |
{-# LANGUAGE NamedFieldPuns,RecordWildCards #-}
module StearnsWharf.XML.Common where
import Data.Maybe (fromJust)
import qualified Text.XML.Light as X
import qualified Data.List as LU
type AttValPair = (String,String)
toQName :: String -> X.QName
toQName s = X.QName s Nothing Nothing
xmlAttr :: String -> X.Element -> Maybe String
xmlAttr s el = X.findAttr (toQName s) el
xmlElement :: String -> X.Element -> Maybe X.Element
xmlElement s doc = X.findElement (X.unqual s) doc
xmlElements :: String -> X.Element -> [X.Element]
xmlElements s doc = X.findElements (X.unqual s) doc
getAttValPair :: X.Attr -> AttValPair
getAttValPair attr = (k,v)
where k = X.qName $ X.attrKey attr
v = X.attrVal attr
findAttVal :: [AttValPair] -> String -> String
findAttVal pairs aname = snd hit
where (Just hit) = LU.find (\v -> (fst v) == aname) pairs
maybeFindAttVal :: [AttValPair] -> String -> Maybe String
maybeFindAttVal pairs aname = result
where hit = LU.find (\v -> (fst v) == aname) pairs
result | hit == Nothing = Nothing
| otherwise = Just (snd $ fromJust hit)
| baalbek/stearnswharf | src/StearnsWharf/XML/Common.hs | lgpl-3.0 | 1,118 | 0 | 12 | 230 | 425 | 224 | 201 | 26 | 1 |
module PutJson where
import Data.List (intercalate)
import SimpleJSON
renderJValue :: JValue -> String
renderJValue (JString s) = show s
renderJValue (JNumber n) = show n
renderJValue (JBool True) = "true"
renderJValue (JBool False) = "false"
renderJValue JNull = "null"
renderJValue (JObject o) = "{" ++ pairs o ++ "}"
where pairs [] = ""
pairs ps = intercalate ", " $ map renderPair ps
renderPair (k, v) = show k ++ ": " ++ renderJValue v
renderJValue (JArray a) = "[" ++ values a ++ "]"
where values [] = ""
values vs = intercalate ", " $ map renderJValue vs
putJValue :: JValue -> IO()
putJValue v = putStrLn (renderJValue v)
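-- Illustrative examples (added, not from the original source):
--
-- > renderJValue (JArray [JBool True, JNull]) == "[true, null]"
-- > renderJValue (JObject [("ok", JBool False)]) == "{\"ok\": false}"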
| EricYT/Haskell | src/real_haskell/chapter-5/PutJSON.hs | apache-2.0 | 682 | 0 | 9 | 164 | 269 | 133 | 136 | 18 | 3 |
module Code30_Parity where
import Code30_Loopless hiding (bop,wop)
data Spider' = Node' (Bool,Bool) Int [Leg']
data Leg' = Dn' Spider' | Up' Spider'
decorate :: Spider -> Spider'
decorate (Node a legs) = node' a (map (mapLeg decorate) legs)
mapLeg :: (Spider -> Spider') -> Leg -> Leg'
mapLeg f (Up x) = Up' (f x)
mapLeg f (Dn x) = Dn' (f x)
node' :: Int -> [Leg'] -> Spider'
node' a legs = Node' (foldr op (True,True) legs) a legs
op :: Leg' -> (Bool, Bool) -> (Bool, Bool)
op (Up' (Node' (w,b) _ _)) (w',b') = (w /= b && w', b && b')
op (Dn' (Node' (w,b) _ _)) (w',b') = (w && w', w /= b && b')
bop,wop :: Leg' -> [Int] -> [Int]
bop (Up' (Node' (_,b) _ legs)) cs
= reverse (foldr bop (revif b cs) legs)
bop (Dn' (Node' (w,_) a legs)) cs
= foldr wop (revif (not w ) cs) legs ++ [a] ++ foldr bop (revif w cs) legs
wop (Up' (Node' (_,b) a legs)) cs
= foldr wop (revif b cs) legs ++ [a] ++ foldr bop (revif (not b) cs) legs
wop (Dn' (Node' (w,_) _ legs)) cs
= reverse (foldr wop (revif w cs) legs)
revif :: Bool -> [a] -> [a]
revif b cs = if b then reverse cs else cs | sampou-org/pfad | Code/Code30_Parity.hs | bsd-3-clause | 1,164 | 0 | 11 | 324 | 677 | 361 | 316 | 25 | 2 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
-- | This module contains responses from Telegram Bot API
module Web.Telegram.API.Bot.Responses
( -- * Types
Response (..)
, ResponseParameters (..)
, GetMeResponse
, MessageResponse
, ChatActionResponse
, UpdatesResponse
, FileResponse
, UserProfilePhotosResponse
, SetWebhookResponse
, InlineQueryResponse
, CallbackQueryResponse
, KickChatMemberResponse
, LeaveChatResponse
, UnbanChatMemberResponse
, GetChatResponse
, GetChatAdministratorsResponse
, GetChatMembersCountResponse
, GetChatMemberResponse
, GetWebhookInfoResponse
) where
import Data.Aeson
import GHC.Generics
import Web.Telegram.API.Bot.Data
import Web.Telegram.API.Bot.JsonExt
data Response a = Response
{
result :: a
, parameters :: Maybe ResponseParameters
} deriving (Show, Generic, FromJSON)
data ResponseParameters = ResponseParameters
{
res_migrate_to_chat_id :: Maybe Int -- ^ The group has been migrated to a supergroup with the specified identifier. This number may be greater than 32 bits and some programming languages may have difficulty/silent defects in interpreting it. But it is smaller than 52 bits, so a signed 64 bit integer or double-precision float type are safe for storing this identifier.
, res_retry_after :: Maybe Int -- ^ In case of exceeding flood control, the number of seconds left to wait before the request can be repeated
} deriving (Show, Generic)
instance FromJSON ResponseParameters where
parseJSON = parseJsonDrop 4
-- | This object represents 'getMe' response
type GetMeResponse = Response User
-- | This object represents message response
type MessageResponse = Response Message
-- | This object represents 'sendChatAction' response
type ChatActionResponse = Response Bool
-- | This object represents 'getUpdates' response
type UpdatesResponse = Response [Update]
-- | This object represents file response
type FileResponse = Response File
-- | This object represents user profile photos response
type UserProfilePhotosResponse = Response UserProfilePhotos
-- | This object represents 'setWebhook' response
type SetWebhookResponse = Response Bool
-- | This object represents 'answerInlineQuery' response
type InlineQueryResponse = Response Bool
-- | This object represents 'answerCallbackQuery' response
type CallbackQueryResponse = Response Bool
-- | This object represents 'kickChatMember' response
type KickChatMemberResponse = Response Bool
type LeaveChatResponse = Response Bool
-- | This object represents 'unbanChatMember' response
type UnbanChatMemberResponse = Response Bool
type GetChatResponse = Response Chat
type GetChatAdministratorsResponse = Response [ChatMember]
type GetChatMembersCountResponse = Response Int
type GetChatMemberResponse = Response ChatMember
type GetWebhookInfoResponse = Response WebhookInfo
| cblp/haskell-telegram-api | src/Web/Telegram/API/Bot/Responses.hs | bsd-3-clause | 3,059 | 0 | 9 | 587 | 373 | 229 | 144 | 55 | 0 |
module Graphics.UI.WX.Turtle.Move(
-- * types
Field,
Coordinates(..),
-- * process Field
openField,
closeField,
waitField,
topleft,
center,
coordinates,
fieldSize,
-- * draws
forkField,
flushField,
clearLayer,
clearCharacter,
moveTurtle,
-- * event
oninputtext,
onclick,
onrelease,
ondrag,
onmotion,
onkeypress,
ontimer,
addLayer,
addCharacter
) where
import Graphics.UI.WX.Turtle.State(TurtleState(..), makeShape)
import Graphics.UI.WX.Turtle.Field(
Field, Layer, Character, Coordinates(..),
openField, closeField, waitField, coordinates, topleft, center,
fieldSize, forkField, flushField, clearLayer,
clearCharacter, addLayer, addCharacter,
oninputtext, onclick, onrelease, ondrag, onmotion, onkeypress, ontimer,
fieldColor, drawLine, fillRectangle, fillPolygon, writeString,
drawImage, undoField, undoLayer, drawCharacter, drawCharacterAndLine)
import Text.XML.YJSVG(SVG(..), Position(..), Font(..), FontWeight(..))
import qualified Text.XML.YJSVG as S(topleft)
import Control.Concurrent(threadDelay)
import Control.Monad(when, unless, forM_)
import Control.Monad.Tools(unlessM)
import Data.Maybe(isJust)
--------------------------------------------------------------------------------
moveTurtle :: Field -> Character -> Layer -> TurtleState -> TurtleState -> IO ()
moveTurtle _ _ _ _ TurtleState{sleep = Just t} = threadDelay $ 1000 * t
moveTurtle f _ _ _ TurtleState{flush = True} = flushField f True $ return ()
moveTurtle f c l t0 t1 = do
(w, h) <- fieldSize f
when (undo t1) $ fl $ do
when (clear t0) redraw
when (isJust $ draw t0) $ do
-- unlessM (undoLayer l) $ clearLayer l >> redraw
undoField f
when (visible t1) $ drawTtl (direction t0) $ position t0
when (visible t1) $ do
forM_ (directions t0 t1) $ \dir -> fl $
drawTtl dir (position t0) >> threadDelay (interval t0)
forM_ (positions w h t0 t1) $ \p -> fl $
drawTtl (direction t1) p >> threadDelay (interval t0)
fl $ drawTtl (direction t1) $ position t1
when (visible t0 && not (visible t1)) $ fl $ clearCharacter c
when (clear t1) $ fl $ clearLayer l
unless (undo t1) $ fl $ maybe (return ()) (drawSVG f l) (draw t1)
where
fl = flushField f $ stepbystep t0
redraw = mapM_ (drawSVG f l) $ reverse $ drawed t1
drawTtl dir pos = drawTurtle f c t1 dir pos begin
begin | undo t1 && pendown t0 = Just $ position t1
| pendown t1 = Just $ position t0
| otherwise = Nothing
drawSVG :: Field -> Layer -> SVG -> IO ()
drawSVG f l (Line p0 p1 clr lw) = drawLine f l lw clr p0 p1
drawSVG f l (Rect pos w h 0 fc _) = fillRectangle f l pos w h fc
drawSVG f l (Polyline ps fc lc lw) = fillPolygon f l ps fc lc lw
drawSVG f l (Fill clr) = fieldColor f l clr
drawSVG f l (Text pos sz clr fnt str) = writeString f l fnt sz clr pos str
drawSVG f l (Image pos w h fp) = drawImage f l fp pos w h
drawSVG _ _ _ = error "not implemented"
positions :: Double -> Double -> TurtleState -> TurtleState -> [Position]
positions w h t0 t1 =
maybe [] (mkPositions w h (position t0) (position t1)) $ positionStep t0
mkPositions :: Double -> Double -> Position -> Position -> Double -> [Position]
mkPositions w h p1 p2 step = case (p1, p2) of
(Center x0 y0, Center x1 y1) -> map (uncurry Center) $ mp x0 y0 x1 y1
(TopLeft x0 y0, TopLeft x1 y1) -> map (uncurry TopLeft) $ mp x0 y0 x1 y1
_ -> mkPositions w h (S.topleft w h p1) (S.topleft w h p2) step
where
mp x0 y0 x1 y1 = let dist = ((x1 - x0) ** 2 + (y1 - y0) ** 2) ** (1 / 2)
in take (floor $ dist / step) $ zip
[x0, x0 + step * (x1 - x0) / dist .. ]
[y0, y0 + step * (y1 - y0) / dist .. ]
directions :: TurtleState -> TurtleState -> [Double]
directions t0 t1 = case directionStep t0 of
Nothing -> []
Just step -> [ds, ds + dd .. de - dd]
where
dd = if de > ds then step else - step
ds = direction t0
de = direction t1
drawTurtle :: Field -> Character -> TurtleState -> Double -> Position ->
Maybe Position -> IO ()
drawTurtle f c ts@TurtleState{fillcolor = fclr, pencolor = clr} dir pos = maybe
(drawCharacter f c fclr clr (makeShape ts dir pos) (pensize ts))
(drawCharacterAndLine f c fclr clr (makeShape ts dir pos) (pensize ts) pos)
| YoshikuniJujo/wxturtle | src/Graphics/UI/WX/Turtle/Move.hs | bsd-3-clause | 4,135 | 76 | 17 | 816 | 1,906 | 993 | 913 | 96 | 3 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeOperators #-}
module Data.FAlgebra.Hole
( module Data.FAlgebra.Base
, HoleF(..)
, HoleM(..)
) where
import Data.FAlgebra.Base
import Data.Functor
-- |Transform a functor to allow holes of type a
data HoleF a f r = Hole a | Full (f r)
deriving (Eq, Show, Functor)
instance Natural f f' => Natural f (HoleF a f') where
nat = Full . nat
instance RestrictedNatural s f f' => RestrictedNatural s f (HoleF a f') where
rnat s = Full . rnat s
-- |Hole filling structure
-- This isn't quite the dual of what I have for annotations
-- But it definitely makes more sense here so maybe I was wrong
-- with annotations.
-- Note that when a is an f-coalgebra we can make a -> f r
-- from a -> t through a -> f a -> f r
newtype HoleM a f r = HoleM { runHoleM :: a -> f r }
instance (Functor f, Conatural f f') => RestrictedConatural (HoleM a f) f (HoleF a f') where
rconat (HoleM fillHole) (Hole a) = fillHole a
rconat (HoleM fillHole) (Full bs) = conat bs
instance (Functor f, RestrictedConatural s f f') => RestrictedConatural (s :*: HoleM a f) f (HoleF a f') where
rconat (s :*: HoleM fillHole) (Hole a) = fillHole a
rconat (s :*: _) (Full bs) = rconat s bs
-- Does this instance make sense?
instance Functor f => Preserving (HoleM a f) (HoleF a f) where
trans (HoleM fillHole) = HoleM (\a -> const (Hole a) <$> fillHole a)
| bhamrick/fixalgs | Data/FAlgebra/Hole.hs | bsd-3-clause | 1,496 | 0 | 12 | 332 | 473 | 254 | 219 | 25 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- |
-- Module : Travis.Meta
-- Description : Travis preprocessor
-- Copyright : (c) Oleg Grenrus, 2005
-- License : BSD3
-- Maintainer : Oleg Grenrus <[email protected]>
module Travis.Meta (
-- * High level
preprocessIO
, preprocess
, preprocessYaml
-- * Internal
, Env
, parseEnv
, interpolateEnv
, unlinesShell
, shellScripts
, languageTemplates
, encode'
) where
import Control.Category hiding ((.))
import Control.Arrow (second)
import Control.Lens hiding ((.=))
import Control.Monad hiding (sequence)
import Data.Aeson.Lens
import Data.Aeson.Merge
import Data.Aeson.Types
import Data.ByteString as BS
import Data.Char
import Data.FileEmbed
import Data.Foldable
import Data.Function (on)
import Data.List as L (map, elemIndex, filter)
import Data.Maybe
import Data.Monoid
import Data.String
import Data.Text as T
import Data.Traversable
import Data.Vector.Lens (vector)
import Data.Version
import Data.Yaml
import Data.Yaml.Pretty
import Prelude hiding (sequence)
import Text.Regex.Applicative.Text as RE
import qualified Paths_travis_meta_yaml as Meta
type Env = [(Text, Text)]
-- | Parse environment string.
--
-- > >>> parseEnv "CABALVER=1.18 GHCVER=7.8.4"
-- > Right [("CABALVER","1.18"),("GHCVER","7.8.4")]
parseEnv :: Text -> Either String Env
parseEnv = traverse (f . T.splitOn "=") . T.words
where f [k, n] = Right (k, n)
f _ = Left "Cannot parse"
-- > >>> match (interpolationRe $ flip lookup [("foo", "bar")]) "$foo"
-- > Just (Just "bar")
interpolationRe :: (Text -> Maybe Text) -> RE' (Maybe Text)
interpolationRe l = comb <$> many (interpolationChar l)
where comb :: [Maybe Text] -> Maybe Text
comb = fmap T.concat . sequence
isVarChar :: Char -> Bool
isVarChar = (||) <$> isAlpha <*> (=='_')
interpolationChar :: (Text -> Maybe Text) -> RE' (Maybe Text)
interpolationChar l = var <|> other
where var = l . T.pack <$ sym '$' <*> many (psym isVarChar)
other = Just . T.singleton <$> anySym
-- | Interpolate env. Substitute all @$VAR@ occurrences with values from 'Env'.
-- If variable is not in the environment, return 'Nothing'.
--
-- > >>> interpolateEnv [("FOO", "foo")] "res-$FOO-bar"
-- > Just "res-foo-bar"
--
-- > >>> interpolateEnv [("FOO","foo")] "yes-$FOOBAR-$FOO"
-- > Nothing
interpolateEnv :: Env -> Text -> Maybe Text
interpolateEnv env = join . match (interpolationRe l)
where l = flip lookup env
-- | Like 'interpolateEnv' but substitute non-existing variables with the empty string.
--
-- > >>> interpolateEnv' [("FOO","foo")] "yes-$FOOBAR-$FOO"
-- > "yes--foo"
interpolateEnv' :: Env -> Text -> Text
interpolateEnv' env = RE.replace (f <$ sym '$' <*> many (psym isVarChar))
where f :: String -> Text
f = fromMaybe "" . flip lookup env . T.pack
preprocessYaml :: Value -> Either String Value
preprocessYaml = preprocessYaml' . processMeta . processLanguage
processMeta :: Value -> Value
processMeta v = v'''
where
v' = v & _Object . at "meta" .~ Nothing
v'' = case (v ^? key "meta" . key "pre") of
Just meta -> merge meta v'
Nothing -> v'
v''' = case (v ^? key "meta" . key "post") of
Just meta -> merge v'' meta
Nothing -> v''
preprocessYaml' :: Value -> Either String Value
preprocessYaml' v = do
assertNoMatrixInclude v
matrixInclude <- buildMatrixInclude v
let v' = v & deep _String %~ embedShellScripts
& _Object . at "env" .~ Nothing
& _Object . at "addons" .~ Nothing
& _Object . at "compiler" .~ Nothing
& _Object . at "meta" .~ Nothing
& _Object . at "matrix" ?~ (fromMaybe (Object mempty) (v ^? key "matrix"))
& key "matrix" . _Object . at "include" ?~ matrixInclude
return v'
processLanguage :: Value -> Value
processLanguage v =
case (v ^? key "language" . _String) >>= flip lookup languageTemplates of
Just template -> merge (v & _Object . at "language" .~ Nothing) template
Nothing -> v
buildMatrixInclude :: Value -> Either String Value
buildMatrixInclude v = toJSON <$> mk `traverse` envs
where addons = v ^? key "addons"
compiler = v ^? key "compiler" . _String
envs = v ^.. key "env" . values . _String
mk env = do env' <- parseEnv env
let interpolate = traverseOf _String (interpolateEnv env')
addons' = addons & _Just . key "apt" . key "packages" . _Array . from vector %~ mapMaybe interpolate
compiler' = compiler & _Just %~ T.strip . interpolateEnv' env'
return $ object $ catMaybes
[ Just $ "env" .= env
, ("addons" .=) <$> addons'
, ("compiler" .=) <$> compiler'
]
assertNoMatrixInclude :: Value -> Either String ()
assertNoMatrixInclude v =
case v ^? key "matrix" . key "include" of
Nothing -> Right ()
Just v' -> Left $ "matrix.include specified: " ++ show v'
header :: ByteString
header = "# This file has been generated by travis-meta-yaml " <> fromString (showVersion Meta.version) <> "\n# see https://github.com/phadej/travis-meta-yaml\n"
preprocess :: ByteString -> Either String ByteString
preprocess = fmap ((header <>) . encode') . preprocessYaml <=< decodeEither
preprocessIO :: FilePath -> FilePath -> IO ()
preprocessIO source target = do
contents <- BS.readFile source
case preprocess contents of
Left err -> error err
Right bs -> BS.writeFile target bs
-- | name and contents pairs
shellScripts :: [(Text, Text)]
shellScripts =
[ ("multi-ghc-install.sh", $(embedStringFile "data/multi-ghc-install.sh"))
]
languageTemplates :: [(Text, Value)]
languageTemplates =
[ t "haskell-stack" $(embedFile "data/stack.yml")
, t "haskell-multi-ghc" $(embedFile "data/multi-ghc.yml")
]
where t name bs = (name, fromJust' (T.unpack name) $ decode bs)
embedShellScripts :: Text -> Text
embedShellScripts = appEndo $ foldMap (Endo . uncurry embedShellFile . second unlinesShell) shellScripts
fromJust' :: String -> Maybe a -> a
fromJust' _ (Just x) = x
fromJust' e Nothing = error $ "fromJust: Nothing -- " <> e
embedShellFile :: Text -> Text -> Text -> Text
embedShellFile filename contents =
RE.replace (contents <$ string "sh" <* some (sym ' ') <* string filename)
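-- | Join a multi-line shell snippet into a single line: comments and blank
-- lines are dropped and each command is terminated with @;@ (except after
-- @then@/@else@). Illustrative example (added, not from the original source):
--
-- > >>> unlinesShell "if true # comment\nthen\n  echo hi\nfi"
-- > "if true; then echo hi; fi;"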
unlinesShell :: Text -> Text
unlinesShell = T.lines >>>
L.map (strip . stripComments) >>>
L.filter (not . T.null) >>>
L.map (fixSemiColonThenElse . (<> ";")) >>>
T.intercalate " "
where stripComments = RE.replace ("" <$ sym '#' <* many anySym)
fixSemiColonThenElse = RE.replace ((string "then" <|> string "else") <* sym ';')
-- Right v <- decodeEither <$> BS.readFile ".travis.meta.yml" :: IO (Either String Value)
-- BS.putStr $ encode $ preprocessYaml v
-- Nothing is smaller than Just.
-- We also swap params.
elemIndexE :: Eq a => [a] -> a -> Either Int ()
elemIndexE l e = maybe (Right ()) Left (L.elemIndex e l)
listCompare :: Eq a => [a] -> a -> a -> Ordering
listCompare l = compare `on` elemIndexE l
preferredOrder :: [Text]
preferredOrder = ["sudo", "language", "before_install", "install", "script", "matrix"]
encode' :: ToJSON a => a -> ByteString
encode' = encodePretty cfg
where cfg = setConfCompare (listCompare preferredOrder) defConfig
| phadej/travis-meta-yaml | src/Travis/Meta.hs | bsd-3-clause | 7,438 | 0 | 31 | 1,683 | 2,157 | 1,132 | 1,025 | -1 | -1 |
-- | Helper for writing tests for numeric code
module Test.QuickCheck.Numeric (
-- * Approximate equality
eq
, eqC
-- * Function monotonicity
, Monotonicity(..)
, monotonicFunction
, monotonicFunctionIEEE
-- * Inverse function
, checkInverse
, checkInverse2
) where
import Data.Complex
import qualified Numeric.IEEE as IEEE
----------------------------------------------------------------
-- Approximate equality
----------------------------------------------------------------
-- | Approximate equality for 'Double'. Doesn't work well for numbers
-- which are almost zero.
eq :: Double -- ^ Relative error
-> Double -> Double -> Bool
eq eps a b
| a == 0 && b == 0 = True
| otherwise = abs (a - b) <= eps * max (abs a) (abs b)
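-- Illustrative use of 'eq' (examples added, not from the original source):
--
-- > eq 1e-6 1 1.0000001 == True
-- > eq 1e-6 1 2 == False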
-- | Approximate equality for 'Complex Double'
eqC :: Double -- ^ Relative error
-> Complex Double
-> Complex Double
-> Bool
eqC eps a@(ar :+ ai) b@(br :+ bi)
| a == 0 && b == 0 = True
| otherwise = abs (ar - br) <= eps * d
&& abs (ai - bi) <= eps * d
where
d = max (realPart $ abs a) (realPart $ abs b)
----------------------------------------------------------------
-- Function monotonicity
----------------------------------------------------------------
-- | Function monotonicity type.
data Monotonicity
= StrictInc -- ^ Strictly increasing function
| MonotoneInc -- ^ Monotonically increasing function
| StrictDec -- ^ Strictly decreasing function
| MonotoneDec -- ^ Monotonically decreasing function
deriving (Show,Eq,Ord)
-- | Check that a function is nondecreasing. For floating point numbers
-- this may give spurious failures, so 'monotonicFunctionIEEE'
-- should be used in that case.
monotonicFunction :: (Ord a, Ord b) => Monotonicity -> (a -> b) -> a -> a -> Bool
monotonicFunction cmp f x1 x2
= f (min x1 x2) `op` f (max x1 x2)
where
op = case cmp of
StrictInc -> (< )
MonotoneInc -> (<=)
StrictDec -> (> )
MonotoneDec -> (>=)
-- | Check that a function is nondecreasing, taking rounding errors into
-- account. This function makes no distinction between strictly
-- increasing and monotonically increasing functions since the
-- distinction is pointless for floating point.
--
-- In fact the function is allowed to decrease by less than one ulp in order
-- to guard against problems with excess precision. On x86 the FPU works
-- with 80-bit numbers but doubles are 64-bit, so rounding happens
-- whenever values are moved from registers to memory.
monotonicFunctionIEEE :: (Ord a, IEEE.IEEE b) => Monotonicity -> (a -> b) -> a -> a -> Bool
monotonicFunctionIEEE cmp f x1 x2
= y1 `op` y2
|| abs (y1 - y2) < abs (y2 * IEEE.epsilon)
where
y1 = f (min x1 x2)
y2 = f (max x1 x2)
op = case cmp of
StrictInc -> (<=)
MonotoneInc -> (<=)
StrictDec -> (>=)
MonotoneDec -> (>=)
----------------------------------------------------------------
-- Function and its inverse
----------------------------------------------------------------
-- | Check that one function is the inverse of the other. Breaks down near zero.
checkInverse
:: (Double -> Double) -- ^ Function @f(x)@
-> (Double -> Double) -- ^ Inverse function @g@, @g(f(x)) = x@
-> (Double -> Double) -- ^ Derivative of function @f(x)@
-> Double -- ^ Relative error for
-- @f(x)@. Usually is machine epsilon.
-> Double -- ^ Relative error for inverse function
-- @g(x)@. Usually is machine epsilon.
-> Double -> Bool
checkInverse f invF f' eps eps' x
= x ~= invF y
where
(~=) = eq (eps' + abs (y / f' x * eps))
y = f x
-- | Check that one function is the inverse of the other. Breaks down near zero.
checkInverse2
:: (Double -> Double) -- ^ Function @f(x)@
-> (Double -> Double) -- ^ Inverse function @g@, @g(f(x)) = x@
-> (Double -> Double) -- ^ Derivative of function @g(x)@
-> Double -- ^ Relative error for
-- @f(x)@. Usually is machine epsilon.
-> Double -- ^ Relative error for inverse function
-- @g(x)@. Usually is machine epsilon.
-> Double -> Bool
checkInverse2 f invF invF' eps eps' x
= x ~= invF y
where
(~=) = eq (eps' + abs (y * (invF' y * eps)))
y = f x
| Shimuuar/quickcheck-numeric | Test/QuickCheck/Numeric.hs | bsd-3-clause | 4,416 | 0 | 15 | 1,182 | 913 | 510 | 403 | 71 | 4 |
{-# LANGUAGE GADTs, RankNTypes, TupleSections #-}
module QnA4
where
data Q1i
data Q1o
data Q2i
data Q2o
data Q3i
data Q3o
data Q4i
data Q4o
data Graph s a s' where
Graph :: (s -> a) -> (s -> a -> s') -> Graph s a s'
runGraph :: Graph s a s' -> s -> (a, s')
runGraph (Graph f q) s = (f s, q s (f s))
(~:>) :: Graph s a s' -> Graph s' a' s'' -> Graph s a' s''
Graph f1 q1 ~:> Graph f2 q2 = Graph (\s -> f2 $ q1 s (f1 s)) (\s a' -> q2 (q1 s (f1 s)) a')
n1 :: Graph () Int String
n1 = Graph (const 42) (const . show)
n2 :: Graph String Bool Int
n2 = Graph ("42"==) (\s b -> if b then length s else -1 * length s)
n3 :: Graph () Bool Int
n3 = n1 ~:> n2
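-- Illustrative (added): runGraph n3 () == (True, 2), since n1 produces 42
-- (rendered as "42") and n2 then checks that string and returns its length.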
n4 :: Graph () Bool Int
n4 = let (a, s) = runGraph n1 () in undefined
type Input = String
type Prompt = String
type Color = String
data Question a = Question Prompt (Input -> a)
data Link a s s' = Link (Question a) (s -> a -> s')
data Edge sf where
Edge :: Link a s s' -> (s' -> a -> Edge sf) -> Edge sf
Final :: Link a s s' -> (s' -> a -> sf) -> Edge sf
doYouKnowYourSizeQ :: Question Bool
doYouKnowYourSizeQ = Question "Do you know your size?" read
whatIsYourSizeQ :: Question Int
whatIsYourSizeQ = Question "What is your size?" read
whatIsYourWeightQ :: Question Int
whatIsYourWeightQ = Question "What is your weight?" read
whatIsYourHeightQ :: Question Int
whatIsYourHeightQ = Question "What is your height?" read
whatIsYourFavColorQ :: Question Color
whatIsYourFavColorQ = Question "What is your fav color?" id
l5 :: Link Color (Bool, Int) (Bool, Int, Color)
l5 = Link whatIsYourFavColorQ (\(b, i) c -> (b, i, c))
l1 :: Link Bool () Bool
l1 = Link doYouKnowYourSizeQ (const id)
l2 :: Link Int Bool (Bool, Int)
l2 = Link whatIsYourSizeQ (\ b s -> (b, s))
l3 :: Link Int Bool (Bool, Int)
l3 = Link whatIsYourWeightQ (,)
l4 :: Link Int (Bool, Int) (Bool, Int)
l4 = Link whatIsYourHeightQ (\ (b, w) h -> (b, w * h))
e1 = Edge l1 (const $ \ b -> if b then e2 else e3)
e2 = Edge l2 (const $ const ef)
e3 = Edge l3 (const $ const e4)
e4 = Edge l4 (const $ const ef)
ef = Final l5 const
| homam/fsm-conversational-ui | src/QnA4.hs | bsd-3-clause | 2,065 | 0 | 12 | 468 | 973 | 524 | 449 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude #-}
-------------------------------------------------------------------
-- |
-- Module : Irreverent.Bitucket.Core.Data.Pipelines.SSHKeyPair
-- Copyright : (C) 2017 - 2018 Irreverent Pixel Feats
-- License : BSD-style (see the file /LICENSE.md)
-- Maintainer : Dom De Re
--
-------------------------------------------------------------------
module Irreverent.Bitbucket.Core.Data.Pipelines.SSHKeyPair (
-- * Types
PipelinesSSHKeyPair(..)
) where
import Irreverent.Bitbucket.Core.Data.Common (
PublicSSHKey(..)
)
import Preamble
data PipelinesSSHKeyPair = PipelinesSSHKeyPair {
pskpPublicKey :: !PublicSSHKey
} deriving (Show, Eq)
| irreverent-pixel-feats/bitbucket | bitbucket-core/src/Irreverent/Bitbucket/Core/Data/Pipelines/SSHKeyPair.hs | bsd-3-clause | 700 | 0 | 9 | 105 | 76 | 53 | 23 | 11 | 0 |
module System.IO.Jail.ByteString
( packCString
, packCStringLen
, useAsCString
, useAsCStringLen
, getLine
, getContents
, putStr
, putStrLn
, interact
, readFile
, writeFile
, appendFile
, hGetLine
, hGetContents
, hGet
, hGetNonBlocking
, hPut
, hPutStr
, hPutStrLn
)
where
import Prelude hiding (IO, getLine, getContents, putStr, putStrLn, interact, readFile, writeFile, appendFile)
import System.IO.Jail.Unsafe
import Foreign.C.String
import System.IO (Handle)
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
packCString :: CString -> IO ByteString
packCString c = io (B.packCString c)
packCStringLen :: CStringLen -> IO ByteString
packCStringLen c = io (B.packCStringLen c)
useAsCString :: ByteString -> (CString -> IO a) -> IO a
useAsCString b f =
do r <- mkCallback
io (B.useAsCString b (r . f))
useAsCStringLen :: ByteString -> (CStringLen -> IO a) -> IO a
useAsCStringLen b f =
do r <- mkCallback
io (B.useAsCStringLen b (r . f))
getLine :: IO ByteString
getLine = io B.getLine
getContents :: IO ByteString
getContents = io B.getContents
putStr :: ByteString -> IO ()
putStr b = io (B.putStr b)
putStrLn :: ByteString -> IO ()
putStrLn b = io (B.putStrLn b)
interact :: (ByteString -> ByteString) -> IO ()
interact f = io (B.interact f)
readFile :: FilePath -> IO ByteString
readFile = embedPath "readFile" B.readFile
writeFile :: FilePath -> ByteString -> IO ()
writeFile f b = embedPath "writeFile" (flip B.writeFile b) f
appendFile :: FilePath -> ByteString -> IO ()
appendFile f b = embedPath "appendFile" (flip B.appendFile b) f
hGetLine :: Handle -> IO ByteString
hGetLine = embedHandle "hGetLine" B.hGetLine
hGetContents :: Handle -> IO ByteString
hGetContents = embedHandle "hGetContents" B.hGetContents
hGet :: Handle -> Int -> IO ByteString
hGet h i = embedHandle "hGet" (flip B.hGet i) h
hGetNonBlocking :: Handle -> Int -> IO ByteString
hGetNonBlocking h i = embedHandle "hGetNonBlocking" (flip B.hGetNonBlocking i) h
hPut :: Handle -> ByteString -> IO ()
hPut h b = embedHandle "hPut" (flip B.hPut b) h
hPutStr :: Handle -> ByteString -> IO ()
hPutStr h b = embedHandle "hPutStr" (flip B.hPutStr b) h
hPutStrLn :: Handle -> ByteString -> IO ()
hPutStrLn h b = embedHandle "hPutStrLn" (flip B.hPutStrLn b) h
| sebastiaanvisser/jail | src/System/IO/Jail/ByteString.hs | bsd-3-clause | 2,293 | 0 | 11 | 394 | 860 | 446 | 414 | 68 | 1 |
{-# LANGUAGE FlexibleContexts,
MultiParamTypeClasses,
FlexibleInstances
#-}
module DSL.GraphDSL (
-- Types
Equality (..),
Implication (..),
InEquality (..),
Constraint,
Requirement (..),
Graph,
CLD,
graph,
constraints,
Sign (..),
TimeFrame (..),
-- Graph syntax
GraphSyntax,
mkNode,
(>+>),
(>->),
(>++>),
(>-->),
(>?>),
(>~+>),
(>~->),
(>~++>),
(>~-->),
(>~?>),
link,
constrain,
-- Compilation
compile,
compileGraph,
compileConstraints,
ignoreTime,
-- Printing and visualization
prettify,
prettyPrint,
preview
) where
-- | Imports
import Control.Monad.State
import DSL.ConstraintDSL
import Data.Graph.Inductive as G hiding (mkNode, Graph)
import Data.GraphViz hiding (Graph)
import qualified Data.Text.Lazy as Txt hiding (head)
-- | Gives an identifier for a node
type Name = Node
-- | Gives the sign of influence {+, ++, +?, -, --, -?, 0, ?}
data Sign = P | PP | PQ | M | MM | MQ | Z | Q deriving (Ord, Eq)
-- | Convert a sign to a string
instance Show Sign where
show P = "+"
show PP = "++"
show PQ = "+?"
show M = "-"
show MM = "--"
show MQ = "-?"
show Z = "0"
show Q = "?"
-- | Label a sign
instance Labellable Sign where
toLabelValue = textLabelValue . Txt.pack . show
-- | Label a node
instance Labellable (String, Maybe Node) where
toLabelValue (s, _) = textLabelValue . Txt.pack $ s
-- | A type for requirements
data Requirement = S Sign | ST (Sign, TimeFrame) deriving (Show)
-- | We need to be able to lift Signs into Requirements
instance Lifts Sign Requirement where
lift = S
-- | We need to be able to lift pairs of Signs in to Requirements
instance Lifts (Sign, TimeFrame) Requirement where
lift = ST
-- | A type for constraints
type Constraint = ConstraintType Name Requirement
-- | A Time interval
data TimeFrame = Im | Future deriving (Ord, Eq, Show)
-- | Graphs
type Graph = Gr (String, Maybe Node) Sign
-- | A CLD is the graph, the signs associated with the edges and the constraints (and the names of the nodes)
data CLDG n e = CLDG {graph :: Gr n e, constraints :: [Constraint]} deriving (Show)
-- | A CLD is the general CLDG specialised to labelled nodes and signed edges
type CLD = CLDG (String, Maybe Node) Sign
-- | The monadic syntax of graphs
type GraphSyntax a = State (CLDG String (Sign, TimeFrame)) a
-- fixity
infixl >+>
infixl >++>
infixl >->
infixl >-->
infixl >?>
infixl >~+>
infixl >~++>
infixl >~->
infixl >~-->
infixl >~?>
-- | Create a new node
mkNode :: String -> GraphSyntax Name
mkNode s = do
cld <- get
let gr = graph cld
let i = head $ newNodes 1 gr
put cld { graph = insNode (i, s) gr }
return i
-- | Create a new edge
(>+>) = makeEdge P Im
(>->) = makeEdge M Im
(>?>) = makeEdge Q Im
(>++>) = makeEdge PP Im
(>-->) = makeEdge MM Im
(>~+>) = makeEdge P Future
(>~->) = makeEdge M Future
(>~?>) = makeEdge Q Future
(>~++>) = makeEdge PP Future
(>~-->) = makeEdge MM Future
-- | Factor out the commonality in >x>
makeEdge :: Sign -> TimeFrame -> GraphSyntax Name -> Name -> GraphSyntax Name
makeEdge s t g w = do
v <- g
cld <- get
put $ cld { graph = insEdge (v, w, (s, t)) (graph cld) }
return w
-- | Add a constraint
constrain :: (IsConstraint Name Requirement c) => c -> GraphSyntax ()
constrain c = do
cld <- get
put $ cld { constraints = toConstraint c : constraints cld }
-- | Syntactic sugar
link :: a -> GraphSyntax a
link = return
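-- A minimal usage sketch (illustrative; the names are invented and not part
-- of the original module):
--
-- > exampleCLD :: CLD
-- > exampleCLD = compile $ do
-- >   rain  <- mkNode "rain"
-- >   crops <- mkNode "crops"
-- >   link rain >+> crops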
-- | The initial state
initialState :: CLDG String (Sign, TimeFrame)
initialState = CLDG G.empty []
-- | Compile the graph
compile :: GraphSyntax a -> CLD
compile gs = CLDG (nfilter (\n -> 0 /= (length (neighbors g n))) g) constrs
where
-- General CLD
cldg = execState gs initialState
-- Posed constraints
constr = constraints cldg
-- compute temporal constraints
listConstrTemporal = [ (n, (s, t)) | (Equality n (ST (s, t))) <- constr]
constrs = filter removeTemporal constr ++
[
if futureEdge then
toConstraint $ (n+(if t == Future then minNode else 0)) := s
else
toConstraint $ n := s
| (n, (s, t)) <- listConstrTemporal
]
removeTemporal (Equality _ (ST _)) = False
removeTemporal _ = True
-- The raw CLDG graph
gra = graph cldg
-- The raw CLDG extended with parents, but without time
gra' = nmap (\s -> (s, Nothing)) $ emap fst gra
-- The minimum value for the new node
minNode = head $ newNodes 1 gra
-- Is there an edge to the near future
futureEdge = any (\(_, _, (_, t)) -> t /= Im) $ labEdges gra
newNs = map (\(n, a) -> (n+minNode, (a++"'", Just n))) (labNodes gra)
newEs = map (\(sr, si, (s, t)) -> if t == Im then
(sr+minNode, si+minNode, (s, Im))
else (sr, si+minNode, (s, Im))
) (labEdges gra)
weirdGraph = foldl (flip insEdge) (foldl (flip insNode) (nmap (\s -> (s, Nothing)) gra) newNs) newEs
g = if futureEdge then
emap fst $ delEdges
(map (\(a, b, _) -> (a, b)) $
filter (\(_, _, (x, y)) -> y /= Im) $
labEdges weirdGraph
) weirdGraph
else
gra'
-- | Extract a graph from a syntax
compileGraph :: GraphSyntax a -> Graph
compileGraph = graph . compile
-- | Extract the constraints from a syntax
compileConstraints :: GraphSyntax a -> [Constraint]
compileConstraints = constraints . compile
-- | Ignore time in the CLD
ignoreTime :: GraphSyntax a -> GraphSyntax ()
ignoreTime gs = do
gs
state <- get
put $ state {graph = emap (\(s, _) -> (s, Im)) (graph state)}
| GRACeFUL-project/GraphDSL | src/DSL/GraphDSL.hs | bsd-3-clause | 6,207 | 0 | 17 | 2,032 | 1,846 | 1,054 | 792 | 147 | 6 |
-- | An algorithm for merging users' edits. Specifically, there's just one
-- function – 'merge' – and it simply does a three-way diff.
module Guide.Diff.Merge
(
merge,
)
where
import Imports
import Guide.Diff.Tokenize
import qualified Data.Patch as PV
import qualified Data.Text as T
-- | An implementation of a 3-way diff and merge.
merge
:: Text -- ^ Original text
-> Text -- ^ Variant A (preferred)
-> Text -- ^ Variant B
-> Text -- ^ Merged text
merge (toVector . tokenize -> orig)
(toVector . tokenize -> a)
(toVector . tokenize -> b) =
T.concat . toList $ PV.apply (pa <> pb') orig
where
-- 1. diff
pa = PV.diff orig a
pb = PV.diff orig b
-- 2. merge
(_, pb') = PV.transformWith PV.ours pa pb
| aelve/guide | back/src/Guide/Diff/Merge.hs | bsd-3-clause | 770 | 0 | 8 | 196 | 182 | 106 | 76 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Database.Esqueleto.Join.TH where
import qualified Data.List as List
import Data.Maybe
import Data.Monoid
import Data.Tagged
import qualified Database.Esqueleto as E
import Language.Haskell.TH
import Language.Haskell.TH.ExpandSyns
import Database.Esqueleto.Join
(<$$>) :: (Functor f, Functor g) => (a -> b) -> f (g a) -> f (g b)
f <$$> a = (f <$>) <$> a
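-- | Generate 'FieldPair' instances for every pair of entities that have
-- exactly one foreign-key field joining them (see 'findPairings').
-- Typically spliced (@$(mkJoins)@) where the relevant @EntityField@
-- instances are in scope.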
mkJoins :: Q [Dec]
mkJoins = fmap concat . mapM mkInstance . findPairings =<< mapM pluck =<< entityFieldInstances
data Entity
= Entity
{ eEntityType :: Tagged "Entity" Type -- ^ Type like @Student@
, eFields :: [EntityField] -- ^ Foreign key fields like @StudentTeacherId@
} deriving (Eq, Show)
pluck :: Tagged "EntityField" Dec -> Q Entity
pluck dec = Entity (entityType dec) . catMaybes <$> mapM fieldKeyConstructors (entityFieldConstructors dec)
pairs :: [a] -> [(a, a)]
pairs xs = (,) <$> xs <*> xs
-- | One side of a 'FieldPair' instance, e.g. the @Student@ half of a @Student@-@Teacher@ pair
data InstanceEntity
= InstanceEntity
{ ieEntityType :: Tagged "Entity" Type
, ieFieldConstructor :: Tagged "FieldConstructor" Con
, ieMaybeCon :: MaybeCon
} deriving (Eq, Show)
data Pair
= Pair
{ left :: InstanceEntity
, right :: InstanceEntity
, joinType :: Tagged "JoinType" Type -- ^ The type that these two entities can join on like @TeacherId@ in a @Student@-@Teacher@ pair
} deriving (Eq, Show)
-- | Find pairs of entities with a unique way of joining
findPairings :: [Entity] -> [Pair]
findPairings xs =
symmetrize . catMaybes $ uncurry handlePair <$> pairs xs
where
symmetrize x = List.nub $ (swap <$> x) <> x -- Make sure we can join in either order
where
swap Pair{..} = Pair right left joinType
handlePair lEnt rEnt =
case (cons (eEntityType lEnt) (eFields lEnt), cons (eEntityType lEnt) (eFields rEnt)) of
([(lFC, lMC)], [(rFC, rMC)])
| not (lMC == Present && rMC == Present) -- It doesn't make much sense for the primary key to be nullable
-> Just (Pair (InstanceEntity (eEntityType lEnt) lFC lMC) (InstanceEntity (eEntityType rEnt) rFC rMC) (Tagged . AppT (ConT ''E.Key) . unTagged . eEntityType $ lEnt))
_ -> Nothing
where
cons :: Tagged "Entity" Type -> [EntityField] -> [(Tagged "FieldConstructor" Con, MaybeCon)]
cons t =
map (\EntityField{..} -> (efFieldConstructor, efMaybeCon)) .
filter ((== unTagged t) . unTagged . efFieldOutType)
mkInstance :: Pair -> Q [Dec]
mkInstance Pair{..} =
[d|
instance FieldPair $(spliceTCon left) $(spliceTCon right) $(spliceMaybeCon left) $(spliceMaybeCon right) where
type JoinKey $(spliceTCon left) $(spliceTCon right) = $(pure . unTagged $ joinType)
pair =
( ($(singlize . ieMaybeCon $ left), $(spliceCon left))
, ($(singlize . ieMaybeCon $ right), $(spliceCon right))
)
|]
where
promote Present = PromotedT 'Present
promote Absent = PromotedT 'Absent
singlize Present = [|SPresent|]
singlize Absent = [|SAbsent|]
spliceMaybeCon = pure . promote . ieMaybeCon
spliceTCon = pure . unTagged . ieEntityType
spliceCon = mkCon . unTagged . ieFieldConstructor
mkCon (NormalC name _) = conE name
mkCon _ = error "Field key doesn't use a normal constructor"
entityFieldInstances :: Q [Tagged "EntityField" Dec]
entityFieldInstances = do
FamilyI _ instances <- reify ''E.EntityField
pure $ Tagged <$> instances
entityType :: Tagged "EntityField" Dec -> Tagged "Entity" Type
entityType (Tagged (DataInstD _ _ [ty, _] _ _)) = Tagged ty
entityType _ = error "`EntityField` not returning `DataInstD`"
entityFieldConstructors :: Tagged "EntityField" Dec -> [Tagged "ForAllFieldConstructor" Con]
entityFieldConstructors (Tagged (DataInstD _ _ _ cons _)) = Tagged <$> cons
entityFieldConstructors _ = error "`EntityField` not returning `DataInstD`"
data EntityField
= EntityField
{ efFieldOutType :: Tagged "FieldOutType" Type -- ^ In a field like @StudentTeacherId@, the @FieldOutType@ is @Teacher@
, efFieldConstructor :: Tagged "FieldConstructor" Con -- ^ A constructor like @StudentTeacherId@
, efMaybeCon :: MaybeCon -- ^ Does the @FieldConstructor@ return a type like @Maybe TeacherId@ or just @TeacherId@?
} deriving (Eq, Show)
fieldKeyConstructors :: Tagged "ForAllFieldConstructor" Con -> Q (Maybe EntityField)
fieldKeyConstructors (Tagged con) =
case con of
(ForallC [] [AppT _equalityT ty] con') ->
(uncurry (mkEntityField con') <$$>) . expandSyns' . extractEntityType =<< expandSyns ty
_ -> pure Nothing
where
mkEntityField (Tagged -> efFieldConstructor) (Tagged -> efFieldOutType) efMaybeCon = EntityField{..}
expandSyns' (Just (ty, con')) = Just . (, con') <$> expandSyns ty
expandSyns' Nothing = pure Nothing
extractEntityType (AppT (ConT k) ty)
| k == ''E.Key = Just (ty, Absent)
extractEntityType (AppT (ConT m) (AppT (ConT k) ty))
| m == ''Maybe && k == ''E.Key = Just (ty, Present)
extractEntityType _ = Nothing
| pseudonom/dovetail | src/Database/Esqueleto/Join/TH.hs | bsd-3-clause | 5,108 | 0 | 23 | 1,073 | 1,469 | 777 | 692 | -1 | -1 |
module HsenvMonadUtils (runInTmpDir) where
import System.Directory
import Util.IO
import HsenvMonad
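-- | Run an action inside a freshly created temporary directory.
-- The previous working directory is restored and the temporary directory
-- is removed afterwards ('finally' ensures the cleanup runs).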
runInTmpDir :: Hsenv a -> Hsenv a
runInTmpDir m = do
tmp <- liftIO getTemporaryDirectory
tmpDir <- liftIO $ createTemporaryDirectory tmp "hsenv"
oldCwd <- liftIO getCurrentDirectory
liftIO $ setCurrentDirectory tmpDir
let cleanup = do
liftIO $ setCurrentDirectory oldCwd
liftIO $ removeDirectoryRecursive tmpDir
m `finally` cleanup
| Paczesiowa/hsenv | src/HsenvMonadUtils.hs | bsd-3-clause | 462 | 0 | 13 | 85 | 129 | 61 | 68 | 14 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Anchor.Tokens.Server (
P.version,
startServer,
ServerState(..),
module X,
) where
import Control.Concurrent
import Control.Concurrent.Async
import Data.Pool
import qualified Data.Streaming.Network as N
import Database.PostgreSQL.Simple
import qualified Network.Socket as S
import Network.Wai.Handler.Warp hiding (Connection)
import Pipes.Concurrent
import Servant.Server
import System.Log.Logger
import qualified System.Remote.Monitoring as EKG
import Anchor.Tokens.Server.API as X hiding (logName)
import Anchor.Tokens.Server.Configuration as X
import Anchor.Tokens.Server.Statistics as X
import Anchor.Tokens.Server.Types as X
import Paths_anchor_token_server as P
-- * Server
logName :: String
logName = "Anchor.Tokens.Server"
-- | Start the statistics-reporting thread.
startStatistics
:: ServerOptions
-> Pool Connection
-> GrantCounters
-> IO (Output GrantEvent, IO ())
startStatistics ServerOptions{..} connPool counters = do
debugM logName $ "Starting EKG"
srv <- EKG.forkServer optStatsHost optStatsPort
(output, input, seal) <- spawn' (bounded 50)
registerOAuth2Metrics (EKG.serverMetricStore srv) connPool input counters
let stop = do
debugM logName $ "Stopping EKG"
atomically seal
killThread (EKG.serverThreadId srv)
threadDelay 10000
debugM logName $ "Stopped EKG"
return (output, stop)
startServer
:: ServerOptions
-> IO (IO (Async ()))
startServer serverOpts@ServerOptions{..} = do
debugM logName $ "Opening API Socket"
sock <- N.bindPortTCP optServicePort optServiceHost
let createConn = connectPostgreSQL optDBString
destroyConn conn = close conn
stripes = 1
keep_alive = 10
num_conns = 20
serverPGConnPool <-
createPool createConn destroyConn stripes keep_alive num_conns
counters <- mkGrantCounters
(serverEventSink, serverEventStop) <- startStatistics serverOpts serverPGConnPool counters
let settings = setPort optServicePort $ setHost optServiceHost $ defaultSettings
serverOAuth2Server = anchorOAuth2Server serverPGConnPool serverEventSink
apiSrv <- async $ do
debugM logName $ "Starting API Server"
runSettingsSocket settings sock $ serve anchorOAuth2API (server ServerState{..})
let serverServiceStop = do
debugM logName $ "Closing API Socket"
S.close sock
async $ do
wait apiSrv
debugM logName $ "Stopped API Server"
return $ do
serverEventStop
destroyAllResources serverPGConnPool
serverServiceStop
| zerobuzz/anchor-token-server | lib/Anchor/Tokens/Server.hs | bsd-3-clause | 2,912 | 0 | 16 | 820 | 658 | 341 | 317 | 71 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
module ClientProxyApi where
import System.Random
import Control.Monad.Trans.Except
import Control.Monad.Trans.Resource hiding (register)
import Control.Monad.IO.Class
import Data.Aeson
import Data.Aeson.TH
import Data.Bson.Generic
import GHC.Generics
import Network.Wai hiding(Response)
import Network.Wai.Handler.Warp
import Network.Wai.Logger
import Servant
import Servant.API
import Servant.Client
import System.IO
import System.Directory
import System.Environment (getArgs, getProgName, lookupEnv)
import System.Log.Formatter
import System.Log.Handler (setFormatter)
import System.Log.Handler.Simple
import System.Log.Handler.Syslog
import System.Log.Logger
import Data.Bson.Generic
import qualified Data.List as DL
import Data.Maybe (catMaybes)
import Data.Text (pack, unpack)
import Data.Time.Clock (UTCTime, getCurrentTime)
import Data.Time.Format (defaultTimeLocale, formatTime)
import Control.Monad (when)
import Network.HTTP.Client (newManager, defaultManagerSettings)
import System.Process
import LRUCache as C
data File = File {
fileName :: FilePath,
fileContent :: String
} deriving (Eq, Show, Generic)
instance ToJSON File
instance FromJSON File
data Response = Response{
response :: String
} deriving (Eq, Show, Generic)
instance ToJSON Response
instance FromJSON Response
data User = User{
uusername :: String,
upassword :: String,
timeout :: String,
token :: String
} deriving (Eq, Show, Generic)
instance ToJSON User
instance FromJSON User
instance ToBSON User
instance FromBSON User
data Signin = Signin{
susername :: String,
spassword :: String
} deriving (Eq, Show, Generic)
instance ToJSON Signin
instance FromJSON Signin
instance ToBSON Signin
instance FromBSON Signin
type ApiHandler = ExceptT ServantErr IO
serverport :: String
serverport = "8080"
serverhost :: String
serverhost = "localhost"
type AuthApi =
"signin" :> ReqBody '[JSON] Signin :> Post '[JSON] User :<|>
"register" :> ReqBody '[JSON] Signin :> Post '[JSON] Response :<|>
"isvalid" :> ReqBody '[JSON] User :> Post '[JSON] Response :<|>
"extend" :> ReqBody '[JSON] User :> Post '[JSON] Response
authApi :: Proxy AuthApi
authApi = Proxy
signin :: Signin -> ClientM User
register :: Signin -> ClientM Response
isvalid :: User -> ClientM Response
extend :: User -> ClientM Response
signin :<|> register :<|> isvalid :<|> extend = client authApi
signinQuery :: Signin -> ClientM User
signinQuery signindetails = do
signinquery <- signin signindetails
return signinquery
registerQuery :: Signin -> ClientM Response
registerQuery registerdetails = do
registerquery <- register registerdetails
return registerquery
isvalidQuery :: User -> ClientM Response
isvalidQuery isvaliddetails = do
isvalidquery <- isvalid isvaliddetails
return isvalidquery
extendQuery :: User -> ClientM Response
extendQuery extenddetails = do
extendquery <- extend extenddetails
return extendquery
type DirectoryApi =
"open" :> Capture "fileName" String :> Get '[JSON] File :<|>
"close" :> ReqBody '[JSON] File :> Post '[JSON] Response :<|>
"allfiles" :> Get '[JSON] [String]
directoryApi :: Proxy DirectoryApi
directoryApi = Proxy
open :: String -> ClientM File
close :: File -> ClientM Response
allfiles :: ClientM [String]
open :<|> close :<|> allfiles = client directoryApi
openQuery:: String -> ClientM File
openQuery filename = do
openquery <- open filename
return openquery
closeQuery:: File -> ClientM Response
closeQuery file = do
closequery <- close file
return closequery
type LockingApi =
"lock" :> Capture "fileName" String :> Get '[JSON] Bool :<|>
"unlock" :> Capture "fileName" String :> Get '[JSON] Bool :<|>
"islocked" :> Capture "fileName" String :> Get '[JSON] Bool
lockingApi :: Proxy LockingApi
lockingApi = Proxy
lock :: String -> ClientM Bool
unlock :: String -> ClientM Bool
islocked :: String -> ClientM Bool
lock :<|> unlock :<|> islocked = client lockingApi
lockQuery:: String -> ClientM Bool
lockQuery fName = do
lockquery <- lock fName
return lockquery
unlockQuery:: String -> ClientM Bool
unlockQuery fName = do
unlockquery <- unlock fName
return unlockquery
islockedQuery :: String -> ClientM Bool
islockedQuery fName = do
islockedquery <- islocked fName
return islockedquery
mainClient :: IO()
mainClient = do
createDirectoryIfMissing True ("localstorage/")
setCurrentDirectory ("localstorage/")
authpart
authpart :: IO()
authpart = do
putStrLn $ "Enter one of the following commands: LOGIN/REGISTER"
cmd <- getLine
case cmd of
"LOGIN" -> authlogin
"REGISTER" -> authregister
authlogin :: IO ()
authlogin = do
putStrLn $ "Enter your username:"
username <- getLine
putStrLn $ "Enter your password"
password <- getLine
let user = (Signin username password)
manager <- newManager defaultManagerSettings
res <- runClientM (signinQuery user) (ClientEnv manager (BaseUrl Http "localhost" 8082 ""))
case res of
Left err -> do putStrLn $ "Error: " ++ show err
authpart
Right response -> do cache <- C.newHandle 5
mainloop response cache
authregister :: IO ()
authregister = do
putStrLn $ "Enter your details to make a new account"
putStrLn $ "Enter your username:"
username <- getLine
putStrLn $ "Enter your password"
password <- getLine
let user = (Signin username password)
manager <- newManager defaultManagerSettings
res <- runClientM (registerQuery user) (ClientEnv manager (BaseUrl Http "localhost" 8082 ""))
case res of
Left err -> do putStrLn $ "Error: " ++ show err
authpart
Right response -> authpart
mainloop :: User -> (C.Handle String String) -> IO()
mainloop user cache = do
putStrLn $ "Enter one of the following commands: FILES/UPLOAD/DOWNLOAD/CLOSE"
cmd <- getLine
case cmd of
"FILES" -> displayFiles user cache
"UPLOAD" -> uploadFile user cache
"DOWNLOAD" -> downloadFile user cache
"CLOSE" -> putStrLn $ "Closing service!"
_ -> do putStrLn $ "Invalid Command. Try Again"
mainloop user cache
displayFiles :: User -> (C.Handle String String) -> IO()
displayFiles user cache = do
putStrLn "Fetching file list. Please wait."
isTokenValid user
manager <- newManager defaultManagerSettings
res <- runClientM allfiles (ClientEnv manager (BaseUrl Http "localhost" 7008 ""))
case res of
Left err -> putStrLn $ "Error: " ++ show err
Right response -> do extendToken user
mapM putStrLn response
mainloop user cache
uploadFile :: User -> (C.Handle String String) -> IO()
uploadFile user cache = do
putStrLn "Please enter the name of the file to upload"
fileName <- getLine
let cmd = shell ("vim " ++ fileName)
createProcess_ "vim" cmd
putStrLn $ "Hit enter when youre finished"
enter <- getLine
fileContent <- readFile fileName
let file = File fileName fileContent
response <- putFile file user cache
putStrLn $ "Response: " ++ show response
mainloop user cache
downloadFile :: User -> (C.Handle String String) -> IO()
downloadFile user cache = do
putStrLn "Please enter the name of the file to download"
fileName <- getLine
incache <- C.iolookup cache fileName
case incache of
(Nothing) -> getFile fileName user cache
(Just v) -> do putStrLn $ "Cache hit"
liftIO (writeFile (fileName) v)
let cmd = shell ("vim " ++ fileName)
createProcess_ "vim" cmd
putStrLn $ "Would you like to re-upload this file? y/n"
yesorno <- getLine
putStrLn $ "Are you Sure? y/n"
sure <- getLine
fileContent <- readFile (fileName)
case sure of
("y") -> do let file = File fileName fileContent
putFile file user cache
mainloop user cache
(_) -> mainloop user cache
mainloop user cache
isTokenValid :: User -> IO()
isTokenValid user = do
manager <- newManager defaultManagerSettings
res <- runClientM (isvalidQuery user) (ClientEnv manager (BaseUrl Http "localhost" 8082 ""))
case res of
Left err -> putStrLn $ "Error: " ++ show err
Right responser -> do case (response responser) of
"Token is Valid" -> return()
_ -> do putStrLn $ "Session timeout, returning to login menu"
authpart
extendToken :: User -> IO()
extendToken user = do
manager <- newManager defaultManagerSettings
res <- runClientM (extendQuery user) (ClientEnv manager (BaseUrl Http "localhost" 8082 ""))
case res of
Left err -> putStrLn $ "Error: " ++ show err
Right response -> return()
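-- Download a file: validate the session token, lock the file, fetch it from
-- the directory service, cache it locally, open it in vim and optionally
-- re-upload it before unlocking.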
getFile:: String -> User -> (C.Handle String String) -> IO()
getFile filename user cache = do
isTokenValid user
locksuccess <- lockFile filename
case locksuccess of
True -> do
manager <- newManager defaultManagerSettings
res <- runClientM (openQuery filename) (ClientEnv manager (BaseUrl Http "localhost" 7008 ""))
case res of
Left err -> putStrLn $ "Error: " ++ show err
Right response -> do extendToken user
C.ioinsert cache filename (fileContent response)
liftIO (writeFile (fileName response) (fileContent response))
let cmd = shell ("vim " ++ (fileName response))
createProcess_ "vim" cmd
putStrLn $ "Would you like to re-upload this file? y/n"
yesorno <- getLine
putStrLn $ "Please enter your answer again y/n"
sure <- getLine
case sure of
("y") -> do unlocker <- unlockFile filename
fileContent <- readFile (fileName response)
let file = File filename fileContent
putFile file user cache
mainloop user cache
(_) -> do unlocker <- unlockFile filename
mainloop user cache
False -> putStrLn $ "Unable to lock file " ++ filename ++ ". Perhaps another user is using it."
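-- Upload a file: validate the session token, lock the file, send it to the
-- directory service, unlock it again, and refresh the cache entry if the
-- file is already cached.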
putFile:: File -> User-> (C.Handle String String) -> IO ()
putFile file user cache = do
isTokenValid user
locksuccess <- lockFile (fileName file)
case locksuccess of
True -> do manager <- newManager defaultManagerSettings
res <- runClientM (closeQuery file) (ClientEnv manager (BaseUrl Http "localhost" 7008 ""))
case res of
Left err -> putStrLn $ "Error: " ++ show err
Right responser -> do extendToken user
unlocksuccess <- unlockFile (fileName file)
case unlocksuccess of
True -> do incache <- C.iolookup cache (fileName file)
case incache of
(Nothing) -> putStrLn $ (response responser)
(Just v) -> C.ioinsert cache (fileName file) (fileContent file)
False -> putStrLn $ "Failed to unlock file possible conflict. Try again soon"
False -> putStrLn $ "Unable to lock file " ++ (fileName file) ++ ". Perhaps another user is using it."
lockFile :: String -> IO Bool
lockFile fName = do
manager <- newManager defaultManagerSettings
res <- runClientM (islockedQuery fName) (ClientEnv manager (BaseUrl Http "localhost" 8000 ""))
case res of
Left err -> do putStrLn $ "Error: " ++ show err
return False
Right responser -> do case responser of
True -> return False
False -> do res <- runClientM (lockQuery fName) (ClientEnv manager (BaseUrl Http "localhost" 8000 ""))
case res of
Left err ->do putStrLn $ "Error: " ++ show err
return False
Right response -> return True
unlockFile :: String -> IO Bool
unlockFile fName = do
manager <- newManager defaultManagerSettings
res <- runClientM (islockedQuery fName) (ClientEnv manager (BaseUrl Http "localhost" 8000 ""))
case res of
Left err -> do putStrLn $ "Error: " ++ show err
return False
Right responser -> do case responser of
False -> return False
True -> do res <- runClientM (unlockQuery fName) (ClientEnv manager (BaseUrl Http "localhost" 8000 ""))
case res of
Left err -> do putStrLn $ "Error: " ++ show err
return False
Right response -> return True
| Garygunn94/DFS | ClientProxy/.stack-work/intero/intero7973HSQ.hs | bsd-3-clause | 14,674 | 29 | 29 | 5,023 | 3,846 | 1,856 | 1,990 | 333 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Halytics.Metric.Statistics where
import Data.Proxy
import GHC.TypeLits
import Halytics.Monitor.Tuple
import qualified Data.Vector.Unboxed as V
import qualified Statistics.Quantile as Stats
import qualified Statistics.Sample as Stats
newtype StoredStats a = StoredStats a
class FromStats a r | a -> r where
func :: Proxy a -> V.Vector Double -> r
instance Collect (StoredStats a) where
type S (StoredStats a) = [Double]
collect _ = flip (:)
instance Default (StoredStats a) where
initial _ = []
instance {-# OVERLAPPABLE #-} (Collect (StoredStats a), FromStats a r)
=> Resultable (StoredStats a) r where
r _ xs = func (Proxy :: Proxy a) (V.fromList xs)
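-- Note (added): any statistic defined via 'FromStats' stores raw samples
-- through 'StoredStats' and picks up this 'Resultable' instance, which runs
-- the statistic over the collected samples.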
--------------------------------------------------------------------------------
-- From Statistics.Sample
--------------------------------------------------------------------------------
-- Mean
data Mean'
type Mean = StoredStats Mean'
instance FromStats Mean' Double where func _ = Stats.mean
instance Resultable Mean String where
r _ xs = "Mean: " ++ show res
where
res = r (Proxy :: Proxy Mean) xs :: Double
data HarmonicMean'
type HarmonicMean = StoredStats HarmonicMean'
instance FromStats HarmonicMean' Double where func _ = Stats.harmonicMean
instance Resultable HarmonicMean String where
r _ xs = "Harmonic mean: " ++ show res
where
res = r (Proxy :: Proxy HarmonicMean) xs :: Double
data GeometricMean'
type GeometricMean = StoredStats GeometricMean'
instance FromStats GeometricMean' Double where func _ = Stats.geometricMean
instance Resultable GeometricMean String where
r _ xs = "Geometric mean: " ++ show res
where
res = r (Proxy :: Proxy GeometricMean) xs :: Double
--------------------------------------------------------------------------------
-- Central moments
data CentralMoment' :: Nat -> *
type CentralMoment k = StoredStats (CentralMoment' k)
instance (KnownNat k) => FromStats (CentralMoment' k) Double where
func _ = Stats.centralMoment k
where k = fromInteger $ natVal (Proxy :: Proxy k)
instance (KnownNat k) => Resultable (CentralMoment k) String where
r _ xs = show k ++ "th central moment: " ++ show res
where
res = r (Proxy :: Proxy (CentralMoment k)) xs :: Double
k = natVal (Proxy :: Proxy k) :: Integer
data CentralMoments' :: Nat -> Nat -> *
type CentralMoments k j = StoredStats (CentralMoments' k j)
instance (KnownNat k, KnownNat j)
=> FromStats (CentralMoments' k j) (Double, Double) where
func _ = Stats.centralMoments k j
where
k = fromInteger $ natVal (Proxy :: Proxy k)
j = fromInteger $ natVal (Proxy :: Proxy j)
instance (KnownNat k, KnownNat j)
=> Resultable (CentralMoments k j) (String, String) where
r _ xs = (kStr, jStr)
where
kStr = show k ++ "th central moment: " ++ show kRes
jStr = show j ++ "th central moment: " ++ show jRes
kRes = r (Proxy :: Proxy (CentralMoment k)) xs :: Double
jRes = r (Proxy :: Proxy (CentralMoment j)) xs :: Double
k = fromInteger $ natVal (Proxy :: Proxy k) :: Integer
j = fromInteger $ natVal (Proxy :: Proxy j) :: Integer
instance (KnownNat k, KnownNat j)
=> Resultable (CentralMoments k j) String where
r _ xs = kStr ++ ", " ++ jStr
where
(kStr, jStr) = r (Proxy :: Proxy (CentralMoments k j)) xs
--------------------------------------------------------------------------------
-- Curvature and all
data Skewness'
type Skewness = StoredStats Skewness'
instance FromStats Skewness' Double where func _ = Stats.skewness
instance Resultable Skewness String where
r _ xs = "Skewness: " ++ show res
where
res = r (Proxy :: Proxy Skewness) xs :: Double
data Kurtosis'
type Kurtosis = StoredStats Kurtosis'
instance FromStats Kurtosis' Double where func _ = Stats.kurtosis
instance Resultable Kurtosis String where
r _ xs = "Kurtosis: " ++ show res
where
res = r (Proxy :: Proxy Kurtosis) xs :: Double
--------------------------------------------------------------------------------
-- Variance and standard dev
data Variance'
type Variance = StoredStats Variance'
instance FromStats Variance' Double where func _ = Stats.variance
instance Resultable Variance String where
r _ xs = "Variance: " ++ show res
where
res = r (Proxy :: Proxy Variance) xs :: Double
data VarianceUnbiased'
type VarianceUnbiased = StoredStats VarianceUnbiased'
instance FromStats VarianceUnbiased' Double where
func _ = Stats.varianceUnbiased
instance Resultable VarianceUnbiased String where
r _ xs = "Unbiased variance: " ++ show res
where
res = r (Proxy :: Proxy VarianceUnbiased) xs :: Double
data MeanVariance'
type MeanVariance = StoredStats MeanVariance'
instance FromStats MeanVariance' (Double, Double) where
func _ = Stats.meanVariance
instance Resultable MeanVariance (String, String) where
r _ xs = (mStr, vStr)
where
mStr = "Mean: " ++ show m
vStr = "Variance: " ++ show v
(m, v) = r (Proxy :: Proxy MeanVariance) xs :: (Double, Double)
instance Resultable MeanVariance String where
r _ xs = mStr ++ ", " ++ vStr
where
(mStr, vStr) = r (Proxy :: Proxy MeanVariance) xs :: (String, String)
data MeanVarianceUnb'
type MeanVarianceUnb = StoredStats MeanVarianceUnb'
instance FromStats MeanVarianceUnb' (Double, Double) where
func _ = Stats.meanVarianceUnb
instance Resultable MeanVarianceUnb (String, String) where
r _ xs = (mStr, vStr)
where
mStr = "Unbiased mean: " ++ show m
vStr = "Unbiased variance: " ++ show v
(m, v) = r (Proxy :: Proxy MeanVarianceUnb) xs :: (Double, Double)
instance Resultable MeanVarianceUnb String where
r _ xs = mStr ++ ", " ++ vStr
where
(mStr, vStr) = r (Proxy :: Proxy MeanVarianceUnb) xs :: (String, String)
data StdDev'
type StdDev = StoredStats StdDev'
instance FromStats StdDev' Double where
func _ = Stats.stdDev
instance Resultable StdDev String where
r _ xs = "Standard deviation: " ++ show res
where
res = r (Proxy :: Proxy StdDev) xs :: Double
data FastVariance'
type FastVariance = StoredStats FastVariance'
instance FromStats FastVariance' Double where
func _ = Stats.fastVariance
instance Resultable FastVariance String where
r _ xs = "Fast variance: " ++ show res
where
res = r (Proxy :: Proxy FastVariance) xs :: Double
data FastVarianceUnbiased'
type FastVarianceUnbiased = StoredStats FastVarianceUnbiased'
instance FromStats FastVarianceUnbiased' Double where
func _ = Stats.fastVarianceUnbiased
instance Resultable FastVarianceUnbiased String where
r _ xs = "Fast unbiased variance: " ++ show res
where
res = r (Proxy :: Proxy FastVarianceUnbiased) xs :: Double
data FastStdDev'
type FastStdDev = StoredStats FastStdDev'
instance FromStats FastStdDev' Double where
func _ = Stats.fastStdDev
instance Resultable FastStdDev String where
r _ xs = "Fast standard deviation: " ++ show res
where
res = r (Proxy :: Proxy FastStdDev) xs :: Double
--------------------------------------------------------------------------------
-- From Statistics.Quantile
-- Helpers
type Quantile k q = WeightedAvg k q
type Percentile k = Quantile k 100
type Median = Percentile 50
data WeightedAvg' :: Nat -> Nat -> *
type WeightedAvg k q = StoredStats (WeightedAvg' k q)
instance (KnownNat k, KnownNat q)
=> FromStats (WeightedAvg' k q) (Maybe Double) where
-- Here we return a 'Maybe' because 'weightedAvg' throws an exception
-- on an empty vector
func _ v = if V.null v then Nothing else Just $ Stats.weightedAvg k q v
where
k = fromInteger $ natVal (Proxy :: Proxy k)
q = fromInteger $ natVal (Proxy :: Proxy q)
instance (KnownNat k, KnownNat q) => Resultable (WeightedAvg k q) String where
r _ xs = str (natVal (Proxy :: Proxy k)) (natVal (Proxy :: Proxy q))
where
str 50 100 = "Median: " ++ show res
str k 100 = show k ++ "th percentile: " ++ show res
str k q = "Quantile " ++ show k ++ "/" ++ show q ++ ": " ++ show res
res = r (Proxy :: Proxy (WeightedAvg k q)) xs :: Maybe Double
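-- For example (illustrative): @r (Proxy :: Proxy Median) xs :: Maybe Double@
-- stays 'Nothing' until at least one sample has been collected.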
-- TODO: Add functions that take a 'ContParam'
| nmattia/halytics | src/Halytics/Metric/Statistics.hs | bsd-3-clause | 8,593 | 0 | 13 | 1,851 | 2,548 | 1,356 | 1,192 | -1 | -1 |
{-# LANGUAGE StandaloneDeriving, FlexibleInstances, BangPatterns #-}
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies #-}
module Data.Array.Matryoshka.PArray.Unboxed (
U.Unbox,
U,
fromVector,
toVector
)
where
import Control.DeepSeq
import Data.Array.Matryoshka.PArray.Base
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as UM
data U
instance U.Unbox a => PArrayIn U a where
data PArray U a = PArrayU !(U.Vector a)
fromList !xs = PArrayU $! U.fromList xs
toMPArray (PArrayU xs) = U.unsafeThaw xs >>= (\ x -> return $! MPArrayU x)
toList (PArrayU xs) = U.toList xs
length (PArrayU xs) = let len = U.length xs in len `seq` len
empty (PArrayU xs) = let x = U.null xs in x `seq` x
emptyP = PArrayU U.empty
postscanl f ne (PArrayU xs) = PArrayU $! U.postscanl' f ne xs
splitAt n (PArrayU xs) = let (l, r) = U.splitAt n xs
!res = l `seq` r `seq` (PArrayU l, PArrayU r)
in res
(++) (PArrayU xs) (PArrayU ys) = PArrayU $! xs U.++ ys
-- should most likely be changed to regular index
(!) (PArrayU xs) n = U.unsafeIndex xs n
generate !n !f = PArrayU $! U.generate n f
cons a (PArrayU xs) = PArrayU $! U.cons a xs
snoc (PArrayU xs) a = PArrayU $! U.snoc xs a
filter !p (PArrayU xs) = PArrayU $! U.filter p xs
singleton e = PArrayU $! U.singleton e
slice s n (PArrayU xs) = PArrayU $! U.slice s n xs
fromVector :: U.Unbox a => U.Vector a -> PArray U a
fromVector !a = let x = PArrayU a in x `seq` x
toVector :: PArray U a -> U.Vector a
toVector (PArrayU a) = a
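-- Illustrative usage (not part of the original module): wrapping and
-- unwrapping is a simple round trip, e.g.
--
-- > toVector (fromVector (U.fromList [1, 2, 3 :: Int])) == U.fromList [1, 2, 3]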
-- Mutable parallel array; only used as an internal data structure.
instance U.Unbox a => PArrayOut U a where
data MPArray U a = MPArrayU !(UM.IOVector a)
unsafeNew !n = UM.unsafeNew n >>= (\ x -> return $! MPArrayU x)
unsafeFreeze (MPArrayU a) = U.unsafeFreeze a >>=
(\ x -> return $! PArrayU x)
unsafeWrite (MPArrayU a) i e = UM.unsafeWrite a i e
unsafeRead (MPArrayU a) i = UM.unsafeRead a i
toPArray = unsafeFreeze
emptyMP = unsafeNew 0
unsafeSlice s e (MPArrayU xs) = MPArrayU $! UM.unsafeSlice s e xs
deriving instance (Show a, U.Unbox a) => Show (PArray U a)
instance NFData xs => NFData (PArray U xs) where
rnf (PArrayU xs) = rnf xs
instance NFData xs => NFData (MPArray U xs) where
rnf (MPArrayU xs) = rnf xs
| agremm/Matryoshka | Data/Array/Matryoshka/PArray/Unboxed.hs | bsd-3-clause | 2,368 | 0 | 12 | 564 | 990 | 498 | 492 | -1 | -1 |
-- | For development use only.
--
module Paths_siberia where
getDataFileName :: FilePath -> IO FilePath
getDataFileName = return
getDataDir :: IO FilePath
getDataDir = return "."
| chemist/siberia | tmp/Paths_siberia.hs | bsd-3-clause | 174 | 0 | 6 | 27 | 39 | 22 | 17 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | A preliminary renderer that produces `ReactJS` components when run using
-- GHCJS.
--
module Text.Blaze.Renderer.ReactJS
( ReactJSNode
, renderHtml
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Either ( runEitherT, EitherT(..), left)
import qualified Data.ByteString.Char8 as SBC
import qualified Data.HashMap.Strict as HMS
import Data.List (isInfixOf)
import Data.Monoid ((<>))
import qualified Data.Text as T
import qualified Data.ByteString as S
import qualified GHCJS.Foreign as Foreign
import GHCJS.Marshal as Marshal
import GHCJS.Types (JSString, JSRef, JSArray, JSObject, castRef)
import Prelude hiding (span)
import Text.Blaze.Internal
import Text.Blaze.Event.Internal
import Text.Blaze.Event.Charcode (unCharcode)
import Text.Blaze.Event.Keycode (unKeycode)
------------------------------------------------------------------------------
-- FFI to ReactJS
------------------------------------------------------------------------------
data ReactJSEvent_
type ReactJSEvent = JSRef ReactJSEvent_
data ReactJSNode_
type ReactJSNode = JSRef ReactJSNode_
type ReactJSNodes = JSArray ReactJSNode
foreign import javascript unsafe
"h$reactjs.mkDomNode($1, $2, $3)"
mkReactJSParent
:: JSString -> JSObject JSString -> ReactJSNodes -> IO ReactJSNode
foreign import javascript unsafe
"h$reactjs.mkDomNode($1, $2, [])"
mkReactJSLeaf :: JSString -> JSObject JSString -> IO ReactJSNode
foreign import javascript unsafe
"$1.preventDefault()"
preventDefault :: ReactJSEvent -> IO ()
foreign import javascript unsafe
"$1.stopPropagation()"
stopPropagation :: ReactJSEvent -> IO ()
------------------------------------------------------------------------------
-- Rendering
------------------------------------------------------------------------------
-- TODO (SM): find a better representation for the rendering of Strings.
-- Probably a DList T.Text with a following concat.
-- | Render a 'ChoiceString'.
--
fromChoiceString :: ChoiceString -- ^ String to render
-> String -- ^ String to append
-> String -- ^ Resulting string
fromChoiceString (Static s) = getString s
fromChoiceString (String s) = (s ++)
fromChoiceString (Text s) = (T.unpack s ++)
fromChoiceString (ByteString s) = (SBC.unpack s ++)
fromChoiceString (PreEscaped x) =
    -- FIXME (SM): here we actually need to unescape!
case x of
String s -> (s ++)
Text s -> (\k -> T.foldr (:) k s)
s -> fromChoiceString s
fromChoiceString (External x) = case x of
-- Check that the sequence "</" is *not* in the external data.
String s -> if "</" `isInfixOf` s then id else (s ++)
Text s -> if "</" `T.isInfixOf` s then id else (\k -> T.foldr (:) k s)
ByteString s -> if "</" `S.isInfixOf` s then id else (SBC.unpack s ++)
s -> fromChoiceString s
fromChoiceString (AppendChoiceString x y) =
fromChoiceString x . fromChoiceString y
fromChoiceString EmptyChoiceString = id
-- | Render some 'Markup' to a virtual dom.
--
-- This function is morally pure.
--
render
:: forall act.
Show act
=> (act -> Bool -> IO ()) -- ^ Callback for actions raised by event handlers.
-> Markup act
-> IO ReactJSNodes
render handleAct0 markup = do
children <- Foreign.newArray
go handleAct0 (\_props -> return ()) children markup
return children
where
go :: forall act' b.
(act' -> Bool -> IO ())
-> (JSObject JSString -> IO ())
-> (JSArray ReactJSNode)
-> MarkupM act' b
-> IO ()
go handleAct setProps children html0 = case html0 of
MapActions f h ->
go (handleAct . f) setProps children h
OnEvent handler h -> do
let setProps' props = do
registerEventHandler (handleAct <$> handler) props
setProps props
go handleAct setProps' children h
Parent tag _open _close h -> tagToVNode (staticStringToJs tag) h
CustomParent tag h -> tagToVNode (choiceStringToJs tag) h
Leaf tag _begin _end -> leafToVNode (staticStringToJs tag)
CustomLeaf tag _close -> leafToVNode (choiceStringToJs tag)
Content content -> textToVNode (choiceStringToJs content)
AddAttribute key _preparedKey value h -> do
setProperty (staticStringToJs key) (choiceStringToJs value) h
AddBoolAttribute key value h -> do
setProperty (staticStringToJs key) (Foreign.toJSBool value) h
-- FIXME (SM): This is not going to work in all cases, as 'attributes'
-- must be set differently from properties.
AddCustomAttribute key value h ->
setProperty (choiceStringToJs key) (choiceStringToJs value) h
AddObjectAttribute key object h -> do
jsObj <- toJSRef_hashMap object
setProperty (staticStringToJs key) jsObj h
Empty -> return ()
Append h1 h2 -> do
go handleAct setProps children h1
go handleAct setProps children h2
where
choiceStringToJs cs = Foreign.toJSString (fromChoiceString cs "")
staticStringToJs ss = Foreign.toJSString (getText ss)
-- setProperty :: JSString -> JSRef a -> MarkupM (EventHandler act') b -> IO ()
setProperty key value content =
go handleAct setProps' children content
where
setProps' props =
Foreign.setProp key value props >> setProps props
makePropertiesObject = do
props <- Foreign.newObj
setProps props
return props
tagToVNode tag content = do
props <- makePropertiesObject
innerChildren <- Foreign.newArray
go handleAct (\_props -> return ()) innerChildren content
node <- mkReactJSParent tag props innerChildren
Foreign.pushArray node children
leafToVNode tag = do
props <- makePropertiesObject
node <- mkReactJSLeaf tag props
Foreign.pushArray node children
textToVNode :: JSString -> IO ()
textToVNode jsText = Foreign.pushArray jsText children
-- TODO (asayers): Something like this should probably be added to GHCJS.Marshal:
-- toJSRef_hashMap :: (IsString a, ToJSRef b)
-- => HMS.HashMap a b
-- -> IO (JSRef (HMS.HashMap a b))
toJSRef_hashMap :: HMS.HashMap T.Text T.Text -> IO (JSRef (HMS.HashMap T.Text T.Text))
toJSRef_hashMap hashmap = fmap castRef $ do
obj <- Foreign.newObj
let addProp k v = Foreign.setProp k (Foreign.toJSString v) obj
void $ HMS.traverseWithKey addProp hashmap
return obj
renderHtml
:: Show act
=> (act -> Bool -> IO ())
-> Markup act
-> IO (ReactJSNode)
renderHtml handleAction html = do
children <- render handleAction html
props <- Foreign.newObj
mkReactJSParent "div" props children
------------------------------------------------------------------------------
-- Event handler callback construction
------------------------------------------------------------------------------
-- | ReactJS defines the following event types, as of v0.12:
data ReactJSEventType
-- Clipboard Events
= OnCopyE | OnCutE | OnPasteE
-- Keyboard Events
| OnKeyDownE | OnKeyPressE | OnKeyUpE
-- Focus Events
| OnFocusE | OnBlurE
-- Form Events
| OnChangeE | OnInputE | OnSubmitE
-- Mouse Events
| OnClickE | OnDoubleClickE | OnDragE | OnDragEndE | OnDragEnterE
| OnDragExitE | OnDragLeaveE | OnDragOverE | OnDragStartE | OnDropE
| OnMouseDownE | OnMouseEnterE | OnMouseLeaveE | OnMouseMoveE
| OnMouseOutE | OnMouseOverE | OnMouseUpE
-- Touch Events
| OnTouchCancelE | OnTouchEndE | OnTouchMoveE | OnTouchStartE
-- UI Events
| OnScrollE
-- Wheel Events
| OnWheelE
reactEventName :: ReactJSEventType -> JSString
reactEventName ev = case ev of
OnCopyE -> "onCopy"
OnCutE -> "onCut"
OnPasteE -> "onPaste"
OnKeyDownE -> "onKeyDown"
OnKeyPressE -> "onKeyPress"
OnKeyUpE -> "onKeyUp"
OnFocusE -> "onFocus"
OnBlurE -> "onBlur"
OnChangeE -> "onChange"
OnInputE -> "onInput"
OnSubmitE -> "onSubmit"
OnClickE -> "onClick"
OnDoubleClickE -> "onDoubleClick"
OnDragE -> "onDrag"
OnDragEndE -> "onDragEnd"
OnDragEnterE -> "onDragEnter"
OnDragExitE -> "onDragExit"
OnDragLeaveE -> "onDragLeave"
OnDragOverE -> "onDragOver"
OnDragStartE -> "onDragStart"
OnDropE -> "onDrop"
OnMouseDownE -> "onMouseDown"
OnMouseEnterE -> "onMouseEnter"
OnMouseLeaveE -> "onMouseLeave"
OnMouseMoveE -> "onMouseMove"
OnMouseOutE -> "onMouseOut"
OnMouseOverE -> "onMouseOver"
OnMouseUpE -> "onMouseUp"
OnTouchCancelE -> "onTouchCancel"
OnTouchEndE -> "onTouchEnd"
OnTouchMoveE -> "onTouchMove"
OnTouchStartE -> "onTouchStart"
OnScrollE -> "onScroll"
OnWheelE -> "onWheel"
lookupProp :: JSString -> JSRef a -> EitherT T.Text IO (JSRef b)
lookupProp name obj = do
mbProp <- lift $ Foreign.getPropMaybe name obj
maybe (left err) return mbProp
where
err = "failed to get property '" <> Foreign.fromJSString name <> "'."
lookupIntProp :: JSString -> JSRef a -> EitherT T.Text IO Int
lookupIntProp name obj = do
ref <- lookupProp name obj
mbInt <- lift $ Marshal.fromJSRef ref
case mbInt of
Nothing -> left "lookupIntProp: couldn't parse field as Int"
Just x -> return x
lookupDoubleProp :: JSString -> JSRef a -> EitherT T.Text IO Double
lookupDoubleProp name obj = do
ref <- lookupProp name obj
mbDouble <- lift $ Marshal.fromJSRef ref
case mbDouble of
Nothing -> left "lookupDoubleProp: couldn't parse field as Double"
Just x -> return x
data Handler
= IgnoreEvent
| HandleEvent (IO (Bool -> IO ()))
-- ^ Contains an IO action which generates the callback to attach to the event
registerEventHandler
:: EventHandler (Bool -> IO ())
-> JSObject JSString
-- ^ Properties to register the event handler in
-> IO ()
registerEventHandler eh props = case eh of
OnKeyDown keys mkAct -> register True OnKeyDownE $ \eventRef ->
handleKeyEvent eventRef keys mkAct
OnKeyUp keys mkAct -> register True OnKeyUpE $ \eventRef ->
handleKeyEvent eventRef keys mkAct
OnKeyPress chars mkAct -> register True OnKeyPressE $ \eventRef ->
handleCharEvent eventRef chars mkAct
OnFocus mkAct -> register False OnFocusE $ \_eventRef ->
return $ Right $ HandleEvent mkAct
OnBlur mkAct -> register False OnBlurE $ \_eventRef ->
return $ Right $ HandleEvent mkAct
OnValueChange mkAct -> register True OnChangeE $ \eventRef ->
runEitherT $ do
valueRef <- lookupProp "value" =<< lookupProp "target" eventRef
return $ HandleEvent $ mkAct $ Foreign.fromJSString valueRef
OnCheckedChange mkAct -> register False OnChangeE $ \eventRef ->
runEitherT $ do
valueRef <- lookupProp "checked" =<< lookupProp "target" eventRef
return $ HandleEvent $ mkAct $ Foreign.fromJSBool valueRef
OnSubmit mkAct -> register True OnSubmitE $ \_eventRef ->
return $ Right $ HandleEvent mkAct
OnClick btns mkAct -> register False OnClickE $ \eventRef ->
handleMouseEvent eventRef btns mkAct
OnDoubleClick btns mkAct -> register False OnDoubleClickE $ \eventRef ->
handleMouseEvent eventRef btns mkAct
OnMouseDown btns mkAct -> register False OnMouseDownE $ \eventRef ->
handleMouseEvent eventRef btns mkAct
OnMouseUp btns mkAct -> register False OnMouseUpE $ \eventRef ->
handleMouseEvent eventRef btns mkAct
OnMouseMove mkAct -> register False OnMouseMoveE $ \eventRef ->
runEitherT $ HandleEvent . mkAct <$> getMousePosition eventRef
OnMouseEnter mkAct -> register False OnMouseEnterE $ \eventRef ->
runEitherT $ HandleEvent . mkAct <$> getMousePosition eventRef
OnMouseLeave mkAct -> register False OnMouseLeaveE $ \eventRef ->
runEitherT $ HandleEvent . mkAct <$> getMousePosition eventRef
OnMouseOver mkAct -> register False OnMouseOverE $ \eventRef ->
runEitherT $ HandleEvent . mkAct <$> getMousePosition eventRef
OnMouseOut mkAct -> register False OnMouseOutE $ \eventRef ->
runEitherT $ HandleEvent . mkAct <$> getMousePosition eventRef
OnScroll mkAct -> register False OnScrollE $ \eventRef ->
runEitherT $ do
      scrollTop <- lookupIntProp "scrollTop" =<< lookupProp "target" eventRef
return $ HandleEvent $ mkAct scrollTop
OnWheel mkAct -> register False OnWheelE $ \eventRef ->
runEitherT $ do
dx <- lookupDoubleProp "deltaX" eventRef
dy <- lookupDoubleProp "deltaY" eventRef
dz <- lookupDoubleProp "deltaZ" eventRef
let deltaValue = DeltaValue dx dy dz
deltaMode <- lookupIntProp "deltaMode" eventRef
domDelta <- case deltaMode of
0 -> return $ PixelDelta deltaValue
1 -> return $ LineDelta deltaValue
2 -> return $ PageDelta deltaValue
_ -> left "registerEventHandler: unrecognized delta mode"
return $ HandleEvent $ mkAct domDelta
where
handleKeyEvent eventRef keys mkAct = runEitherT $ do
keycode <- lookupIntProp "keyCode" eventRef <|>
lookupIntProp "which" eventRef
if keycode `elem` map unKeycode keys
then return $ HandleEvent mkAct
else return $ IgnoreEvent
handleCharEvent eventRef chars mkAct = runEitherT $ do
charcode <- lookupIntProp "charCode" eventRef <|>
lookupIntProp "which" eventRef
if charcode `elem` map unCharcode chars
then return $ HandleEvent mkAct
else return $ IgnoreEvent
handleMouseEvent
:: ReactJSEvent
-> [MouseButton]
-> (MousePosition -> IO (Bool -> IO ()))
-> IO (Either T.Text Handler)
handleMouseEvent eventRef btns mkAct = runEitherT $ do
button <- getMouseButton eventRef
if button `elem` btns
then HandleEvent . mkAct <$> getMousePosition eventRef
else return IgnoreEvent
getMouseButton :: ReactJSEvent -> EitherT T.Text IO MouseButton
getMouseButton eventRef = do
button <- lookupIntProp "button" eventRef
case button of
0 -> return LeftButton
1 -> return MiddleButton
2 -> return RightButton
_ -> left "getMouseButton: couldn't parse button code"
getMousePosition :: ReactJSEvent -> EitherT T.Text IO MousePosition
getMousePosition eventRef = do
clientX <- lookupIntProp "clientX" eventRef
clientY <- lookupIntProp "clientY" eventRef
pageX <- lookupIntProp "pageX" eventRef
pageY <- lookupIntProp "pageY" eventRef
screenX <- lookupIntProp "screenX" eventRef
screenY <- lookupIntProp "screenY" eventRef
return MousePosition
{ mpClientX = clientX
, mpClientY = clientY
, mpPageX = pageX
, mpPageY = pageY
, mpScreenX = screenX
, mpScreenY = screenY
}
register
:: Bool
-> ReactJSEventType
-> (ReactJSEvent -> IO (Either T.Text Handler))
-- ^ Callback to actually handle the event.
-> IO ()
register requireSyncRedraw reactEvent extractHandler = do
      -- FIXME (SM): memory leak due to AlwaysRetain. Need to hook up ReactJS
-- event handler table with GHCJS GC.
cb <- Foreign.syncCallback1 Foreign.AlwaysRetain False $ \eventRef -> do
-- try to extract handler
errOrHandler <- extractHandler eventRef
case errOrHandler of
Left err -> do
-- prevent default action and cancel propagation
preventDefault eventRef
stopPropagation eventRef
-- print the error
let eventName = Foreign.fromJSString $ reactEventName reactEvent
eventType <- either (const "Unknown type") Foreign.fromJSString <$>
runEitherT (lookupProp "type" eventRef)
putStrLn $ unlines
[ "blaze-react - event handling error: " ++ T.unpack err
, "Event was " ++ eventName ++ " of type " ++ eventType
]
Right IgnoreEvent -> return ()
Right (HandleEvent mkHandler) -> do
-- prevent default action and cancel propagation
preventDefault eventRef
stopPropagation eventRef
-- run the handler. This triggers a redraw.
handler <- mkHandler
handler requireSyncRedraw
Foreign.setProp (reactEventName reactEvent) cb props
| meiersi/blaze-react | src/Text/Blaze/Renderer/ReactJS.hs | mit | 17,708 | 40 | 24 | 5,157 | 4,069 | 2,019 | 2,050 | -1 | -1 |
{-# LANGUAGE Arrows #-}
module Main where
import Data.IORef
import System.IO
import Text.Printf
import Control.Monad.Random
import Control.Monad.Reader
import Control.Monad.Trans.MSF.Random
import Data.Array.IArray
import FRP.BearRiver
import qualified Graphics.Gloss as GLO
import qualified Graphics.Gloss.Interface.IO.Animate as GLOAnim
data SIRState = Susceptible | Infected | Recovered deriving (Show, Eq)
type Disc2dCoord = (Int, Int)
type SIREnv = Array Disc2dCoord SIRState
type SIRMonad g = Rand g
type SIRAgent g = SF (SIRMonad g) SIREnv SIRState
type SimSF g = SF (SIRMonad g) () SIREnv
data SimCtx g = SimCtx
{ simSf :: !(SimSF g)
, simEnv :: !SIREnv
, simRng :: g
, simSteps :: !Integer
, simTime :: !Time
}
contactRate :: Double
contactRate = 5.0
infectivity :: Double
infectivity = 0.05
illnessDuration :: Double
illnessDuration = 15.0
agentGridSize :: (Int, Int)
agentGridSize = (51, 51)
winSize :: (Int, Int)
winSize = (800, 800)
winTitle :: String
winTitle = "Agent-Based SIR on 2D Grid"
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
let visualise = True
t = 100
dt = 0.1
      seed = 123 -- note: seed 42 leads to recovery without any infection
g = mkStdGen seed
(as, env) = initAgentsEnv agentGridSize
sfs = map (\(coord, a) -> (sirAgent coord a, coord)) as
sf = simulationStep sfs env
ctx = mkSimCtx sf env g 0 0
if visualise
then visualiseSimulation dt ctx
else do
--let ret = runSimulationUntil t dt ctx
--writeAggregatesToFile "SIR_DUNAI.m" ret
writeSimulationUntil t dt ctx ("SIR_IO_" ++ show agentGridSize ++ "agents.m")
runSimulationUntil :: RandomGen g
=> Time
-> DTime
-> SimCtx g
-> [(Double, Double, Double)]
runSimulationUntil tMax dt ctx0 = runSimulationAux 0 ctx0 []
where
runSimulationAux :: RandomGen g
=> Time
-> SimCtx g
-> [(Double, Double, Double)]
-> [(Double, Double, Double)]
runSimulationAux t ctx acc
| t >= tMax = acc
| otherwise = runSimulationAux t' ctx' acc'
where
env = simEnv ctx
aggr = aggregateStates $ elems env
t' = t + dt
ctx' = runStepCtx dt ctx
acc' = aggr : acc
writeSimulationUntil :: RandomGen g
=> Time
-> DTime
-> SimCtx g
-> String
-> IO ()
writeSimulationUntil tMax dt ctx0 fileName = do
fileHdl <- openFile fileName WriteMode
hPutStrLn fileHdl "dynamics = ["
writeSimulationUntilAux 0 ctx0 fileHdl
hPutStrLn fileHdl "];"
writeMatlabPlot fileHdl dt
hClose fileHdl
where
writeSimulationUntilAux :: RandomGen g
=> Time
-> SimCtx g
-> Handle
-> IO ()
writeSimulationUntilAux t ctx fileHdl
| t >= tMax = return ()
| otherwise = do
let env = simEnv ctx
aggr = aggregateStates $ elems env
t' = t + dt
ctx' = runStepCtx dt ctx
hPutStrLn fileHdl (sirAggregateToString aggr)
writeSimulationUntilAux t' ctx' fileHdl
visualiseSimulation :: RandomGen g
=> DTime
-> SimCtx g
-> IO ()
visualiseSimulation dt ctx0 = do
ctxRef <- newIORef ctx0
GLOAnim.animateIO
(GLO.InWindow winTitle winSize (0, 0))
GLO.white
(nextFrame ctxRef)
(const $ return ())
where
(cx, cy) = agentGridSize
(wx, wy) = winSize
cellWidth = (fromIntegral wx / fromIntegral cx) :: Double
cellHeight = (fromIntegral wy / fromIntegral cy) :: Double
nextFrame :: RandomGen g
=> IORef (SimCtx g)
-> Float
-> IO GLO.Picture
nextFrame ctxRef _ = do
ctx <- readIORef ctxRef
let ctx' = runStepCtx dt ctx
writeIORef ctxRef ctx'
return $ ctxToPic ctx
ctxToPic :: RandomGen g
=> SimCtx g
-> GLO.Picture
ctxToPic ctx = GLO.Pictures $ aps ++ [timeStepTxt]
where
env = simEnv ctx
as = assocs env
aps = map renderAgent as
t = simTime ctx
(tcx, tcy) = transformToWindow (-7, 10)
timeTxt = printf "%0.1f" t
timeStepTxt = GLO.color GLO.black $ GLO.translate tcx tcy $ GLO.scale 0.5 0.5 $ GLO.Text timeTxt
renderAgent :: (Disc2dCoord, SIRState) -> GLO.Picture
renderAgent (coord, Susceptible)
= GLO.color (GLO.makeColor 0.0 0.0 0.7 1.0) $ GLO.translate x y $ GLO.Circle (realToFrac cellWidth / 2)
where
(x, y) = transformToWindow coord
renderAgent (coord, Infected)
= GLO.color (GLO.makeColor 0.7 0.0 0.0 1.0) $ GLO.translate x y $ GLO.ThickCircle 0 (realToFrac cellWidth)
where
(x, y) = transformToWindow coord
renderAgent (coord, Recovered)
= GLO.color (GLO.makeColor 0.0 0.70 0.0 1.0) $ GLO.translate x y $ GLO.ThickCircle 0 (realToFrac cellWidth)
where
(x, y) = transformToWindow coord
transformToWindow :: Disc2dCoord -> (Float, Float)
transformToWindow (x, y) = (x', y')
where
rw = cellWidth
rh = cellHeight
halfXSize = fromRational (toRational wx / 2.0)
halfYSize = fromRational (toRational wy / 2.0)
x' = fromRational (toRational (fromIntegral x * rw)) - halfXSize
y' = fromRational (toRational (fromIntegral y * rh)) - halfYSize
mkSimCtx :: RandomGen g
=> SimSF g
-> SIREnv
-> g
-> Integer
-> Time
-> SimCtx g
mkSimCtx sf env g steps t = SimCtx {
simSf = sf
, simEnv = env
, simRng = g
, simSteps = steps
, simTime = t
}
runStepCtx :: RandomGen g
=> DTime
-> SimCtx g
-> SimCtx g
runStepCtx dt ctx = ctx'
where
g = simRng ctx
sf = simSf ctx
sfReader = unMSF sf ()
sfRand = runReaderT sfReader dt
((env, simSf'), g') = runRand sfRand g
steps = simSteps ctx + 1
t = simTime ctx + dt
ctx' = mkSimCtx simSf' env g' steps t
initAgentsEnv :: (Int, Int) -> ([(Disc2dCoord, SIRState)], SIREnv)
initAgentsEnv (xd, yd) = (as, e)
where
xCenter = floor $ fromIntegral xd * (0.5 :: Double)
yCenter = floor $ fromIntegral yd * (0.5 :: Double)
sus = [ ((x, y), Susceptible) | x <- [0..xd-1],
y <- [0..yd-1],
x /= xCenter ||
y /= yCenter ]
inf = ((xCenter, yCenter), Infected)
as = inf : sus
e = array ((0, 0), (xd - 1, yd - 1)) as
simulationStep :: RandomGen g
=> [(SIRAgent g, Disc2dCoord)]
-> SIREnv
-> SF (SIRMonad g) () SIREnv
simulationStep sfsCoords env = MSF $ \_ -> do
let (sfs, coords) = unzip sfsCoords
  -- run all agents sequentially but keep the environment read-only:
  -- it is shared as input with all agents and thus cannot be changed
  -- by the agents themselves
ret <- mapM (`unMSF` env) sfs
-- construct new environment from all agent outputs for next step
let (as, sfs') = unzip ret
env' = foldr (\(coord, a) envAcc -> updateCell coord a envAcc) env (zip coords as)
sfsCoords' = zip sfs' coords
cont = simulationStep sfsCoords' env'
return (env', cont)
where
updateCell :: Disc2dCoord -> SIRState -> SIREnv -> SIREnv
updateCell c s e = e // [(c, s)]
sirAgent :: RandomGen g => Disc2dCoord -> SIRState -> SIRAgent g
sirAgent coord Susceptible = susceptibleAgent coord
sirAgent _ Infected = infectedAgent
sirAgent _ Recovered = recoveredAgent
susceptibleAgent :: RandomGen g => Disc2dCoord -> SIRAgent g
susceptibleAgent coord
= switch
-- delay the switching by 1 step, otherwise could
-- make the transition from Susceptible to Recovered within time-step
(susceptible >>> iPre (Susceptible, NoEvent))
(const infectedAgent)
where
susceptible :: RandomGen g
=> SF (SIRMonad g) SIREnv (SIRState, Event ())
susceptible = proc env -> do
makeContact <- occasionally (1 / contactRate) () -< ()
if not $ isEvent makeContact
then returnA -< (Susceptible, NoEvent)
else (do
let ns = neighbours env coord agentGridSize moore
--let ns = allNeighbours e
s <- drawRandomElemS -< ns
case s of
Infected -> do
infected <- arrM_ (lift $ randomBoolM infectivity) -< ()
if infected
then returnA -< (Infected, Event ())
else returnA -< (Susceptible, NoEvent)
_ -> returnA -< (Susceptible, NoEvent))
infectedAgent :: RandomGen g => SIRAgent g
infectedAgent
= switch
-- delay the switching by 1 step, otherwise could
-- make the transition from Susceptible to Recovered within time-step
(infected >>> iPre (Infected, NoEvent))
(const recoveredAgent)
where
infected :: RandomGen g => SF (SIRMonad g) SIREnv (SIRState, Event ())
infected = proc _ -> do
recovered <- occasionally illnessDuration () -< ()
if isEvent recovered
then returnA -< (Recovered, Event ())
else returnA -< (Infected, NoEvent)
recoveredAgent :: RandomGen g => SIRAgent g
recoveredAgent = arr (const Recovered)
drawRandomElemS :: MonadRandom m => SF m [a] a
drawRandomElemS = proc as -> do
r <- getRandomRS ((0, 1) :: (Double, Double)) -< ()
let len = length as
  -- clamp the index: the random number can be exactly 1.0, which would
  -- otherwise index one element past the end of the list
  let idx = min (len - 1) (floor (fromIntegral len * r))
  let a = as !! idx
returnA -< a
randomBoolM :: RandomGen g => Double -> Rand g Bool
randomBoolM p = getRandomR (0, 1) >>= (\r -> return $ r <= p)
neighbours :: SIREnv
-> Disc2dCoord
-> Disc2dCoord
-> [Disc2dCoord]
-> [SIRState]
neighbours e (x, y) (dx, dy) n = map (e !) nCoords'
where
nCoords = map (\(x', y') -> (x + x', y + y')) n
nCoords' = filter (\(nx, ny) -> nx >= 0 &&
ny >= 0 &&
nx <= (dx - 1) &&
ny <= (dy - 1)) nCoords
allNeighbours :: SIREnv -> [SIRState]
allNeighbours = elems
neumann :: [Disc2dCoord]
neumann = [ topDelta, leftDelta, rightDelta, bottomDelta ]
moore :: [Disc2dCoord]
moore = [ topLeftDelta, topDelta, topRightDelta,
leftDelta, rightDelta,
bottomLeftDelta, bottomDelta, bottomRightDelta ]
topLeftDelta :: Disc2dCoord
topLeftDelta = (-1, -1)
topDelta :: Disc2dCoord
topDelta = ( 0, -1)
topRightDelta :: Disc2dCoord
topRightDelta = ( 1, -1)
leftDelta :: Disc2dCoord
leftDelta = (-1, 0)
rightDelta :: Disc2dCoord
rightDelta = ( 1, 0)
bottomLeftDelta :: Disc2dCoord
bottomLeftDelta = (-1, 1)
bottomDelta :: Disc2dCoord
bottomDelta = ( 0, 1)
bottomRightDelta :: Disc2dCoord
bottomRightDelta = ( 1, 1)
writeAggregatesToFile :: String
-> DTime
-> [(Double, Double, Double)]
-> IO ()
writeAggregatesToFile fileName dt dynamics = do
fileHdl <- openFile fileName WriteMode
hPutStrLn fileHdl "dynamics = ["
mapM_ (hPutStrLn fileHdl . sirAggregateToString) dynamics
hPutStrLn fileHdl "];"
writeMatlabPlot fileHdl dt
hClose fileHdl
writeMatlabPlot :: Handle
-> DTime
-> IO ()
writeMatlabPlot fileHdl dt = do
hPutStrLn fileHdl "susceptible = dynamics (:, 1);"
hPutStrLn fileHdl "infected = dynamics (:, 2);"
hPutStrLn fileHdl "recovered = dynamics (:, 3);"
hPutStrLn fileHdl "totalPopulation = susceptible(1) + infected(1) + recovered(1);"
hPutStrLn fileHdl "susceptibleRatio = susceptible ./ totalPopulation;"
hPutStrLn fileHdl "infectedRatio = infected ./ totalPopulation;"
hPutStrLn fileHdl "recoveredRatio = recovered ./ totalPopulation;"
hPutStrLn fileHdl "steps = length (susceptible);"
hPutStrLn fileHdl "indices = 0 : steps - 1;"
hPutStrLn fileHdl $ "indices = indices ./ " ++ show (1 / dt) ++ ";"
hPutStrLn fileHdl "figure"
hPutStrLn fileHdl "plot (indices, susceptibleRatio.', 'color', 'blue', 'linewidth', 2);"
hPutStrLn fileHdl "hold on"
hPutStrLn fileHdl "plot (indices, infectedRatio.', 'color', 'red', 'linewidth', 2);"
hPutStrLn fileHdl "hold on"
hPutStrLn fileHdl "plot (indices, recoveredRatio.', 'color', 'green', 'linewidth', 2);"
hPutStrLn fileHdl "set(gca,'YTick',0:0.05:1.0);"
hPutStrLn fileHdl "xlabel ('Time');"
hPutStrLn fileHdl "ylabel ('Population Ratio');"
hPutStrLn fileHdl "legend('Susceptible','Infected', 'Recovered');"
sirAggregateToString :: (Double, Double, Double) -> String
sirAggregateToString (susceptibleCount, infectedCount, recoveredCount) =
printf "%f" susceptibleCount
++ "," ++ printf "%f" infectedCount
++ "," ++ printf "%f" recoveredCount
++ ";"
aggregateStates :: [SIRState] -> (Double, Double, Double)
aggregateStates as = (susceptibleCount, infectedCount, recoveredCount)
where
susceptibleCount = fromIntegral $ length $ filter (Susceptible==) as
infectedCount = fromIntegral $ length $ filter (Infected==) as
    recoveredCount = fromIntegral $ length $ filter (Recovered==) as
| thalerjonathan/phd | thesis/code/concurrent/sir/SIR_Seq/src/Main.hs | gpl-3.0 | 13,828 | 3 | 24 | 4,355 | 4,000 | 2,086 | 1,914 | 338 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
import Yesod
data HelloWorld = HelloWorld
mkYesod "HelloWorld" [parseRoutes|
/ HomeR GET
|]
instance Yesod HelloWorld
getHomeR :: Handler Html
getHomeR = defaultLayout [whamlet|Hello World!|]
main :: IO ()
main = warp 3000 HelloWorld
| wowofbob/scratch | app/Yesod/Basic/Main.hs | gpl-3.0 | 405 | 1 | 6 | 94 | 77 | 42 | 35 | 12 | 1 |
-- Copyright 2016 TensorFlow authors.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
-- | Conduit wrappers for TensorFlow.Records.
{-# LANGUAGE Rank2Types #-}
module TensorFlow.Records.Conduit
(
-- * Encode/Decode
encodeTFRecords
, decodeTFRecords
-- * Source/Sink
, sinkTFRecords
, sourceTFRecords
) where
import Control.Monad.Catch (MonadThrow)
import Control.Monad.Trans.Resource (MonadResource)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.Conduit ((=$=), Conduit, Consumer, Producer)
import Data.Conduit.Binary (sinkFile, sourceFile)
import Data.Conduit.Cereal (conduitGet2, conduitPut)
import TensorFlow.Records (getTFRecord, putTFRecord)
-- | Decode TFRecords from a stream of bytes.
decodeTFRecords :: MonadThrow m => Conduit B.ByteString m BL.ByteString
decodeTFRecords = conduitGet2 getTFRecord
-- | Read TFRecords from a file.
sourceTFRecords :: (MonadResource m, MonadThrow m) => FilePath -> Producer m BL.ByteString
sourceTFRecords path = sourceFile path =$= decodeTFRecords
-- | Encode TFRecords to a stream of bytes.
encodeTFRecords :: Monad m => Conduit BL.ByteString m B.ByteString
encodeTFRecords = conduitPut putTFRecord
-- | Write TFRecords to a file.
sinkTFRecords :: (MonadResource m) => FilePath -> Consumer BL.ByteString m ()
sinkTFRecords path = encodeTFRecords =$= sinkFile path
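-- Illustrative usage (not part of this module; assumes ($$) from
-- "Data.Conduit" and 'runResourceT' from the resourcet package):
--
-- > copyRecords :: FilePath -> FilePath -> IO ()
-- > copyRecords inPath outPath =
-- >   runResourceT $ sourceTFRecords inPath $$ sinkTFRecords outPath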
| judah/tensorflow-haskell | tensorflow-records-conduit/src/TensorFlow/Records/Conduit.hs | apache-2.0 | 1,892 | 0 | 8 | 283 | 296 | 178 | 118 | 23 | 1 |
{-# OPTIONS_GHC -fno-warn-name-shadowing -fno-warn-unused-binds #-}
{-# LANGUAGE StandaloneDeriving, FlexibleContexts, DeriveDataTypeable
, UndecidableInstances, FlexibleInstances, MultiParamTypeClasses
, PatternGuards, Rank2Types, TypeSynonymInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Groups
-- Copyright : Quentin Moser <[email protected]>
-- License : BSD-style (see LICENSE)
--
-- Maintainer : orphaned
-- Stability : unstable
-- Portability : unportable
--
-- Two-level layout with windows split in individual layout groups,
-- themselves managed by a user-provided layout.
--
-----------------------------------------------------------------------------
module XMonad.Layout.Groups ( -- * Usage
-- $usage
-- * Creation
group
-- * Messages
, GroupsMessage(..)
, ModifySpec
-- ** Useful 'ModifySpec's
, swapUp
, swapDown
, swapMaster
, focusUp
, focusDown
, focusMaster
, swapGroupUp
, swapGroupDown
, swapGroupMaster
, focusGroupUp
, focusGroupDown
, focusGroupMaster
, moveToGroupUp
, moveToGroupDown
, moveToNewGroupUp
, moveToNewGroupDown
, splitGroup
-- * Types
, Groups
, Group(..)
, onZipper
, onLayout
, WithID
, sameID
) where
import XMonad
import qualified XMonad.StackSet as W
import XMonad.Util.Stack
import Data.Maybe (isJust, isNothing, fromMaybe, catMaybes, fromJust)
import Data.List ((\\))
import Control.Arrow ((>>>))
import Control.Applicative ((<$>))
import Control.Monad (forM)
-- $usage
-- This module provides a layout combinator that allows you
-- to manage your windows in independent groups. You can provide
-- both the layout with which to arrange the windows inside each
-- group, and the layout with which the groups themselves will
-- be arranged on the screen.
--
-- The "XMonad.Layout.Groups.Examples" and "XMonad.Layout.Groups.Wmii"
-- modules contain examples of layouts that can be defined with this
-- combinator. They're also the recommended starting point
-- if you are a beginner and looking for something you can use easily.
--
-- One thing to note is that 'Groups'-based layout have their own
-- notion of the order of windows, which is completely separate
-- from XMonad's. For this reason, operations like 'XMonad.StackSet.SwapUp'
-- will have no visible effect, and those like 'XMonad.StackSet.focusUp'
-- will focus the windows in an unpredictable order. For a better way of
-- rearranging windows and moving focus in such a layout, see the
-- example 'ModifySpec's (to be passed to the 'Modify' message) provided
-- by this module.
--
-- If you use both 'Groups'-based and other layouts, The "XMonad.Layout.Groups.Helpers"
-- module provides actions that can work correctly with both, defined using
-- functions from "XMonad.Actions.MessageFeedback".
-- | Create a 'Groups' layout.
--
-- Note that the second parameter (the layout for arranging the
-- groups) is not used on 'Windows', but on 'Group's. For this
-- reason, you can only use layouts that don't specifically
-- need to manage 'Window's. This is obvious, when you think
-- about it.
group :: l Window -> l2 (Group l Window) -> Groups l l2 Window
group l l2 = Groups l l2 startingGroups (U 1 0)
where startingGroups = fromJust $ singletonZ $ G (ID (U 0 0) l) emptyZ
-- * Stuff with unique keys
data Uniq = U Integer Integer
deriving (Eq, Show, Read)
-- | From a seed, generate an infinite list of keys and a new
-- seed. All keys generated with this method will be different
-- provided you don't use 'gen' again with a key from the list.
-- (if you need to do that, see 'split' instead)
gen :: Uniq -> (Uniq, [Uniq])
gen (U i1 i2) = (U (i1+1) i2, zipWith U (repeat i1) [i2..])
-- | Split an infinite list into two. I ended up not
-- needing this, but let's keep it just in case.
-- split :: [a] -> ([a], [a])
-- split as = snd $ foldr step (True, ([], [])) as
-- where step a (True, (as1, as2)) = (False, (a:as1, as2))
-- step a (False, (as1, as2)) = (True, (as1, a:as2))
-- | Add a unique identity to a layout so we can
-- follow it around.
data WithID l a = ID { getID :: Uniq
, unID :: (l a)}
deriving (Show, Read)
-- | Compare the ids of two 'WithID' values
sameID :: WithID l a -> WithID l a -> Bool
sameID (ID id1 _) (ID id2 _) = id1 == id2
instance Eq (WithID l a) where
ID id1 _ == ID id2 _ = id1 == id2
instance LayoutClass l a => LayoutClass (WithID l) a where
    runLayout ws@W.Workspace { W.layout = ID id l } r
= do (placements, ml') <- flip runLayout r
ws { W.layout = l}
return (placements, ID id <$> ml')
handleMessage (ID id l) sm = do ml' <- handleMessage l sm
return $ ID id <$> ml'
description (ID _ l) = description l
-- * The 'Groups' layout
-- ** Datatypes
-- | A group of windows and its layout algorithm.
data Group l a = G { gLayout :: WithID l a
, gZipper :: Zipper a }
deriving (Show, Read, Eq)
onLayout :: (WithID l a -> WithID l a) -> Group l a -> Group l a
onLayout f g = g { gLayout = f $ gLayout g }
onZipper :: (Zipper a -> Zipper a) -> Group l a -> Group l a
onZipper f g = g { gZipper = f $ gZipper g }
-- | The type of our layouts.
data Groups l l2 a = Groups { -- | The starting layout for new groups
baseLayout :: l a
-- | The layout for placing each group on the screen
, partitioner :: l2 (Group l a)
-- | The window groups
, groups :: W.Stack (Group l a)
-- | A seed for generating unique ids
, seed :: Uniq
}
deriving instance (Show a, Show (l a), Show (l2 (Group l a))) => Show (Groups l l2 a)
deriving instance (Read a, Read (l a), Read (l2 (Group l a))) => Read (Groups l l2 a)
-- | Messages accepted by 'Groups'-based layouts.
-- All other messages are forwarded to the layout of the currently
-- focused subgroup (as if they had been wrapped in 'ToFocused').
data GroupsMessage = ToEnclosing SomeMessage -- ^ Send a message to the enclosing layout
-- (the one that places the groups themselves)
| ToGroup Int SomeMessage -- ^ Send a message to the layout for nth group
-- (starting at 0)
| ToFocused SomeMessage -- ^ Send a message to the layout for the focused
-- group
| ToAll SomeMessage -- ^ Send a message to all the sub-layouts
| Refocus -- ^ Refocus the window which should be focused according
-- to the layout.
| Modify ModifySpec -- ^ Modify the ordering\/grouping\/focusing
-- of windows according to a 'ModifySpec'
deriving Typeable
instance Show GroupsMessage where
show (ToEnclosing _) = "ToEnclosing {...}"
show (ToGroup i _) = "ToGroup "++show i++" {...}"
show (ToFocused _) = "ToFocused {...}"
show (ToAll _) = "ToAll {...}"
show Refocus = "Refocus"
show (Modify _) = "Modify {...}"
instance Message GroupsMessage
modifyGroups :: (Zipper (Group l a) -> Zipper (Group l a))
-> Groups l l2 a -> Groups l l2 a
modifyGroups f g = let (seed', id:_) = gen (seed g)
defaultGroups = fromJust $ singletonZ $ G (ID id $ baseLayout g) emptyZ
in g { groups = fromMaybe defaultGroups . f . Just $ groups g
, seed = seed' }
-- ** Readaptation
-- | Adapt our groups to a new stack.
-- This algorithm handles window additions and deletions correctly,
-- ignores changes in window ordering, and tries to react to any
-- other stack changes as gracefully as possible.
readapt :: Eq a => Zipper a -> Groups l l2 a -> Groups l l2 a
readapt z g = let mf = getFocusZ z
(seed', id:_) = gen $ seed g
g' = g { seed = seed' }
in flip modifyGroups g' $ mapZ_ (onZipper $ removeDeleted z)
>>> filterKeepLast (isJust . gZipper)
>>> findNewWindows (W.integrate' z)
>>> addWindows (ID id $ baseLayout g)
>>> focusGroup mf
>>> onFocusedZ (onZipper $ focusWindow mf)
where filterKeepLast _ Nothing = Nothing
filterKeepLast f z@(Just s) = maybe (singletonZ $ W.focus s) Just
$ filterZ_ f z
-- | Remove the windows from a group which are no longer present in
-- the stack.
removeDeleted :: Eq a => Zipper a -> Zipper a -> Zipper a
removeDeleted z = filterZ_ (flip elemZ z)
-- | Identify the windows not already in a group.
findNewWindows :: Eq a => [a] -> Zipper (Group l a)
-> (Zipper (Group l a), [a])
findNewWindows as gs = (gs, foldrZ_ removePresent as gs)
where removePresent g as' = filter (not . flip elemZ (gZipper g)) as'
-- | Add windows to the focused group. If you need to create one,
-- use the given layout and an id from the given list.
addWindows :: WithID l a -> (Zipper (Group l a), [a]) -> Zipper (Group l a)
addWindows l (Nothing, as) = singletonZ $ G l (W.differentiate as)
addWindows _ (z, as) = onFocusedZ (onZipper add) z
where add z = foldl (flip insertUpZ) z as
-- | Focus the group containing the given window
focusGroup :: Eq a => Maybe a -> Zipper (Group l a) -> Zipper (Group l a)
focusGroup Nothing = id
focusGroup (Just a) = fromTags . map (tagBy $ elemZ a . gZipper) . W.integrate'
-- | Focus the given window
focusWindow :: Eq a => Maybe a -> Zipper a -> Zipper a
focusWindow Nothing = id
focusWindow (Just a) = fromTags . map (tagBy (==a)) . W.integrate'
-- * Interface
-- ** Layout instance
instance (LayoutClass l Window, LayoutClass l2 (Group l Window))
=> LayoutClass (Groups l l2) Window where
description (Groups _ p gs _) = s1++" by "++s2
where s1 = description $ gLayout $ W.focus gs
s2 = description p
runLayout ws@(W.Workspace _ _l z) r = let l = readapt z _l in
do (areas, mpart') <- runLayout ws { W.layout = partitioner l
, W.stack = Just $ groups l } r
results <- forM areas $ \(g, r') -> runLayout ws { W.layout = gLayout g
, W.stack = gZipper g } r'
let hidden = map gLayout (W.integrate $ groups _l) \\ map (gLayout . fst) areas
hidden' <- mapM (flip handleMessage $ SomeMessage Hide) hidden
let placements = concatMap fst results
newL = justMakeNew l mpart' (map snd results ++ hidden')
return $ (placements, newL)
handleMessage l@(Groups _ p _ _) sm | Just (ToEnclosing sm') <- fromMessage sm
= do mp' <- handleMessage p sm'
return $ maybeMakeNew l mp' []
handleMessage l@(Groups _ p gs _) sm | Just (ToAll sm') <- fromMessage sm
= do mp' <- handleMessage p sm'
mg's <- mapZM_ (handle sm') $ Just gs
return $ maybeMakeNew l mp' $ W.integrate' mg's
where handle sm (G l _) = handleMessage l sm
handleMessage l sm | Just a <- fromMessage sm
= let _rightType = a == Hide -- Is there a better-looking way
-- of doing this?
in handleMessage l $ SomeMessage $ ToAll sm
handleMessage l@(Groups _ _ z _) sm = case fromMessage sm of
Just (ToFocused sm') -> do mg's <- W.integrate' <$> handleOnFocused sm' z
return $ maybeMakeNew l Nothing mg's
Just (ToGroup i sm') -> do mg's <- handleOnIndex i sm' z
return $ maybeMakeNew l Nothing mg's
Just (Modify spec) -> case applySpec spec l of
Just l' -> refocus l' >> return (Just l')
Nothing -> return $ Just l
Just Refocus -> refocus l >> return (Just l)
Just _ -> return Nothing
Nothing -> handleMessage l $ SomeMessage (ToFocused sm)
where handleOnFocused sm z = mapZM step $ Just z
where step True (G l _) = handleMessage l sm
step False _ = return Nothing
handleOnIndex i sm z = mapM step $ zip [0..] $ W.integrate z
where step (j, (G l _)) | i == j = handleMessage l sm
step _ = return Nothing
justMakeNew :: Groups l l2 a -> Maybe (l2 (Group l a)) -> [Maybe (WithID l a)]
-> Maybe (Groups l l2 a)
justMakeNew g mpart' ml's = Just g { partitioner = fromMaybe (partitioner g) mpart'
, groups = combine (groups g) ml's }
where combine z ml's = let table = map (\(ID id a) -> (id, a)) $ catMaybes ml's
in flip mapS_ z $ \(G (ID id l) ws) -> case lookup id table of
Nothing -> G (ID id l) ws
Just l' -> G (ID id l') ws
mapS_ f = fromJust . mapZ_ f . Just
maybeMakeNew :: Groups l l2 a -> Maybe (l2 (Group l a)) -> [Maybe (WithID l a)]
-> Maybe (Groups l l2 a)
maybeMakeNew _ Nothing ml's | all isNothing ml's = Nothing
maybeMakeNew g mpart' ml's = justMakeNew g mpart' ml's
refocus :: Groups l l2 Window -> X ()
refocus g = case getFocusZ $ gZipper $ W.focus $ groups g
of Just w -> focus w
Nothing -> return ()
-- ** ModifySpec type
-- | Type of functions describing modifications to a 'Groups' layout. They
-- are transformations on 'Zipper's of groups.
--
-- Things you shouldn't do:
--
-- * Forge new windows (they will be ignored)
--
-- * Duplicate windows (whatever happens is your problem)
--
-- * Remove windows (they will be added again)
--
-- * Duplicate layouts (only one will be kept, the rest will
-- get the base layout)
--
-- Note that 'ModifySpec' is a rank-2 type (indicating that 'ModifySpec's must
-- be polymorphic in the layout type), so if you define functions taking
-- 'ModifySpec's as arguments, or returning them, you'll need to write a type
-- signature and add @{-# LANGUAGE Rank2Types #-}@ at the beginning
type ModifySpec = forall l. WithID l Window
-> Zipper (Group l Window)
-> Zipper (Group l Window)
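-- An illustrative sketch (not from this module): a 'ModifySpec' is applied by
-- sending it to the layout wrapped in the 'Modify' message, e.g. from a
-- keybinding in a user's config:
--
-- > ((modm, xK_j), sendMessage $ Modify focusDown)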
-- | Apply a ModifySpec.
applySpec :: ModifySpec -> Groups l l2 Window -> Maybe (Groups l l2 Window)
applySpec f g = let (seed', id:ids) = gen $ seed g
g' = flip modifyGroups g $ f (ID id $ baseLayout g)
>>> toTags
>>> foldr reID ((ids, []), [])
>>> snd
>>> fromTags
in case groups g == groups g' of
True -> Nothing
False -> Just g' { seed = seed' }
where reID eg ((id:ids, seen), egs)
= let myID = getID $ gLayout $ fromE eg
in case elem myID seen of
False -> ((id:ids, myID:seen), eg:egs)
True -> ((ids, seen), mapE_ (setID id) eg:egs)
where setID id (G (ID _ _) z) = G (ID id $ baseLayout g) z
reID _ (([], _), _) = undefined -- The list of ids is infinite
-- ** Misc. ModifySpecs
-- | helper
onFocused :: (Zipper Window -> Zipper Window) -> ModifySpec
onFocused f _ gs = onFocusedZ (onZipper f) gs
-- | Swap the focused window with the previous one.
swapUp :: ModifySpec
swapUp = onFocused swapUpZ
-- | Swap the focused window with the next one.
swapDown :: ModifySpec
swapDown = onFocused swapDownZ
-- | Swap the focused window with the (group's) master
-- window.
swapMaster :: ModifySpec
swapMaster = onFocused swapMasterZ
-- | Swap the focused group with the previous one.
swapGroupUp :: ModifySpec
swapGroupUp _ = swapUpZ
-- | Swap the focused group with the next one.
swapGroupDown :: ModifySpec
swapGroupDown _ = swapDownZ
-- | Swap the focused group with the master group.
swapGroupMaster :: ModifySpec
swapGroupMaster _ = swapMasterZ
-- | Move focus to the previous window in the group.
focusUp :: ModifySpec
focusUp = onFocused focusUpZ
-- | Move focus to the next window in the group.
focusDown :: ModifySpec
focusDown = onFocused focusDownZ
-- | Move focus to the group's master window.
focusMaster :: ModifySpec
focusMaster = onFocused focusMasterZ
-- | Move focus to the previous group.
focusGroupUp :: ModifySpec
focusGroupUp _ = focusUpZ
-- | Move focus to the next group.
focusGroupDown :: ModifySpec
focusGroupDown _ = focusDownZ
-- | Move focus to the master group.
focusGroupMaster :: ModifySpec
focusGroupMaster _ = focusMasterZ
-- | helper
_removeFocused :: W.Stack a -> (a, Zipper a)
_removeFocused (W.Stack f (u:up) down) = (f, Just $ W.Stack u up down)
_removeFocused (W.Stack f [] (d:down)) = (f, Just $ W.Stack d [] down)
_removeFocused (W.Stack f [] []) = (f, Nothing)
-- helper
_moveToNewGroup :: WithID l Window -> W.Stack (Group l Window)
-> (Group l Window -> Zipper (Group l Window)
-> Zipper (Group l Window))
-> Zipper (Group l Window)
_moveToNewGroup l0 s insertX | G l (Just f) <- W.focus s
= let (w, f') = _removeFocused f
s' = s { W.focus = G l f' }
in insertX (G l0 $ singletonZ w) $ Just s'
_moveToNewGroup _ s _ = Just s
-- | Move the focused window to a new group before the current one.
moveToNewGroupUp :: ModifySpec
moveToNewGroupUp _ Nothing = Nothing
moveToNewGroupUp l0 (Just s) = _moveToNewGroup l0 s insertUpZ
-- | Move the focused window to a new group after the current one.
moveToNewGroupDown :: ModifySpec
moveToNewGroupDown _ Nothing = Nothing
moveToNewGroupDown l0 (Just s) = _moveToNewGroup l0 s insertDownZ
-- | Move the focused window to the previous group.
-- If 'True', when in the first group, wrap around to the last one.
-- If 'False', create a new group before it.
moveToGroupUp :: Bool -> ModifySpec
moveToGroupUp _ _ Nothing = Nothing
moveToGroupUp False l0 (Just s) = if null (W.up s) then moveToNewGroupUp l0 (Just s)
else moveToGroupUp True l0 (Just s)
moveToGroupUp True _ (Just s@(W.Stack _ [] [])) = Just s
moveToGroupUp True _ (Just s@(W.Stack (G l (Just f)) _ _))
= let (w, f') = _removeFocused f
in onFocusedZ (onZipper $ insertUpZ w) $ focusUpZ $ Just s { W.focus = G l f' }
moveToGroupUp True _ gs = gs
-- | Move the focused window to the next group.
-- If 'True', when in the last group, wrap around to the first one.
-- If 'False', create a new group after it.
moveToGroupDown :: Bool -> ModifySpec
moveToGroupDown _ _ Nothing = Nothing
moveToGroupDown False l0 (Just s) = if null (W.down s) then moveToNewGroupDown l0 (Just s)
else moveToGroupDown True l0 (Just s)
moveToGroupDown True _ (Just s@(W.Stack _ [] [])) = Just s
moveToGroupDown True _ (Just s@(W.Stack (G l (Just f)) _ _))
= let (w, f') = _removeFocused f
in onFocusedZ (onZipper $ insertUpZ w) $ focusDownZ $ Just s { W.focus = G l f' }
moveToGroupDown True _ gs = gs
-- | Split the focused group into two at the position of the focused window (below it,
-- unless it's the last window - in that case, above it).
splitGroup :: ModifySpec
splitGroup _ Nothing = Nothing
splitGroup l0 z@(Just s) | G l (Just ws) <- W.focus s
= case ws of
W.Stack _ [] [] -> z
W.Stack f (u:up) [] -> let g1 = G l $ Just $ W.Stack f [] []
g2 = G l0 $ Just $ W.Stack u up []
in insertDownZ g1 $ onFocusedZ (const g2) z
W.Stack f up (d:down) -> let g1 = G l $ Just $ W.Stack f up []
g2 = G l0 $ Just $ W.Stack d [] down
in insertUpZ g1 $ onFocusedZ (const g2) z
splitGroup _ _ = Nothing
| f1u77y/xmonad-contrib | XMonad/Layout/Groups.hs | bsd-3-clause | 21,445 | 0 | 17 | 7,217 | 5,474 | 2,809 | 2,665 | 280 | 4 |
{-# LANGUAGE TemplateHaskell, FlexibleInstances, UndecidableInstances, DeriveDataTypeable #-}
module Baum.Such.Op where
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
import Autolib.Reporter
import System.Random
class ( Eq a, Ord a, Show a, ToDoc a, Reader a, Random a )
=> OpC a
instance ( Eq a, Ord a, Show a, ToDoc a, Reader a, Random a )
=> OpC a
data OpC a => Op a = Insert a | Delete a | Any
deriving ( Eq, Typeable )
$(derives [makeReader, makeToDoc] [''Op])
conforms :: OpC a => Op a -> Op a -> Reporter ()
conforms _ Any = reject $
text "Sie sollen Any durch eine Operation ersetzen."
conforms Any _ = return ()
conforms x y = when ( x /= y ) $ reject $
text "Die Operation" <+> toDoc x <+> text "soll nicht geändert werden."
| florianpilz/autotool | src/Baum/Such/Op.hs | gpl-2.0 | 794 | 0 | 11 | 176 | 292 | 149 | 143 | -1 | -1 |
module Base.Renderable.Centered where
import Data.Abelian
import Graphics.Qt
import Utils
import Base.Types
import Base.Renderable.Common ()
centered :: Renderable r => r -> RenderableInstance
centered = RenderableInstance . Centered . RenderableInstance
data Centered = Centered RenderableInstance
instance Renderable Centered where
label = const "Centered"
render ptr app config parentSize (Centered child) = return $ tuple parentSize $ do
(childSize, action) <- render ptr app config parentSize child
        let offset = notLessThanZero $ size2position $
                fmap (fromIntegral . round . (/ 2)) (parentSize -~ childSize)
translate ptr offset
action
notLessThanZero (Position x y) = Position (max 0 x) (max 0 y)
| changlinli/nikki | src/Base/Renderable/Centered.hs | lgpl-3.0 | 769 | 0 | 16 | 164 | 237 | 122 | 115 | 18 | 1 |
module Snap.Internal.Http.Server.TimeoutManager.Tests
( tests ) where
import Control.Concurrent
import Data.IORef
import Data.Maybe
import System.PosixCompat.Time
import System.Timeout
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit hiding (Test, path)
import qualified Snap.Internal.Http.Server.TimeoutManager as TM
tests :: [Test]
tests = [ testOneTimeout
, testOneTimeoutAfterInactivity
, testCancel
, testTickle ]
testOneTimeout :: Test
testOneTimeout = testCase "timeout/oneTimeout" $ do
mgr <- TM.initialize 3 epochTime
oneTimeout mgr
testOneTimeoutAfterInactivity :: Test
testOneTimeoutAfterInactivity =
testCase "timeout/oneTimeoutAfterInactivity" $ do
mgr <- TM.initialize 3 epochTime
threadDelay $ 7 * seconds
oneTimeout mgr
oneTimeout :: TM.TimeoutManager -> IO ()
oneTimeout mgr = do
mv <- newEmptyMVar
_ <- TM.register (putMVar mv ()) mgr
m <- timeout (6*seconds) $ takeMVar mv
assertBool "timeout fired" $ isJust m
TM.stop mgr
testTickle :: Test
testTickle = testCase "timeout/tickle" $ do
mgr <- TM.initialize 8 epochTime
ref <- newIORef (0 :: Int)
h <- TM.register (writeIORef ref 1) mgr
threadDelay $ 5 * seconds
b0 <- readIORef ref
assertEqual "b0" 0 b0
TM.tickle h 8
threadDelay $ 5 * seconds
b1 <- readIORef ref
assertEqual "b1" 0 b1
threadDelay $ 8 * seconds
b2 <- readIORef ref
assertEqual "b2" 1 b2
TM.stop mgr
testCancel :: Test
testCancel = testCase "timeout/cancel" $ do
mgr <- TM.initialize 3 epochTime
ref <- newIORef (0 :: Int)
h <- TM.register (writeIORef ref 1) mgr
threadDelay $ 1 * seconds
TM.cancel h
threadDelay $ 5 * seconds
b0 <- readIORef ref
assertEqual "b0" 0 b0
TM.stop mgr
seconds :: Int
seconds = (10::Int) ^ (6::Int)
| beni55/snap-server | test/suite/Snap/Internal/Http/Server/TimeoutManager/Tests.hs | bsd-3-clause | 1,964 | 0 | 11 | 517 | 621 | 305 | 316 | 62 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Sandbox
-- Maintainer : [email protected]
-- Portability : portable
--
-- UI for the sandboxing functionality.
-----------------------------------------------------------------------------
module Distribution.Client.Sandbox (
sandboxInit,
sandboxDelete,
sandboxAddSource,
sandboxAddSourceSnapshot,
sandboxDeleteSource,
sandboxListSources,
sandboxHcPkg,
dumpPackageEnvironment,
withSandboxBinDirOnSearchPath,
getSandboxConfigFilePath,
loadConfigOrSandboxConfig,
findSavedDistPref,
initPackageDBIfNeeded,
maybeWithSandboxDirOnSearchPath,
WereDepsReinstalled(..),
reinstallAddSourceDeps,
maybeReinstallAddSourceDeps,
SandboxPackageInfo(..),
maybeWithSandboxPackageInfo,
tryGetIndexFilePath,
sandboxBuildDir,
getInstalledPackagesInSandbox,
updateSandboxConfigFileFlag,
updateInstallDirs,
configPackageDB', configCompilerAux', getPersistOrConfigCompiler
) where
import Distribution.Client.Setup
( SandboxFlags(..), ConfigFlags(..), ConfigExFlags(..), InstallFlags(..)
, GlobalFlags(..), defaultConfigExFlags, defaultInstallFlags
, defaultSandboxLocation, withRepoContext )
import Distribution.Client.Sandbox.Timestamp ( listModifiedDeps
, maybeAddCompilerTimestampRecord
, withAddTimestamps
, removeTimestamps )
import Distribution.Client.Config
( SavedConfig(..), defaultUserInstall, loadConfig )
import Distribution.Client.Dependency ( foldProgress )
import Distribution.Client.IndexUtils ( BuildTreeRefType(..) )
import Distribution.Client.Install ( InstallArgs,
makeInstallContext,
makeInstallPlan,
processInstallPlan )
import Distribution.Utils.NubList ( fromNubList )
import Distribution.Client.Sandbox.PackageEnvironment
( PackageEnvironment(..), PackageEnvironmentType(..)
, createPackageEnvironmentFile, classifyPackageEnvironment
, tryLoadSandboxPackageEnvironmentFile, loadUserConfig
, commentPackageEnvironment, showPackageEnvironmentWithComments
, sandboxPackageEnvironmentFile, userPackageEnvironmentFile
, sandboxPackageDBPath )
import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..)
, UseSandbox(..) )
import Distribution.Client.SetupWrapper
( SetupScriptOptions(..), defaultSetupScriptOptions )
import Distribution.Client.Types ( PackageLocation(..)
, SourcePackage(..) )
import Distribution.Client.Utils ( inDir, tryCanonicalizePath
, tryFindAddSourcePackageDesc)
import Distribution.PackageDescription.Configuration
( flattenPackageDescription )
import Distribution.PackageDescription.Parse ( readPackageDescription )
import Distribution.Simple.Compiler ( Compiler(..), PackageDB(..)
, PackageDBStack )
import Distribution.Simple.Configure ( configCompilerAuxEx
, interpretPackageDbFlags
, getPackageDBContents
, maybeGetPersistBuildConfig
, findDistPrefOrDefault
, findDistPref )
import qualified Distribution.Simple.LocalBuildInfo as LocalBuildInfo
import Distribution.Simple.PreProcess ( knownSuffixHandlers )
import Distribution.Simple.Program ( ProgramConfiguration )
import Distribution.Simple.Setup ( Flag(..), HaddockFlags(..)
, fromFlagOrDefault, flagToMaybe )
import Distribution.Simple.SrcDist ( prepareTree )
import Distribution.Simple.Utils ( die, debug, notice, info, warn
, debugNoWrap, defaultPackageDesc
, intercalate, topHandlerWith
, createDirectoryIfMissingVerbose )
import Distribution.Package ( Package(..) )
import Distribution.System ( Platform )
import Distribution.Text ( display )
import Distribution.Verbosity ( Verbosity, lessVerbose )
import Distribution.Compat.Environment ( lookupEnv, setEnv )
import Distribution.Client.Compat.FilePerms ( setFileHidden )
import qualified Distribution.Client.Sandbox.Index as Index
import Distribution.Simple.PackageIndex ( InstalledPackageIndex )
import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex
import qualified Distribution.Simple.Register as Register
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Either (partitionEithers)
import Control.Exception ( assert, bracket_ )
import Control.Monad ( forM, liftM, liftM2, unless, when )
import Data.Bits ( shiftL, shiftR, xor )
import Data.Char ( ord )
import Data.IORef ( newIORef, writeIORef, readIORef )
import Data.List ( delete
, foldl'
, intersperse
, isPrefixOf
, groupBy )
import Data.Maybe ( fromJust )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid ( mempty, mappend )
#endif
import Data.Word ( Word32 )
import Numeric ( showHex )
import System.Directory ( canonicalizePath
, createDirectory
, doesDirectoryExist
, doesFileExist
, getCurrentDirectory
, removeDirectoryRecursive
, removeFile
, renameDirectory )
import System.FilePath ( (</>), equalFilePath
, getSearchPath
, searchPathSeparator
, splitSearchPath
, takeDirectory )
--
-- * Constants
--
-- | The name of the sandbox subdirectory where we keep snapshots of add-source
-- dependencies.
snapshotDirectoryName :: FilePath
snapshotDirectoryName = "snapshots"
-- | Non-standard build dir that is used for building add-source deps instead of
-- "dist". Fixes surprising behaviour in some cases (see issue #1281).
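--
-- For example (illustrative only; the exact digits depend on the hash of the
-- canonical sandbox path), a sandbox at "/home/alice/proj/.cabal-sandbox" gets
-- a build dir of the form "dist/dist-sandbox-1a2b3c4d", where the suffix is
-- the Jenkins hash of the path rendered as at most eight hex digits (the hash
-- is a 'Word32').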
sandboxBuildDir :: FilePath -> FilePath
sandboxBuildDir sandboxDir = "dist/dist-sandbox-" ++ showHex sandboxDirHash ""
where
sandboxDirHash = jenkins sandboxDir
-- See http://en.wikipedia.org/wiki/Jenkins_hash_function
jenkins :: String -> Word32
jenkins str = loop_finish $ foldl' loop 0 str
where
loop :: Word32 -> Char -> Word32
loop hash key_i' = hash'''
where
key_i = toEnum . ord $ key_i'
hash' = hash + key_i
hash'' = hash' + (shiftL hash' 10)
hash''' = hash'' `xor` (shiftR hash'' 6)
loop_finish :: Word32 -> Word32
loop_finish hash = hash'''
where
hash' = hash + (shiftL hash 3)
hash'' = hash' `xor` (shiftR hash' 11)
hash''' = hash'' + (shiftL hash'' 15)
--
-- * Basic sandbox functions.
--
-- | If @--sandbox-config-file@ wasn't given on the command-line, set it to the
-- value of the @CABAL_SANDBOX_CONFIG@ environment variable, or else to
-- 'NoFlag'.
updateSandboxConfigFileFlag :: GlobalFlags -> IO GlobalFlags
updateSandboxConfigFileFlag globalFlags =
case globalSandboxConfigFile globalFlags of
Flag _ -> return globalFlags
NoFlag -> do
f' <- fmap (maybe NoFlag Flag) . lookupEnv $ "CABAL_SANDBOX_CONFIG"
return globalFlags { globalSandboxConfigFile = f' }
-- | Return the path to the sandbox config file - either the default or the one
-- specified with @--sandbox-config-file@.
getSandboxConfigFilePath :: GlobalFlags -> IO FilePath
getSandboxConfigFilePath globalFlags = do
let sandboxConfigFileFlag = globalSandboxConfigFile globalFlags
case sandboxConfigFileFlag of
NoFlag -> do pkgEnvDir <- getCurrentDirectory
return (pkgEnvDir </> sandboxPackageEnvironmentFile)
Flag path -> return path
-- | Load the @cabal.sandbox.config@ file (and possibly the optional
-- @cabal.config@). In addition to a @PackageEnvironment@, also return a
-- canonical path to the sandbox. Exit with error if the sandbox directory or
-- the package environment file do not exist.
tryLoadSandboxConfig :: Verbosity -> GlobalFlags
-> IO (FilePath, PackageEnvironment)
tryLoadSandboxConfig verbosity globalFlags = do
path <- getSandboxConfigFilePath globalFlags
tryLoadSandboxPackageEnvironmentFile verbosity path
(globalConfigFile globalFlags)
-- | Return the name of the package index file for this package environment.
tryGetIndexFilePath :: SavedConfig -> IO FilePath
tryGetIndexFilePath config = tryGetIndexFilePath' (savedGlobalFlags config)
-- | The same as 'tryGetIndexFilePath', but takes 'GlobalFlags' instead of
-- 'SavedConfig'.
tryGetIndexFilePath' :: GlobalFlags -> IO FilePath
tryGetIndexFilePath' globalFlags = do
let paths = fromNubList $ globalLocalRepos globalFlags
case paths of
[] -> die $ "Distribution.Client.Sandbox.tryGetIndexFilePath: " ++
"no local repos found. " ++ checkConfiguration
_ -> return $ (last paths) </> Index.defaultIndexFileName
where
checkConfiguration = "Please check your configuration ('"
++ userPackageEnvironmentFile ++ "')."
-- | Try to extract a 'PackageDB' from 'ConfigFlags'. Gives a better error
-- message than just pattern-matching.
getSandboxPackageDB :: ConfigFlags -> IO PackageDB
getSandboxPackageDB configFlags = do
case configPackageDBs configFlags of
[Just sandboxDB@(SpecificPackageDB _)] -> return sandboxDB
-- TODO: should we allow multiple package DBs (e.g. with 'inherit')?
[] ->
die $ "Sandbox package DB is not specified. " ++ sandboxConfigCorrupt
[_] ->
die $ "Unexpected contents of the 'package-db' field. "
++ sandboxConfigCorrupt
_ ->
die $ "Too many package DBs provided. " ++ sandboxConfigCorrupt
where
sandboxConfigCorrupt = "Your 'cabal.sandbox.config' is probably corrupt."
-- | Which packages are installed in the sandbox package DB?
getInstalledPackagesInSandbox :: Verbosity -> ConfigFlags
-> Compiler -> ProgramConfiguration
-> IO InstalledPackageIndex
getInstalledPackagesInSandbox verbosity configFlags comp conf = do
sandboxDB <- getSandboxPackageDB configFlags
getPackageDBContents verbosity comp sandboxDB conf
-- | Temporarily add $SANDBOX_DIR/bin to $PATH.
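--
-- A usage sketch (not taken from this module; 'rawSystem' lives in
-- System.Process and is not imported here):
--
-- > withSandboxBinDirOnSearchPath "/home/alice/proj/.cabal-sandbox" $
-- >   rawSystem "some-sandboxed-tool" []
--
-- runs the action with the sandbox's @bin@ directory prepended to @$PATH@ and
-- removes that entry again afterwards.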
withSandboxBinDirOnSearchPath :: FilePath -> IO a -> IO a
withSandboxBinDirOnSearchPath sandboxDir = bracket_ addBinDir rmBinDir
where
-- TODO: Instead of modifying the global process state, it'd be better to
-- set the environment individually for each subprocess invocation. This
-- will have to wait until the Shell monad is implemented; without it the
-- required changes are too intrusive.
addBinDir :: IO ()
addBinDir = do
mbOldPath <- lookupEnv "PATH"
let newPath = maybe sandboxBin ((++) sandboxBin . (:) searchPathSeparator)
mbOldPath
setEnv "PATH" newPath
rmBinDir :: IO ()
rmBinDir = do
oldPath <- getSearchPath
let newPath = intercalate [searchPathSeparator]
(delete sandboxBin oldPath)
setEnv "PATH" newPath
sandboxBin = sandboxDir </> "bin"
-- | Initialise a package DB for this compiler if it doesn't exist.
initPackageDBIfNeeded :: Verbosity -> ConfigFlags
-> Compiler -> ProgramConfiguration
-> IO ()
initPackageDBIfNeeded verbosity configFlags comp conf = do
SpecificPackageDB dbPath <- getSandboxPackageDB configFlags
packageDBExists <- doesDirectoryExist dbPath
unless packageDBExists $
Register.initPackageDB verbosity comp conf dbPath
when packageDBExists $
debug verbosity $ "The package database already exists: " ++ dbPath
-- | Entry point for the 'cabal sandbox dump-pkgenv' command.
dumpPackageEnvironment :: Verbosity -> SandboxFlags -> GlobalFlags -> IO ()
dumpPackageEnvironment verbosity _sandboxFlags globalFlags = do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
commentPkgEnv <- commentPackageEnvironment sandboxDir
putStrLn . showPackageEnvironmentWithComments (Just commentPkgEnv) $ pkgEnv
-- | Entry point for the 'cabal sandbox init' command.
sandboxInit :: Verbosity -> SandboxFlags -> GlobalFlags -> IO ()
sandboxInit verbosity sandboxFlags globalFlags = do
-- Warn if there's a 'cabal-dev' sandbox.
isCabalDevSandbox <- liftM2 (&&) (doesDirectoryExist "cabal-dev")
(doesFileExist $ "cabal-dev" </> "cabal.config")
when isCabalDevSandbox $
warn verbosity $
"You are apparently using a legacy (cabal-dev) sandbox. "
++ "Legacy sandboxes may interact badly with native Cabal sandboxes. "
++ "You may want to delete the 'cabal-dev' directory to prevent issues."
-- Create the sandbox directory.
let sandboxDir' = fromFlagOrDefault defaultSandboxLocation
(sandboxLocation sandboxFlags)
createDirectoryIfMissingVerbose verbosity True sandboxDir'
sandboxDir <- tryCanonicalizePath sandboxDir'
setFileHidden sandboxDir
-- Determine which compiler to use (using the value from ~/.cabal/config).
userConfig <- loadConfig verbosity (globalConfigFile globalFlags)
(comp, platform, conf) <- configCompilerAuxEx (savedConfigureFlags userConfig)
-- Create the package environment file.
pkgEnvFile <- getSandboxConfigFilePath globalFlags
createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile comp platform
(_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
let config = pkgEnvSavedConfig pkgEnv
configFlags = savedConfigureFlags config
-- Create the index file if it doesn't exist.
indexFile <- tryGetIndexFilePath config
indexFileExists <- doesFileExist indexFile
if indexFileExists
then notice verbosity $ "Using an existing sandbox located at " ++ sandboxDir
else notice verbosity $ "Creating a new sandbox at " ++ sandboxDir
Index.createEmpty verbosity indexFile
-- Create the package DB for the default compiler.
initPackageDBIfNeeded verbosity configFlags comp conf
maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile
(compilerId comp) platform
-- | Entry point for the 'cabal sandbox delete' command.
sandboxDelete :: Verbosity -> SandboxFlags -> GlobalFlags -> IO ()
sandboxDelete verbosity _sandboxFlags globalFlags = do
(useSandbox, _) <- loadConfigOrSandboxConfig
verbosity
globalFlags { globalRequireSandbox = Flag False }
case useSandbox of
NoSandbox -> warn verbosity "Not in a sandbox."
UseSandbox sandboxDir -> do
curDir <- getCurrentDirectory
pkgEnvFile <- getSandboxConfigFilePath globalFlags
-- Remove the @cabal.sandbox.config@ file, unless it's in a non-standard
-- location.
let isNonDefaultConfigLocation = not $ equalFilePath pkgEnvFile $
curDir </> sandboxPackageEnvironmentFile
if isNonDefaultConfigLocation
then warn verbosity $ "Sandbox config file is in non-default location: '"
++ pkgEnvFile ++ "'.\n Please delete manually."
else removeFile pkgEnvFile
-- Remove the sandbox directory, unless we're using a shared sandbox.
let isNonDefaultSandboxLocation = not $ equalFilePath sandboxDir $
curDir </> defaultSandboxLocation
when isNonDefaultSandboxLocation $
die $ "Non-default sandbox location used: '" ++ sandboxDir
++ "'.\nAssuming a shared sandbox. Please delete '"
++ sandboxDir ++ "' manually."
absSandboxDir <- canonicalizePath sandboxDir
notice verbosity $ "Deleting the sandbox located at " ++ absSandboxDir
removeDirectoryRecursive absSandboxDir
let
pathInsideSandbox = isPrefixOf absSandboxDir
-- Warn the user if deleting the sandbox deleted a package database
-- referenced in the current environment.
checkPackagePaths var = do
let
checkPath path = do
absPath <- canonicalizePath path
(when (pathInsideSandbox absPath) . warn verbosity)
(var ++ " refers to package database " ++ path
++ " inside the deleted sandbox.")
liftM (maybe [] splitSearchPath) (lookupEnv var) >>= mapM_ checkPath
checkPackagePaths "CABAL_SANDBOX_PACKAGE_PATH"
checkPackagePaths "GHC_PACKAGE_PATH"
checkPackagePaths "GHCJS_PACKAGE_PATH"
-- Common implementation of 'sandboxAddSource' and 'sandboxAddSourceSnapshot'.
doAddSource :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment
-> BuildTreeRefType
-> IO ()
doAddSource verbosity buildTreeRefs sandboxDir pkgEnv refType = do
let savedConfig = pkgEnvSavedConfig pkgEnv
indexFile <- tryGetIndexFilePath savedConfig
-- If we're running 'sandbox add-source' for the first time for this compiler,
-- we need to create an initial timestamp record.
(comp, platform, _) <- configCompilerAuxEx . savedConfigureFlags $ savedConfig
maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile
(compilerId comp) platform
withAddTimestamps sandboxDir $ do
-- Path canonicalisation is done in addBuildTreeRefs, but we do it
-- twice because of the timestamps file.
buildTreeRefs' <- mapM tryCanonicalizePath buildTreeRefs
Index.addBuildTreeRefs verbosity indexFile buildTreeRefs' refType
return buildTreeRefs'
-- | Entry point for the 'cabal sandbox add-source' command.
sandboxAddSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags
-> IO ()
sandboxAddSource verbosity buildTreeRefs sandboxFlags globalFlags = do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
if fromFlagOrDefault False (sandboxSnapshot sandboxFlags)
then sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv
else doAddSource verbosity buildTreeRefs sandboxDir pkgEnv LinkRef
-- | Entry point for the 'cabal sandbox add-source --snapshot' command.
sandboxAddSourceSnapshot :: Verbosity -> [FilePath] -> FilePath
-> PackageEnvironment
-> IO ()
sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv = do
let snapshotDir = sandboxDir </> snapshotDirectoryName
-- Use 'D.S.SrcDist.prepareTree' to copy each package's files to our private
-- location.
createDirectoryIfMissingVerbose verbosity True snapshotDir
-- Collect the package descriptions first, so that if some path does not refer
-- to a cabal package, we fail immediately.
pkgs <- forM buildTreeRefs $ \buildTreeRef ->
inDir (Just buildTreeRef) $
return . flattenPackageDescription
=<< readPackageDescription verbosity
=<< defaultPackageDesc verbosity
-- Copy the package sources to "snapshots/$PKGNAME-$VERSION-tmp". If
-- 'prepareTree' throws an error at any point, the old snapshots will still be
  -- in a consistent state.
tmpDirs <- forM (zip buildTreeRefs pkgs) $ \(buildTreeRef, pkg) ->
inDir (Just buildTreeRef) $ do
let targetDir = snapshotDir </> (display . packageId $ pkg)
targetTmpDir = targetDir ++ "-tmp"
dirExists <- doesDirectoryExist targetTmpDir
when dirExists $
removeDirectoryRecursive targetDir
createDirectory targetTmpDir
prepareTree verbosity pkg Nothing targetTmpDir knownSuffixHandlers
return (targetTmpDir, targetDir)
-- Now rename the "snapshots/$PKGNAME-$VERSION-tmp" dirs to
-- "snapshots/$PKGNAME-$VERSION".
snapshots <- forM tmpDirs $ \(targetTmpDir, targetDir) -> do
dirExists <- doesDirectoryExist targetDir
when dirExists $
removeDirectoryRecursive targetDir
renameDirectory targetTmpDir targetDir
return targetDir
-- Once the packages are copied, just 'add-source' them as usual.
doAddSource verbosity snapshots sandboxDir pkgEnv SnapshotRef
-- | Entry point for the 'cabal sandbox delete-source' command.
sandboxDeleteSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags
-> IO ()
sandboxDeleteSource verbosity buildTreeRefs _sandboxFlags globalFlags = do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv)
(results, convDict) <-
Index.removeBuildTreeRefs verbosity indexFile buildTreeRefs
let (failedPaths, removedPaths) = partitionEithers results
removedRefs = fmap convDict removedPaths
unless (null removedPaths) $ do
removeTimestamps sandboxDir removedPaths
notice verbosity $ "Success deleting sources: " ++
showL removedRefs ++ "\n\n"
unless (null failedPaths) $ do
let groupedFailures = groupBy errorType failedPaths
mapM_ handleErrors groupedFailures
die $ "The sources with the above errors were skipped. (" ++
showL (fmap getPath failedPaths) ++ ")"
notice verbosity $ "Note: 'sandbox delete-source' only unregisters the " ++
"source dependency, but does not remove the package " ++
"from the sandbox package DB.\n\n" ++
"Use 'sandbox hc-pkg -- unregister' to do that."
where
getPath (Index.ErrNonregisteredSource p) = p
getPath (Index.ErrNonexistentSource p) = p
showPaths f = concat . intersperse " " . fmap (show . f)
showL = showPaths id
showE [] = return ' '
showE errs = showPaths getPath errs
errorType Index.ErrNonregisteredSource{} Index.ErrNonregisteredSource{} =
True
errorType Index.ErrNonexistentSource{} Index.ErrNonexistentSource{} = True
errorType _ _ = False
handleErrors [] = return ()
handleErrors errs@(Index.ErrNonregisteredSource{}:_) =
warn verbosity ("Sources not registered: " ++ showE errs ++ "\n\n")
handleErrors errs@(Index.ErrNonexistentSource{}:_) =
warn verbosity
("Source directory not found for paths: " ++ showE errs ++ "\n"
++ "If you are trying to delete a reference to a removed directory, "
++ "please provide the full absolute path "
++ "(as given by `sandbox list-sources`).\n\n")
-- | Entry point for the 'cabal sandbox list-sources' command.
sandboxListSources :: Verbosity -> SandboxFlags -> GlobalFlags
-> IO ()
sandboxListSources verbosity _sandboxFlags globalFlags = do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv)
refs <- Index.listBuildTreeRefs verbosity
Index.ListIgnored Index.LinksAndSnapshots indexFile
when (null refs) $
notice verbosity $ "Index file '" ++ indexFile
++ "' has no references to local build trees."
when (not . null $ refs) $ do
notice verbosity $ "Source dependencies registered "
++ "in the current sandbox ('" ++ sandboxDir ++ "'):\n\n"
mapM_ putStrLn refs
notice verbosity $ "\nTo unregister source dependencies, "
++ "use the 'sandbox delete-source' command."
-- | Entry point for the 'cabal sandbox hc-pkg' command. Invokes the @hc-pkg@
-- tool with provided arguments, restricted to the sandbox.
sandboxHcPkg :: Verbosity -> SandboxFlags -> GlobalFlags -> [String] -> IO ()
sandboxHcPkg verbosity _sandboxFlags globalFlags extraArgs = do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
let configFlags = savedConfigureFlags . pkgEnvSavedConfig $ pkgEnv
-- Invoke hc-pkg for the most recently configured compiler (if any),
-- using the right package-db for the compiler (see #1935).
(comp, platform, conf) <- getPersistOrConfigCompiler configFlags
let dir = sandboxPackageDBPath sandboxDir comp platform
dbStack = [GlobalPackageDB, SpecificPackageDB dir]
Register.invokeHcPkg verbosity comp conf dbStack extraArgs
updateInstallDirs :: Flag Bool
-> (UseSandbox, SavedConfig) -> (UseSandbox, SavedConfig)
updateInstallDirs userInstallFlag (useSandbox, savedConfig) =
case useSandbox of
NoSandbox ->
let savedConfig' = savedConfig {
savedConfigureFlags = configureFlags {
configInstallDirs = installDirs
}
}
in (useSandbox, savedConfig')
_ -> (useSandbox, savedConfig)
where
configureFlags = savedConfigureFlags savedConfig
userInstallDirs = savedUserInstallDirs savedConfig
globalInstallDirs = savedGlobalInstallDirs savedConfig
installDirs | userInstall = userInstallDirs
| otherwise = globalInstallDirs
userInstall = fromFlagOrDefault defaultUserInstall
(configUserInstall configureFlags `mappend` userInstallFlag)
-- | Check which type of package environment we're in and return a
-- correctly-initialised @SavedConfig@ and a @UseSandbox@ value that indicates
-- whether we're working in a sandbox.
loadConfigOrSandboxConfig :: Verbosity
-> GlobalFlags -- ^ For @--config-file@ and
-- @--sandbox-config-file@.
-> IO (UseSandbox, SavedConfig)
loadConfigOrSandboxConfig verbosity globalFlags = do
let configFileFlag = globalConfigFile globalFlags
sandboxConfigFileFlag = globalSandboxConfigFile globalFlags
ignoreSandboxFlag = globalIgnoreSandbox globalFlags
pkgEnvDir <- getPkgEnvDir sandboxConfigFileFlag
pkgEnvType <- classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag
ignoreSandboxFlag
case pkgEnvType of
-- A @cabal.sandbox.config@ file (and possibly @cabal.config@) is present.
SandboxPackageEnvironment -> do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
-- ^ Prints an error message and exits on error.
let config = pkgEnvSavedConfig pkgEnv
return (UseSandbox sandboxDir, config)
-- Only @cabal.config@ is present.
UserPackageEnvironment -> do
config <- loadConfig verbosity configFileFlag
userConfig <- loadUserConfig verbosity pkgEnvDir Nothing
let config' = config `mappend` userConfig
dieIfSandboxRequired config'
return (NoSandbox, config')
-- Neither @cabal.sandbox.config@ nor @cabal.config@ are present.
AmbientPackageEnvironment -> do
config <- loadConfig verbosity configFileFlag
let globalConstraintsOpt =
flagToMaybe . globalConstraintsFile . savedGlobalFlags $ config
globalConstraintConfig <-
loadUserConfig verbosity pkgEnvDir globalConstraintsOpt
let config' = config `mappend` globalConstraintConfig
dieIfSandboxRequired config
return (NoSandbox, config')
where
-- Return the path to the package environment directory - either the
-- current directory or the one that @--sandbox-config-file@ resides in.
getPkgEnvDir :: (Flag FilePath) -> IO FilePath
getPkgEnvDir sandboxConfigFileFlag = do
case sandboxConfigFileFlag of
NoFlag -> getCurrentDirectory
Flag path -> tryCanonicalizePath . takeDirectory $ path
-- Die if @--require-sandbox@ was specified and we're not inside a sandbox.
dieIfSandboxRequired :: SavedConfig -> IO ()
dieIfSandboxRequired config = checkFlag flag
where
flag = (globalRequireSandbox . savedGlobalFlags $ config)
`mappend` (globalRequireSandbox globalFlags)
checkFlag (Flag True) =
die $ "'require-sandbox' is set to True, but no sandbox is present. "
++ "Use '--no-require-sandbox' if you want to override "
++ "'require-sandbox' temporarily."
checkFlag (Flag False) = return ()
checkFlag (NoFlag) = return ()
-- | Return the saved \"dist/\" prefix, or the default prefix.
findSavedDistPref :: SavedConfig -> Flag FilePath -> IO FilePath
findSavedDistPref config flagDistPref = do
let defDistPref = useDistPref defaultSetupScriptOptions
flagDistPref' = configDistPref (savedConfigureFlags config)
`mappend` flagDistPref
findDistPref defDistPref flagDistPref'
-- | If we're in a sandbox, call @withSandboxBinDirOnSearchPath@, otherwise do
-- nothing.
maybeWithSandboxDirOnSearchPath :: UseSandbox -> IO a -> IO a
maybeWithSandboxDirOnSearchPath NoSandbox act = act
maybeWithSandboxDirOnSearchPath (UseSandbox sandboxDir) act =
withSandboxBinDirOnSearchPath sandboxDir $ act
-- | Had reinstallAddSourceDeps actually reinstalled any dependencies?
data WereDepsReinstalled = ReinstalledSomeDeps | NoDepsReinstalled
-- | Reinstall those add-source dependencies that have been modified since
-- we've last installed them. Assumes that we're working inside a sandbox.
reinstallAddSourceDeps :: Verbosity
-> ConfigFlags -> ConfigExFlags
-> InstallFlags -> GlobalFlags
-> FilePath
-> IO WereDepsReinstalled
reinstallAddSourceDeps verbosity configFlags' configExFlags
installFlags globalFlags sandboxDir = topHandler' $ do
let sandboxDistPref = sandboxBuildDir sandboxDir
configFlags = configFlags'
{ configDistPref = Flag sandboxDistPref }
haddockFlags = mempty
{ haddockDistPref = Flag sandboxDistPref }
(comp, platform, conf) <- configCompilerAux' configFlags
retVal <- newIORef NoDepsReinstalled
withSandboxPackageInfo verbosity configFlags globalFlags
comp platform conf sandboxDir $ \sandboxPkgInfo ->
unless (null $ modifiedAddSourceDependencies sandboxPkgInfo) $ do
withRepoContext verbosity globalFlags $ \repoContext -> do
let args :: InstallArgs
args = ((configPackageDB' configFlags)
,repoContext
,comp, platform, conf
,UseSandbox sandboxDir, Just sandboxPkgInfo
,globalFlags, configFlags, configExFlags, installFlags
,haddockFlags)
-- This can actually be replaced by a call to 'install', but we use a
-- lower-level API because of layer separation reasons. Additionally, we
      -- might want to use some of these lower-level features in the future.
withSandboxBinDirOnSearchPath sandboxDir $ do
installContext <- makeInstallContext verbosity args Nothing
installPlan <- foldProgress logMsg die' return =<<
makeInstallPlan verbosity args installContext
processInstallPlan verbosity args installContext installPlan
writeIORef retVal ReinstalledSomeDeps
readIORef retVal
where
die' message = die (message ++ installFailedInSandbox)
-- TODO: use a better error message, remove duplication.
installFailedInSandbox =
"Note: when using a sandbox, all packages are required to have "
++ "consistent dependencies. Try reinstalling/unregistering the "
++ "offending packages or recreating the sandbox."
logMsg message rest = debugNoWrap verbosity message >> rest
topHandler' = topHandlerWith $ \_ -> do
warn verbosity "Couldn't reinstall some add-source dependencies."
-- Here we can't know whether any deps have been reinstalled, so we have
-- to be conservative.
return ReinstalledSomeDeps
-- | Produce a 'SandboxPackageInfo' and feed it to the given action. Note that
-- we don't update the timestamp file here - this is done in
-- 'postInstallActions'.
withSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags
-> Compiler -> Platform -> ProgramConfiguration
-> FilePath
-> (SandboxPackageInfo -> IO ())
-> IO ()
withSandboxPackageInfo verbosity configFlags globalFlags
comp platform conf sandboxDir cont = do
-- List all add-source deps.
indexFile <- tryGetIndexFilePath' globalFlags
buildTreeRefs <- Index.listBuildTreeRefs verbosity
Index.DontListIgnored Index.OnlyLinks indexFile
let allAddSourceDepsSet = S.fromList buildTreeRefs
-- List all packages installed in the sandbox.
installedPkgIndex <- getInstalledPackagesInSandbox verbosity
configFlags comp conf
let err = "Error reading sandbox package information."
-- Get the package descriptions for all add-source deps.
depsCabalFiles <- mapM (flip tryFindAddSourcePackageDesc err) buildTreeRefs
depsPkgDescs <- mapM (readPackageDescription verbosity) depsCabalFiles
let depsMap = M.fromList (zip buildTreeRefs depsPkgDescs)
isInstalled pkgid = not . null
. InstalledPackageIndex.lookupSourcePackageId installedPkgIndex $ pkgid
installedDepsMap = M.filter (isInstalled . packageId) depsMap
-- Get the package ids of modified (and installed) add-source deps.
modifiedAddSourceDeps <- listModifiedDeps verbosity sandboxDir
(compilerId comp) platform installedDepsMap
-- 'fromJust' here is safe because 'modifiedAddSourceDeps' are guaranteed to
-- be a subset of the keys of 'depsMap'.
let modifiedDeps = [ (modDepPath, fromJust $ M.lookup modDepPath depsMap)
| modDepPath <- modifiedAddSourceDeps ]
modifiedDepsMap = M.fromList modifiedDeps
assert (all (`S.member` allAddSourceDepsSet) modifiedAddSourceDeps) (return ())
if (null modifiedDeps)
then info verbosity $ "Found no modified add-source deps."
else notice verbosity $ "Some add-source dependencies have been modified. "
++ "They will be reinstalled..."
-- Get the package ids of the remaining add-source deps (some are possibly not
-- installed).
let otherDeps = M.assocs (depsMap `M.difference` modifiedDepsMap)
-- Finally, assemble a 'SandboxPackageInfo'.
cont $ SandboxPackageInfo (map toSourcePackage modifiedDeps)
(map toSourcePackage otherDeps) installedPkgIndex allAddSourceDepsSet
where
toSourcePackage (path, pkgDesc) = SourcePackage
(packageId pkgDesc) pkgDesc (LocalUnpackedPackage path) Nothing
-- | Same as 'withSandboxPackageInfo' if we're inside a sandbox and the
-- identity otherwise.
maybeWithSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags
-> Compiler -> Platform -> ProgramConfiguration
-> UseSandbox
-> (Maybe SandboxPackageInfo -> IO ())
-> IO ()
maybeWithSandboxPackageInfo verbosity configFlags globalFlags
comp platform conf useSandbox cont =
case useSandbox of
NoSandbox -> cont Nothing
UseSandbox sandboxDir -> withSandboxPackageInfo verbosity
configFlags globalFlags
comp platform conf sandboxDir
(\spi -> cont (Just spi))
-- | Check if a sandbox is present and call @reinstallAddSourceDeps@ in that
-- case.
maybeReinstallAddSourceDeps :: Verbosity
-> Flag (Maybe Int) -- ^ The '-j' flag
-> ConfigFlags -- ^ Saved configure flags
-- (from dist/setup-config)
-> GlobalFlags
-> (UseSandbox, SavedConfig)
-> IO WereDepsReinstalled
maybeReinstallAddSourceDeps verbosity numJobsFlag configFlags'
globalFlags' (useSandbox, config) = do
case useSandbox of
NoSandbox -> return NoDepsReinstalled
UseSandbox sandboxDir -> do
-- Reinstall the modified add-source deps.
let configFlags = savedConfigureFlags config
`mappendSomeSavedFlags`
configFlags'
configExFlags = defaultConfigExFlags
`mappend` savedConfigureExFlags config
installFlags' = defaultInstallFlags
`mappend` savedInstallFlags config
installFlags = installFlags' {
installNumJobs = installNumJobs installFlags'
`mappend` numJobsFlag
}
globalFlags = savedGlobalFlags config
-- This makes it possible to override things like 'remote-repo-cache'
-- from the command line. These options are hidden, and are only
-- useful for debugging, so this should be fine.
`mappend` globalFlags'
reinstallAddSourceDeps
verbosity configFlags configExFlags
installFlags globalFlags sandboxDir
where
-- NOTE: we can't simply do @sandboxConfigFlags `mappend` savedFlags@
-- because we don't want to auto-enable things like 'library-profiling' for
-- all add-source dependencies even if the user has passed
-- '--enable-library-profiling' to 'cabal configure'. These options are
-- supposed to be set in 'cabal.config'.
mappendSomeSavedFlags :: ConfigFlags -> ConfigFlags -> ConfigFlags
mappendSomeSavedFlags sandboxConfigFlags savedFlags =
sandboxConfigFlags {
configHcFlavor = configHcFlavor sandboxConfigFlags
`mappend` configHcFlavor savedFlags,
configHcPath = configHcPath sandboxConfigFlags
`mappend` configHcPath savedFlags,
configHcPkg = configHcPkg sandboxConfigFlags
`mappend` configHcPkg savedFlags,
configProgramPaths = configProgramPaths sandboxConfigFlags
`mappend` configProgramPaths savedFlags,
configProgramArgs = configProgramArgs sandboxConfigFlags
`mappend` configProgramArgs savedFlags,
-- NOTE: Unconditionally choosing the value from
-- 'dist/setup-config'. Sandbox package DB location may have been
-- changed by 'configure -w'.
configPackageDBs = configPackageDBs savedFlags
-- FIXME: Is this compatible with the 'inherit' feature?
}
--
-- Utils (transitionary)
--
-- FIXME: configPackageDB' and configCompilerAux' don't really belong in this
-- module
--
configPackageDB' :: ConfigFlags -> PackageDBStack
configPackageDB' cfg =
interpretPackageDbFlags userInstall (configPackageDBs cfg)
where
userInstall = fromFlagOrDefault True (configUserInstall cfg)
configCompilerAux' :: ConfigFlags
-> IO (Compiler, Platform, ProgramConfiguration)
configCompilerAux' configFlags =
configCompilerAuxEx configFlags
--FIXME: make configCompilerAux use a sensible verbosity
{ configVerbosity = fmap lessVerbose (configVerbosity configFlags) }
-- | Try to read the most recently configured compiler from the
-- 'localBuildInfoFile', falling back on 'configCompilerAuxEx' if it
-- cannot be read.
getPersistOrConfigCompiler :: ConfigFlags
-> IO (Compiler, Platform, ProgramConfiguration)
getPersistOrConfigCompiler configFlags = do
distPref <- findDistPrefOrDefault (configDistPref configFlags)
mlbi <- maybeGetPersistBuildConfig distPref
case mlbi of
Nothing -> do configCompilerAux' configFlags
Just lbi -> return ( LocalBuildInfo.compiler lbi
, LocalBuildInfo.hostPlatform lbi
, LocalBuildInfo.withPrograms lbi
)
| tolysz/prepare-ghcjs | spec-lts8/cabal/cabal-install/Distribution/Client/Sandbox.hs | bsd-3-clause | 41,442 | 0 | 27 | 11,417 | 6,605 | 3,432 | 3,173 | 619 | 7 |
module PackageTests.EmptyLib.Check where
import PackageTests.PackageTester
import System.FilePath
import Test.Tasty.HUnit
-- See https://github.com/haskell/cabal/issues/1241
emptyLib :: FilePath -> Assertion
emptyLib ghcPath = do
let spec = PackageSpec
{ directory = "PackageTests" </> "EmptyLib" </> "empty"
, configOpts = []
, distPref = Nothing
}
result <- cabal_build spec ghcPath
assertBuildSucceeded result
| Helkafen/cabal | Cabal/tests/PackageTests/EmptyLib/Check.hs | bsd-3-clause | 464 | 0 | 13 | 100 | 98 | 54 | 44 | 12 | 1 |
{- $Id: AFRPTestsDelay.hs,v 1.2 2003/11/10 21:28:58 antony Exp $
******************************************************************************
* A F R P *
* *
* Module: AFRPTestsPre *
* Purpose: Test cases for pre and (derived) combinators *
* that (semantically) involves a pre. *
* Authors: Antony Courtney and Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* Copyright (c) University of Nottingham, 2005 *
* *
******************************************************************************
-}
module AFRPTestsPre (pre_tr, pre_trs) where
import FRP.Yampa
import AFRPTestsCommon
------------------------------------------------------------------------------
-- Test cases for pre and related combinators
------------------------------------------------------------------------------
pre_t0 = testSF1 (iPre 17)
pre_t0r =
[17.0,0.0,1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0,11.0,12.0,13.0,14.0,
15.0,16.0,17.0,18.0,19.0,20.0,21.0,22.0,23.0]
pre_t1 = testSF2 (iPre 17)
pre_t1r =
[17.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,2.0,2.0,2.0,
3.0,3.0,3.0,3.0,3.0,4.0,4.0,4.0,4.0]
pre_t2 = testSF1 (time
>>> arr (\t -> sin (0.5 * t * pi + pi))
>>> loop (arr (\(x1,x2) -> let x' = max x1 x2 in (x',x'))
>>> second (iPre 0.0)))
pre_t2r =
take 25
(let xs = [ sin (0.5 * t * pi + pi) | t <- [0.0, 0.25 ..] ]
in tail (scanl max 0 xs))
-- This is a (somewhat strange) way of doing a counter that
-- stops after reaching a threshold. Note that the ingoing event
-- is *control dependent* on the output of the counter, so
-- "dHold" really has to have the capability of delivering an
-- output without looking at the current input at all.
pre_t3, pre_t3r :: [Int]
pre_t3 = take 50 (embed sf (deltaEncode 0.25 (repeat ())))
where
sf = repeatedly 1.0 ()
>>> (loop $
arr (\(e,c) -> (e `tag` (c + 1)) `gate` (c < 10))
>>> dHold 0
>>> arr dup)
pre_t3r = [0,0,0,0, -- 0s
0,1,1,1, -- 1s
1,2,2,2, -- 2s
2,3,3,3, -- 3s
3,4,4,4, -- 4s
4,5,5,5, -- 5s
5,6,6,6, -- 6s
6,7,7,7, -- 7s
7,8,8,8, -- 8s
8,9,9,9, -- 9s
9,10,10,10, -- 10s
10,10,10,10, -- 11s
10,10] -- 12s
-- Version of the above that tests that things still work correctly when
-- there is an initial event.
pre_t4, pre_t4r :: [Int]
pre_t4 = take 50 (embed sf (deltaEncode 0.25 (repeat ())))
where
sf = (now () &&& repeatedly 1.0 ()) >>> arr (uncurry lMerge)
>>> (loop $
arr (\(e,c) -> (e `tag` (c + 1)) `gate` (c < 10))
>>> dHold 0
>>> arr dup)
pre_t4r = [0,1,1,1, -- 0s
1,2,2,2, -- 1s
2,3,3,3, -- 2s
3,4,4,4, -- 3s
4,5,5,5, -- 4s
5,6,6,6, -- 5s
6,7,7,7, -- 6s
7,8,8,8, -- 7s
8,9,9,9, -- 8s
9,10,10,10, -- 9s
10,10,10,10, -- 10s
10,10,10,10, -- 11s
10,10] -- 12s
-- Similar test to "pre_t3" above but for dAccumHold.
pre_t5, pre_t5r :: [Int]
pre_t5 = take 50 (embed sf (deltaEncode 0.25 (repeat ())))
where
sf = repeatedly 1.0 ()
>>> (loop $
arr (\(e,c) -> (e `tag` (+1)) `gate` (c < 10))
>>> dAccumHold 0
>>> arr dup)
pre_t5r = [0,0,0,0, -- 0s
0,1,1,1, -- 1s
1,2,2,2, -- 2s
2,3,3,3, -- 3s
3,4,4,4, -- 4s
4,5,5,5, -- 5s
5,6,6,6, -- 6s
6,7,7,7, -- 7s
7,8,8,8, -- 8s
8,9,9,9, -- 9s
9,10,10,10, -- 10s
10,10,10,10, -- 11s
10,10] -- 12s
-- Similar test to "pre_t4" above but for dAccumHold.
pre_t6, pre_t6r :: [Int]
pre_t6 = take 50 (embed sf (deltaEncode 0.25 (repeat ())))
where
sf = (now () &&& repeatedly 1.0 ()) >>> arr (uncurry lMerge)
>>> (loop $
arr (\(e,c) -> (e `tag` (+1)) `gate` (c < 10))
>>> dAccumHold 0
>>> arr dup)
pre_t6r = [0,1,1,1, -- 0s
1,2,2,2, -- 1s
2,3,3,3, -- 2s
3,4,4,4, -- 3s
4,5,5,5, -- 4s
5,6,6,6, -- 5s
6,7,7,7, -- 6s
7,8,8,8, -- 7s
8,9,9,9, -- 8s
9,10,10,10, -- 9s
10,10,10,10, -- 10s
10,10,10,10, -- 11s
10,10] -- 12s
-- Similar test to "pre_t3" above but for dAccumHoldBy.
pre_t7, pre_t7r :: [Int]
pre_t7 = take 50 (embed sf (deltaEncode 0.25 (repeat ())))
where
sf = repeatedly 1.0 ()
>>> (loop $
arr (\(e,c) -> e `gate` (c < 10))
>>> dAccumHoldBy (\c _ -> c + 1) 0
>>> arr dup)
pre_t7r = [0,0,0,0, -- 0s
0,1,1,1, -- 1s
1,2,2,2, -- 2s
2,3,3,3, -- 3s
3,4,4,4, -- 4s
4,5,5,5, -- 5s
5,6,6,6, -- 6s
6,7,7,7, -- 7s
7,8,8,8, -- 8s
8,9,9,9, -- 9s
9,10,10,10, -- 10s
10,10,10,10, -- 11s
10,10] -- 12s
-- Similar test to "pre_t4" above but for dAccumHoldBy.
pre_t8, pre_t8r :: [Int]
pre_t8 = take 50 (embed sf (deltaEncode 0.25 (repeat ())))
where
sf = (now () &&& repeatedly 1.0 ()) >>> arr (uncurry lMerge)
>>> (loop $
arr (\(e,c) -> e `gate` (c < 10))
>>> dAccumHoldBy (\c _ -> c + 1) 0
>>> arr dup)
pre_t8r = [0,1,1,1, -- 0s
1,2,2,2, -- 1s
2,3,3,3, -- 2s
3,4,4,4, -- 3s
4,5,5,5, -- 4s
5,6,6,6, -- 5s
6,7,7,7, -- 6s
7,8,8,8, -- 7s
8,9,9,9, -- 8s
9,10,10,10, -- 9s
10,10,10,10, -- 10s
10,10,10,10, -- 11s
10,10] -- 12s
pre_trs =
[ pre_t0 ~= pre_t0r,
pre_t1 ~= pre_t1r,
pre_t2 ~= pre_t2r,
pre_t3 == pre_t3r,
pre_t4 == pre_t4r,
pre_t5 == pre_t5r,
pre_t6 == pre_t6r,
pre_t7 == pre_t7r,
pre_t8 == pre_t8r
]
pre_tr = and pre_trs
| ony/Yampa-core | tests/AFRPTestsPre.hs | bsd-3-clause | 6,766 | 0 | 19 | 2,797 | 2,329 | 1,438 | 891 | 150 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="es-ES">
<title>SVN Digger Files</title>
<maps>
<homeID>svndigger</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/svndigger/src/main/javahelp/help_es_ES/helpset_es_ES.hs | apache-2.0 | 967 | 82 | 52 | 157 | 392 | 207 | 185 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>Visualização de Navegador | Extensão ZAP</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Conteúdo</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Índice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Localizar</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoritos</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/browserView/src/main/javahelp/org/zaproxy/zap/extension/browserView/resources/help_pt_BR/helpset_pt_BR.hs | apache-2.0 | 994 | 83 | 52 | 160 | 407 | 214 | 193 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE UndecidableInstances #-}
-- #345
module ShouldCompile where
data Succ n
data Zero
class Plus x y z | x y -> z
instance Plus Zero x x
instance Plus x y z => Plus (Succ x) y (Succ z)
infixr 5 :::
data List :: * -> * -> * where
Nil :: List a Zero
(:::) :: a -> List a n -> List a (Succ n)
append :: Plus x y z => List a x -> List a y -> List a z
append Nil ys = ys
append (x ::: xs) ys = x ::: append xs ys
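-- For instance, the 'Plus' instances above let the type checker assign
-- append (True ::: Nil) (False ::: True ::: Nil)
-- the type List Bool (Succ (Succ (Succ Zero))), i.e. the two lengths are
-- added at the type level.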
| sdiehl/ghc | testsuite/tests/gadt/gadt-fd.hs | bsd-3-clause | 622 | 0 | 10 | 160 | 216 | 115 | 101 | -1 | -1 |
{-| Main binary for all stand-alone data collectors
-}
{-
Copyright (C) 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Main (main) where
import Ganeti.Common
import Ganeti.DataCollectors.CLI (genericOptions, defaultOptions)
import Ganeti.DataCollectors.Program (personalities)
-- | Simple main function.
main :: IO ()
main = genericMainCmds defaultOptions personalities genericOptions
| vladimir-ipatov/ganeti | test/hs/hpc-mon-collector.hs | gpl-2.0 | 1,054 | 0 | 6 | 168 | 64 | 39 | 25 | 6 | 1 |
module Language.HigherRank.Main (main) where
import System.Console.Haskeline
import Language.HigherRank.Interpret (runInterpret)
import Language.HigherRank.Parse (parseExpr)
import Language.HigherRank.Print (printReducedExpr, printType)
import Language.HigherRank.Typecheck (runInfer)
fromEither :: Either a a -> a
fromEither (Left x) = x
fromEither (Right x) = x
repl :: (String -> String) -> IO ()
repl f = runInputT defaultSettings loop
where loop = getInputLine "> " >>= \case
Nothing -> return ()
Just l -> outputStrLn (f l) >> loop
main :: IO ()
main = repl $ \input -> fromEither $ do
e <- parseExpr input
t <- runInfer e
r <- runInterpret e
return $ printReducedExpr r ++ " : " ++ printType t
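-- Each input line is parsed, type checked and interpreted in turn; since all
-- three steps return an Either, the do-block stops at the first failure, and
-- 'fromEither' collapses the result so the REPL shows either that error or
-- the final "<value> : <type>" rendering.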
| lexi-lambda/higher-rank | library/Language/HigherRank/Main.hs | isc | 736 | 0 | 14 | 141 | 269 | 139 | 130 | -1 | -1 |
module QueryHandler where
import PrologParse
import ArgsHandler
varMatch :: PLVar -> PLVar -> Bool
varMatch (PLSingletonVar n1 v1) (PLSingletonVar n2 v2) = n1 == n2 && v1 == v2
varMatch (PLSingletonVar n1 v1) (PLListVar n2 v2) = n1 == n2 && v1 `elem` v2
varMatch (PLListVar _ _) (PLSingletonVar _ _) = False
varMatch (PLListVar n1 v1) (PLListVar n2 v2) = n1 == n2 && all (`elem` v2) v1
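-- Illustration (assuming, purely for the example, that variable values are
-- plain strings; the actual types come from PrologParse):
--
-- > varMatch (PLSingletonVar "X" "a") (PLListVar "X" ["a", "b"]) == True
-- > varMatch (PLListVar "X" ["a"]) (PLSingletonVar "X" "a") == False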
isVarInState :: PLVar -> PLState -> Bool
isVarInState v = any (\y -> v `varMatch` y)
stateMatch :: PLState -> PLState -> Bool
stateMatch s1 s2 = any (`isVarInState` s2) s1
executeQuery :: [String] -> [PLState] -> Either String [PLState]
executeQuery args states = do
argState <- parseArgs args
return $ filter (stateMatch argState) states
| fredmorcos/attic | projects/hs-prolog-parser/QueryHandler.hs | isc | 734 | 0 | 10 | 133 | 309 | 163 | 146 | 16 | 1 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
-- |
-- Copyright : (C) 2019 [email protected]
-- License : MIT
-- Maintainer : Helmut Merz <[email protected]>
-- Stability : experimental
-- Portability : GHC only (requires STM)
--
-- Access to the FCO backend using the fco-actor framework.
--
module Fco.Backend.Actor (
-- * Backend Actor
Request (..), Response (..),
spawnBackend,
-- * Usage Example
demo
) where
import BasicPrelude hiding (lookup)
import qualified Data.Text as T
import Control.Monad.Extra (whileM)
import Data.IntMap (elems)
import Control.Concurrent.Actor (
Actor, Behaviour (..), ControlMsg (..), Mailbox, Mailboxes,
MsgHandler, StdBoxes (..),
messageBox, controlBox,
act_children, call, ctxGets, ctxPut, defContext, defListener,
mailbox, minimalContext, setDefContext,
runActor, send, spawnStdActor, stdBehvs, stdBoxes)
import Control.Concurrent.Actor.Config (
ConfigRequest (..), ConfigResponse (..),
spawnConfigDef)
import Control.Concurrent.Actor.Console (spawnConIn, spawnConOut)
import Fco.Backend (
Environment,
credentials, dbSettings, dbName, setupEnv,
query, storeTriple
)
import qualified Fco.Core.Parse as CP
import qualified Fco.Core.Show as CS
import Fco.Core.Struct (lookup)
import qualified Fco.Core.Types as CT
import Fco.Core.Types (Namespace (..))
-- | A message used to query or update the backend.
data Request = Query CT.Query (Mailbox Response)
| Update CT.Triple
-- | The response message type as returned (sent back) by the backend actor.
newtype Response = Response [CT.Triple]
-- | Start a backend actor
spawnBackend :: StdBoxes ConfigRequest -> Actor st (StdBoxes Request)
spawnBackend config = do
ConfigResponse (_, cfg) <- call config (ConfigQuery "backend-pgsql")
let db = dbSettings { dbName = lookup "dbname" cfg,
credentials = (lookup "dbuser" cfg,
lookup "dbpassword" cfg) }
env <- liftIO $ setupEnv db
spawnStdActor backendHandler env
backendHandler :: MsgHandler Environment Request
backendHandler env (Query qu client) = do
tr <- liftIO $ query env qu
send client $ Response tr
return $ Just env
backendHandler env (Update tr) = do
liftIO $ storeTriple env tr
return $ Just env
-- | An example main function that reads a query from stdin,
-- parses it, and queries the backend.
-- The query result is printed to stdout.
--
-- Enter '? ? ?' to get a list of all triples.
demo :: IO ()
demo = runActor act minimalContext where
act = do
self <- stdBoxes
respBox <- mailbox
spawnConIn self
config <- spawnConfigDef
backend <- spawnBackend config
output <- spawnConOut
let behvs = stdBehvs self
(inpHandler (messageBox backend) respBox)
[Behv respBox (responseHandler (messageBox output))]
setDefContext () behvs
defListener
-- message handlers used by the demo function.
inpHandler :: Mailbox Request -> Mailbox Response -> MsgHandler st Text
inpHandler reqBox respBox state txt = do
send reqBox $
Query (CP.parseQuery (Namespace "") txt) respBox
return $ Just state
responseHandler :: Mailbox Text -> MsgHandler st Response
responseHandler outbox state (Response triples) = do
send outbox $ unlines (map CS.showTriple triples)
return $ Just state
| cyberconcepts/fco-backend | src/Fco/Backend/Actor.hs | mit | 3,488 | 0 | 18 | 814 | 848 | 467 | 381 | 71 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module Language.Erlang.Fold where
import Language.Fold
import Language.Erlang.Algebra
import Language.Erlang.Syntax
instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Program pr where
fold f (Program mod exs ims des fns) = programF f (fold f mod) (map (fold f) exs) (map (fold f) ims) (map (fold f) des) (map (fold f) fns)
instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Attribute at where
fold f (Module name) = moduleF f name
fold f (Export names) = exportF f names
fold f (Import name) = importF f name
fold f (Define name bv) = defineF f name (fold f bv)
instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Function fn where
fold f (Function name pats exp) = functionF f name (map (fold f) pats) (fold f exp)
instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) BasicValue bv where
fold f (AtomicLiteral s) = atomicLiteralF f s
fold f (StringLiteral s) = stringLiteralF f s
fold f (NumberLiteral i) = numberLiteralF f i
fold f (ProcessLiteral s) = processLiteralF f s
instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) InfixOp iop where
fold f OpLT = opLTF f
fold f OpLEq = opLEqF f
fold f OpGT = opGTF f
fold f OpGEq = opGEqF f
fold f OpEq = opEqF f
fold f OpNEq = opNEqF f
fold f OpLAnd = opLAndF f
fold f OpLOr = opLOrF f
fold f OpMul = opMulF f
fold f OpDiv = opDivF f
fold f OpMod = opModF f
fold f OpSub = opSubF f
fold f OpBAnd = opBAndF f
fold f OpBXor = opBXorF f
fold f OpBOr = opBOrF f
fold f OpAdd = opAddF f
instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Exp exp where
fold f (InfixExp iop exp0 exp) = infixExpF f (fold f iop) (fold f exp0) (fold f exp)
fold f (ModExp n0 n) = modExpF f n0 n
fold f (Apply exp exps) = applyF f (fold f exp) (map (fold f) exps)
fold f (Call exp0 exp) = callF f (fold f exp0) (fold f exp)
fold f (Case exp mats) = caseF f (fold f exp) (map (fold f) mats)
fold f (FunAnon pats exp) = funAnonF f (map (fold f) pats) (fold f exp)
fold f (Receive mats) = receiveF f (map (fold f) mats)
fold f (If mats) = ifF f (map (fold f) mats)
fold f (Send exp0 exp) = sendF f (fold f exp0) (fold f exp)
fold f (Seq exp0 exp) = seqF f (fold f exp0) (fold f exp)
fold f (Assign pat exp) = assignF f (fold f pat) (fold f exp)
fold f (ExpT exps) = expTF f (map (fold f) exps)
fold f (ExpL exps) = expLF f (map (fold f) exps)
fold f (ExpVal bv) = expValF f (fold f bv)
fold f (ExpVar name) = expVarF f name
fold f (RecordCreate name attrs) = recordCreateF f name (map (\(k,v) -> (k, fold f v)) attrs)
fold f (Coercion exp) = coercionF f (fold f exp)
instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Match mat where
fold f (Match pat gua exp) = matchF f (fold f pat) (fmap (fold f) gua) (fold f exp)
instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Pattern pat where
fold f (PatVar name) = patVarF f name
fold f (PatT pats) = patTF f (map (fold f) pats)
fold f (PatL pats) = patLF f (map (fold f) pats)
fold f (PatVal bv) = patValF f (fold f bv)
instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Guard gua where
fold f (GuardVal bv) = guardValF f (fold f bv)
fold f (GuardVar name) = guardVarF f name
fold f (GuardCall g gs) = guardCallF f (fold f g) (map (fold f) gs)
fold f (GuardT gs) = guardTF f (map (fold f) gs)
fold f (GuardL gs) = guardLF f (map (fold f) gs)
| arnihermann/timedreb2erl | src/Language/Erlang/Fold.hs | mit | 3,586 | 0 | 12 | 919 | 1,877 | 919 | 958 | 67 | 0 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
module U.Util.Cache where
import Prelude hiding (lookup)
import Control.Monad.IO.Class (liftIO)
import UnliftIO (MonadIO, newTVarIO, modifyTVar', writeTVar, atomically, readTVar, readTVarIO)
import qualified Data.Map as Map
import Data.Functor (($>))
import Control.Monad (when)
import Data.Foldable (for_)
data Cache k v =
Cache { lookup_ :: k -> IO (Maybe v)
, insert_ :: k -> v -> IO ()
}
lookup :: MonadIO m => Cache k v -> k -> m (Maybe v)
lookup c k = liftIO (lookup_ c k)
insert :: MonadIO m => Cache k v -> k -> v -> m ()
insert c k v = liftIO (insert_ c k v)
-- Create a cache of unbounded size.
cache :: (MonadIO m, Ord k) => m (Cache k v)
cache = do
t <- newTVarIO Map.empty
let
lookup k = Map.lookup k <$> readTVarIO t
insert k v = do
m <- readTVarIO t
case Map.lookup k m of
Nothing -> atomically $ modifyTVar' t (Map.insert k v)
_ -> pure ()
pure $ Cache lookup insert
nullCache :: Cache k v
nullCache = Cache (const (pure Nothing)) (\_ _ -> pure ())
-- Create a cache of bounded size. Once the cache
-- reaches a size of `maxSize`, older unused entries
-- are evicted from the cache. Unlike LRU caching,
-- where cache hits require updating LRU info,
-- cache hits here are read-only and contention free.
semispaceCache :: (MonadIO m, Ord k) => Word -> m (Cache k v)
semispaceCache 0 = pure nullCache
semispaceCache maxSize = do
-- Analogous to semispace GC, keep 2 maps: gen0 and gen1
-- `insert k v` is done in gen0
-- if full, gen1 = gen0; gen0 = Map.empty
-- `lookup k` is done in gen0; then gen1
-- if found in gen0, return immediately
-- if found in gen1, `insert k v`, then return
-- Thus, older keys not recently looked up are forgotten
gen0 <- newTVarIO Map.empty
gen1 <- newTVarIO Map.empty
let
lookup k = readTVarIO gen0 >>= \m0 ->
case Map.lookup k m0 of
Nothing -> readTVarIO gen1 >>= \m1 ->
case Map.lookup k m1 of
Nothing -> pure Nothing
Just v -> insert k v $> Just v
just -> pure just
insert k v = atomically $ do
modifyTVar' gen0 (Map.insert k v)
m0 <- readTVar gen0
when (fromIntegral (Map.size m0) >= maxSize) $ do
writeTVar gen1 m0
writeTVar gen0 Map.empty
pure $ Cache lookup insert
-- Cached function application: if a key `k` is not in the cache,
-- calls `f` and inserts the result of `f k` in the cache.
apply :: MonadIO m => Cache k v -> (k -> m v) -> k -> m v
apply c f k = lookup c k >>= \case
Just v -> pure v
Nothing -> do
v <- f k
insert c k v
pure v
-- Cached function application which only caches values for
-- which `f k` is non-empty. For instance, if `g` is `Maybe`,
-- and `f x` returns `Nothing`, this won't be cached.
--
-- Useful when we think that missing results for `f` may be
-- later filled in so we don't want to cache missing results.
applyDefined :: (MonadIO m, Applicative g, Traversable g)
=> Cache k v
-> (k -> m (g v))
-> k
-> m (g v)
applyDefined c f k = lookup c k >>= \case
Just v -> pure (pure v)
Nothing -> do
v <- f k
-- only populate the cache if f returns a non-empty result
for_ v $ \v -> insert c k v
pure v
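-- An illustrative usage sketch (not part of the original module; it assumes
-- IO as the ambient monad and caches a cheap stand-in for an expensive,
-- idempotent computation).
_usageSketch :: IO ()
_usageSketch = do
  c <- semispaceCache 128
  let slowLength :: String -> IO Int
      slowLength s = putStrLn ("computing " ++ s) >> pure (length s)
  _ <- apply c slowLength "hello"  -- cache miss: runs slowLength, stores 5
  _ <- apply c slowLength "hello"  -- cache hit: nothing is printed this time
  pure ()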
| unisonweb/platform | codebase2/util/U/Util/Cache.hs | mit | 3,354 | 0 | 21 | 894 | 1,026 | 512 | 514 | 69 | 3 |
{-# LANGUAGE ImplicitParams #-}
-- | Based on An Introduction to Hybrid Automata by Jean-Francois Raskin
-- http://www.cmi.ac.in/~madhavan/courses/acts2010/Raskin_Intro_Hybrid_Automata.pdf
module WaterHeater where
import Control.Applicative ((<|>))
import Zelus
import CyphyUtils
data TankState = T1 | T2 | T3 | T4 deriving (Eq, Show)
data BurnerEvent = ON | OFF deriving (Eq, Show)
data BurnerState = B1 | B2 | B3 | B4 deriving (Eq, Show)
data ThermoEvent = UP95 | DW93 deriving (Eq, Show)
run :: S Double -> (S Double, E ThermoEvent, E BurnerEvent)
run ref_temp =
let
(temperature, _) = unzip (tank burner_events)
burner_events = burner thermo_events
thermo_events = thermo ref_temp temperature
in (temperature, thermo_events, burner_events)
where
?h = 0.01
-- tank :: (?h :: Double) => E BurnerEvent -> S Double
tank burner_event = zip temperature state
where
max_temp = 100
min_temp = 20
init_temp = 99 --min_temp
k = 0.075
heat = 150
dtemp = map k1 state * pre temperature + map m state
temperature = integ (dtemp `in1t` init_temp)
m T1 = k * heat
m _ = 0
k1 T1 = -k
k1 T3 = -k
k1 _ = 0
state = automaton
[ T1 >-- temperature >=? max_temp --> T2
, T1 >-- burner_event `isEvent` val OFF --> T3
, T2 >-- burner_event `isEvent` val OFF --> T3
, T3 >-- burner_event `isEvent` val ON --> T1
, T3 >-- temperature <=? min_temp --> T4
, T4 >-- burner_event `isEvent` val ON --> T1
]
burner :: (?h :: Double) => E ThermoEvent -> E BurnerEvent
burner thermo_event = on <|> off
where
delay = 0.1
dy B1 = 0
dy B2 = 1
dy B3 = 0
dy B4 = 1
on = val ON `when` (state `took` (B1 --> B2))
off = val OFF `when` (state `took` (B3 --> B4))
y = integ (map dy state `in1t` 0 `reset` (0 `whenEvent` (on <|> off)))
state = automaton
[ B1 >-- thermo_event `isEvent` val DW93 --> B2
, B2 >-- y >=? val delay --> B3
, B3 >-- thermo_event `isEvent` val UP95 --> B4
, B4 >-- y >=? val delay --> B1
]
thermo :: (?h :: Double) => S Double -> S Double -> E ThermoEvent
thermo ref_temp temperature = (up <|> down)
where
max_temp = ref_temp + 1
min_temp = ref_temp - 1
frequency = 0.1
samples = val False |-> (z >=? frequency)
up = val UP95 `when` (temperature >=? max_temp &&? samples)
down = val DW93 `when` (temperature <=? min_temp &&? samples)
dz = 1
z = integ (dz `in1t` 0 `reset` (0 `when` samples))
-----------------------------------------------
----- Examples
---
tex = let ?h = 0.01 in thermo tempdown
tempdown = [100 - t*0.1 | t <- [0..200]]
tempup = [80 + t*0.1 | t <- [0..200]]
| koengit/cyphy | src/WaterHeater.hs | mit | 2,723 | 0 | 13 | 720 | 993 | 548 | 445 | 65 | 4 |
module Database.Siege.Query where
import Data.Char
import Data.List
data SExpr =
Atom String |
List [SExpr]
parse :: String -> SExpr
parse = undefined
generate :: SExpr -> String
generate (List exprs) =
"(" ++ (intercalate " " $ map generate exprs) ++ ")"
generate (Atom var) =
if any isSpace var || elem ')' var then
undefined
else
var
-- main = print $ generate $ List [Atom "lambda", List [Atom "a"], Atom "a"]
data PreConditions =
Type |
HashExists String Bool |
HashLookup String PreConditions |
SetExists String Bool |
ListLookup Int PreConditions |
ListEmpty |
  SequenceAt Int PreConditions |
SequenceSize (Int -> Bool) |
Branch [PreConditions]
data Path =
HashLookup String Path |
SequenceLookup Int Path
data WriteOperation =
Set Path Ref |
Del Path |
SetInsert Path String |
SetRemove Path String |
DropList Path Int
data ReadOperation =
Get Path |
Exists Path |
SetExists Path String |
Size Path
data Query =
Get [ReadOperation] |
Alter PreConditions [WriteOperation]
--SExpr -> Ref -> (Ref, SExpr)
| DanielWaterworth/siege | src/Database/Siege/Query.hs | mit | 1,082 | 0 | 9 | 242 | 300 | 169 | 131 | 42 | 2 |
{-# LANGUAGE TypeFamilies, GADTs, TupleSections #-}
module Text.Regex.Applicative.Interface where
import Control.Applicative hiding (empty)
import Control.Arrow
import Control.Monad (guard)
import qualified Data.List as List
import Data.Maybe
import Text.Regex.Applicative.Types
import Text.Regex.Applicative.Object
-- | 'RE' is a profunctor. This is its contravariant map.
--
-- (A dependency on the @profunctors@ package doesn't seem justified.)
comap :: (s2 -> s1) -> RE s1 a -> RE s2 a
comap f re =
case re of
Eps -> Eps
Symbol t p -> Symbol t (p . f)
Alt r1 r2 -> Alt (comap f r1) (comap f r2)
App r1 r2 -> App (comap f r1) (comap f r2)
Fmap g r -> Fmap g (comap f r)
CatMaybes r -> CatMaybes (comap f r)
Fail -> Fail
Rep gr fn a r -> Rep gr fn a (comap f r)
Void r -> Void (comap f r)
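-- An illustrative example (not part of the original documentation):
-- 'comap' lets a regex over symbols of type @s1@ consume symbols of another
-- type @s2@, given a projection @s2 -> s1@. Here we match on the first
-- components of a list of pairs:
--
-- >Text.Regex.Applicative> match (many (comap fst (sym 'a'))) [('a',1),('a',2)]
-- >Just "aa"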
-- | Match and return any single symbol
anySym :: RE s s
anySym = msym Just
-- | Match zero or more instances of the given expression, which are combined using
-- the given folding function.
--
-- 'Greediness' argument controls whether this regular expression should match
-- as many as possible ('Greedy') or as few as possible ('NonGreedy') instances
-- of the underlying expression.
reFoldl :: Greediness -> (b -> a -> b) -> b -> RE s a -> RE s b
reFoldl g f b a = Rep g f b a
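-- An illustrative example (not part of the original documentation): the
-- accumulator is folded from the left, so @flip (:)@ collects the matched
-- symbols in reverse order:
--
-- >Text.Regex.Applicative> match (reFoldl Greedy (flip (:)) [] anySym) "abc"
-- >Just "cba"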
-- | Match zero or more instances of the given expression, but as
-- few of them as possible (i.e. /non-greedily/). A greedy equivalent of 'few'
-- is 'many'.
--
-- Examples:
--
-- >Text.Regex.Applicative> findFirstPrefix (few anySym <* "b") "ababab"
-- >Just ("a","abab")
-- >Text.Regex.Applicative> findFirstPrefix (many anySym <* "b") "ababab"
-- >Just ("ababa","")
few :: RE s a -> RE s [a]
few a = reverse <$> Rep NonGreedy (flip (:)) [] a
-- | Return matched symbols as part of the return value
withMatched :: RE s a -> RE s (a, [s])
withMatched Eps = flip (,) [] <$> Eps
withMatched (Symbol t p) = Symbol t (\s -> (,[s]) <$> p s)
withMatched (Alt a b) = withMatched a <|> withMatched b
withMatched (App a b) =
(\(f, s) (x, t) -> (f x, s ++ t)) <$>
withMatched a <*>
withMatched b
withMatched Fail = Fail
withMatched (Fmap f x) = (f *** id) <$> withMatched x
withMatched (CatMaybes x) = CatMaybes $
(\ (as, s) -> flip (,) s <$> as) <$> withMatched x
withMatched (Rep gr f a0 x) =
Rep gr (\(a, s) (x, t) -> (f a x, s ++ t)) (a0, []) (withMatched x)
-- N.B.: this ruins the Void optimization
withMatched (Void x) = (const () *** id) <$> withMatched x
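-- An illustrative example (not part of the original documentation):
--
-- >Text.Regex.Applicative> match (withMatched (length <$> many (sym 'a'))) "aaa"
-- >Just (3,"aaa")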
-- | @s =~ a = match a s@
(=~) :: [s] -> RE s a -> Maybe a
(=~) = flip match
infix 2 =~
-- | Attempt to match a string of symbols against the regular expression.
-- Note that the whole string (not just some part of it) should be matched.
--
-- Examples:
--
-- >Text.Regex.Applicative> match (sym 'a' <|> sym 'b') "a"
-- >Just 'a'
-- >Text.Regex.Applicative> match (sym 'a' <|> sym 'b') "ab"
-- >Nothing
--
match :: RE s a -> [s] -> Maybe a
match re = let obj = compile re in \str ->
listToMaybe $
results $
foldl (flip step) obj str
-- | Find a string prefix which is matched by the regular expression.
--
-- Of all matching prefixes, pick one using left bias (prefer the left part of
-- '<|>' to the right part) and greediness.
--
-- This is the match which a backtracking engine (such as Perl's one) would find
-- first.
--
-- If match is found, the rest of the input is also returned.
--
-- See also 'findFirstPrefixWithUncons', of which this is a special case.
--
-- Examples:
--
-- >Text.Regex.Applicative> findFirstPrefix ("a" <|> "ab") "abc"
-- >Just ("a","bc")
-- >Text.Regex.Applicative> findFirstPrefix ("ab" <|> "a") "abc"
-- >Just ("ab","c")
-- >Text.Regex.Applicative> findFirstPrefix "bc" "abc"
-- >Nothing
findFirstPrefix :: RE s a -> [s] -> Maybe (a, [s])
findFirstPrefix = findFirstPrefixWithUncons List.uncons
-- | Find the first prefix, with the given @uncons@ function.
--
-- @since 0.3.4
findFirstPrefixWithUncons :: (ss -> Maybe (s, ss)) -> RE s a -> ss -> Maybe (a, ss)
findFirstPrefixWithUncons = findPrefixWith' (walk emptyObject . threads)
where
walk obj [] = (obj, Nothing)
walk obj (t:ts) =
case getResult t of
Just r -> (obj, Just r)
Nothing -> walk (addThread t obj) ts
-- | Find the longest string prefix which is matched by the regular expression.
--
-- Submatches are still determined using left bias and greediness, so this is
-- different from POSIX semantics.
--
-- If match is found, the rest of the input is also returned.
--
-- See also 'findLongestPrefixWithUncons', of which this is a special case.
--
-- Examples:
--
-- >Text.Regex.Applicative Data.Char> let keyword = "if"
-- >Text.Regex.Applicative Data.Char> let identifier = many $ psym isAlpha
-- >Text.Regex.Applicative Data.Char> let lexeme = (Left <$> keyword) <|> (Right <$> identifier)
-- >Text.Regex.Applicative Data.Char> findLongestPrefix lexeme "if foo"
-- >Just (Left "if"," foo")
-- >Text.Regex.Applicative Data.Char> findLongestPrefix lexeme "iffoo"
-- >Just (Right "iffoo","")
findLongestPrefix :: RE s a -> [s] -> Maybe (a, [s])
findLongestPrefix = findLongestPrefixWithUncons List.uncons
-- | Find the longest prefix, with the given @uncons@ function.
--
-- @since 0.3.4
findLongestPrefixWithUncons :: (ss -> Maybe (s, ss)) -> RE s a -> ss -> Maybe (a, ss)
findLongestPrefixWithUncons = findPrefixWith' ((,) <*> listToMaybe . results)
findPrefixWith'
:: (ReObject s a -> (ReObject s a, Maybe a))
-- ^ Given the regex object, compute the regex object to feed the next input value into, and
-- the result, if any.
-> (ss -> Maybe (s, ss)) -- ^ @uncons@
-> RE s a -> ss -> Maybe (a, ss)
findPrefixWith' walk uncons = \ re -> go (compile re) Nothing
where
go obj resOld ss = case walk obj of
(obj', resThis) ->
let res = flip (,) ss <$> resThis <|> resOld
in
case uncons ss of
_ | failed obj' -> res
Nothing -> res
Just (s, ss) -> go (step s obj') res ss
-- | Find the shortest prefix (analogous to 'findLongestPrefix')
--
-- See also 'findShortestPrefixWithUncons', of which this is a special case.
findShortestPrefix :: RE s a -> [s] -> Maybe (a, [s])
findShortestPrefix = findShortestPrefixWithUncons List.uncons
-- | Find the shortest prefix (analogous to 'findLongestPrefix'), with the given @uncons@ function.
--
-- @since 0.3.4
findShortestPrefixWithUncons :: (ss -> Maybe (s, ss)) -> RE s a -> ss -> Maybe (a, ss)
findShortestPrefixWithUncons uncons = go . compile
where
go obj ss = case results obj of
r:_ -> Just (r, ss)
_ -> do
guard (not (failed obj))
(s, ss) <- uncons ss
go (step s obj) ss
-- | Find the leftmost substring that is matched by the regular expression.
-- Otherwise behaves like 'findFirstPrefix'. Returns the result together with
-- the prefix and suffix of the string surrounding the match.
findFirstInfix :: RE s a -> [s] -> Maybe ([s], a, [s])
findFirstInfix re str =
fmap (\((first, res), last) -> (first, res, last)) $
findFirstPrefix ((,) <$> few anySym <*> re) str
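-- An illustrative example (not part of the original documentation):
--
-- >Text.Regex.Applicative> findFirstInfix (sym 'b') "abc"
-- >Just ("a",'b',"c")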
-- Auxiliary function for findExtremeInfix
prefixCounter :: RE s (Int, [s])
prefixCounter = second reverse <$> reFoldl NonGreedy f (0, []) anySym
where
f (i, prefix) s = ((,) $! (i+1)) $ s:prefix
data InfixMatchingState s a = GotResult
{ prefixLen :: !Int
, prefixStr :: [s]
, result :: a
, postfixStr :: [s]
}
| NoResult
-- a `preferOver` b chooses one of a and b, giving preference to a
preferOver
:: InfixMatchingState s a
-> InfixMatchingState s a
-> InfixMatchingState s a
preferOver NoResult b = b
preferOver b NoResult = b
preferOver a b =
case prefixLen a `compare` prefixLen b of
GT -> b -- prefer b when it has smaller prefix
_ -> a -- otherwise, prefer a
mkInfixMatchingState
:: [s] -- rest of input
-> Thread s ((Int, [s]), a)
-> InfixMatchingState s a
mkInfixMatchingState rest thread =
case getResult thread of
Just ((pLen, pStr), res) ->
GotResult
{ prefixLen = pLen
, prefixStr = pStr
, result = res
, postfixStr = rest
}
Nothing -> NoResult
gotResult :: InfixMatchingState s a -> Bool
gotResult GotResult {} = True
gotResult _ = False
-- Algorithm for finding leftmost longest infix match:
--
-- 1. Add a thread /.*?/ to the beginning of the regexp
-- 2. As soon as we get first accept, we delete that thread
-- 3. When we get more than one accept, we choose one by the following criteria:
-- 3.1. Compare by the length of prefix (since we are looking for the leftmost
-- match)
-- 3.2. If they are produced on the same step, choose the first one (left-biased
-- choice)
-- 3.3. If they are produced on the different steps, choose the later one (since
-- they have the same prefixes, later means longer)
findExtremalInfix
:: -- function to combine a later result (first arg) to an earlier one (second
-- arg)
(InfixMatchingState s a -> InfixMatchingState s a -> InfixMatchingState s a)
-> RE s a
-> [s]
-> Maybe ([s], a, [s])
findExtremalInfix newOrOld re str =
case go (compile $ (,) <$> prefixCounter <*> re) str NoResult of
NoResult -> Nothing
r@GotResult{} ->
Just (prefixStr r, result r, postfixStr r)
where
{-
go :: ReObject s ((Int, [s]), a)
-> [s]
-> InfixMatchingState s a
-> InfixMatchingState s a
-}
go obj str resOld =
let resThis =
foldl
(\acc t -> acc `preferOver` mkInfixMatchingState str t)
NoResult $
threads obj
res = resThis `newOrOld` resOld
obj' =
-- If we just found the first result, kill the "prefixCounter" thread.
-- We rely on the fact that it is the last thread of the object.
if gotResult resThis && not (gotResult resOld)
then fromThreads $ init $ threads obj
else obj
in
case str of
[] -> res
_ | failed obj -> res
(s:ss) -> go (step s obj') ss res
-- | Find the leftmost substring that is matched by the regular expression.
-- Otherwise behaves like 'findLongestPrefix'. Returns the result together with
-- the prefix and suffix of the string surrounding the match.
findLongestInfix :: RE s a -> [s] -> Maybe ([s], a, [s])
findLongestInfix = findExtremalInfix preferOver
-- | Find the leftmost substring that is matched by the regular expression.
-- Otherwise behaves like 'findShortestPrefix'. Returns the result together with
-- the prefix and suffix of the string surrounding the match.
findShortestInfix :: RE s a -> [s] -> Maybe ([s], a, [s])
findShortestInfix = findExtremalInfix $ flip preferOver
-- | Replace matches of the regular expression with its value.
--
-- >Text.Regex.Applicative > replace ("!" <$ sym 'f' <* some (sym 'o')) "quuxfoofooooofoobarfobar"
-- >"quux!!!bar!bar"
replace :: RE s [s] -> [s] -> [s]
replace r = ($ []) . go
where go ys = case findLongestInfix r ys of
Nothing -> (ys ++)
Just (before, m, rest) -> (before ++) . (m ++) . go rest
| feuerbach/regex-applicative | Text/Regex/Applicative/Interface.hs | mit | 11,440 | 8 | 19 | 2,864 | 2,759 | 1,499 | 1,260 | 160 | 9 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.CSSRule (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.CSSRule
#else
module Graphics.UI.Gtk.WebKit.DOM.CSSRule
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.CSSRule
#else
import Graphics.UI.Gtk.WebKit.DOM.CSSRule
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/CSSRule.hs | mit | 420 | 0 | 5 | 39 | 33 | 26 | 7 | 4 | 0 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.MediaList
(js_item, item, js_deleteMedium, deleteMedium, js_appendMedium,
appendMedium, js_setMediaText, setMediaText, js_getMediaText,
getMediaText, js_getLength, getLength, MediaList, castToMediaList,
gTypeMediaList)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"item\"]($2)" js_item ::
JSRef MediaList -> Word -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.item Mozilla MediaList.item documentation>
item ::
(MonadIO m, FromJSString result) =>
MediaList -> Word -> m (Maybe result)
item self index
= liftIO (fromMaybeJSString <$> (js_item (unMediaList self) index))
foreign import javascript unsafe "$1[\"deleteMedium\"]($2)"
js_deleteMedium :: JSRef MediaList -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.deleteMedium Mozilla MediaList.deleteMedium documentation>
deleteMedium ::
(MonadIO m, ToJSString oldMedium) => MediaList -> oldMedium -> m ()
deleteMedium self oldMedium
= liftIO
(js_deleteMedium (unMediaList self) (toJSString oldMedium))
foreign import javascript unsafe "$1[\"appendMedium\"]($2)"
js_appendMedium :: JSRef MediaList -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.appendMedium Mozilla MediaList.appendMedium documentation>
appendMedium ::
(MonadIO m, ToJSString newMedium) => MediaList -> newMedium -> m ()
appendMedium self newMedium
= liftIO
(js_appendMedium (unMediaList self) (toJSString newMedium))
foreign import javascript unsafe "$1[\"mediaText\"] = $2;"
js_setMediaText ::
JSRef MediaList -> JSRef (Maybe JSString) -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.mediaText Mozilla MediaList.mediaText documentation>
setMediaText ::
(MonadIO m, ToJSString val) => MediaList -> Maybe val -> m ()
setMediaText self val
= liftIO (js_setMediaText (unMediaList self) (toMaybeJSString val))
foreign import javascript unsafe "$1[\"mediaText\"]"
js_getMediaText :: JSRef MediaList -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.mediaText Mozilla MediaList.mediaText documentation>
getMediaText ::
(MonadIO m, FromJSString result) => MediaList -> m (Maybe result)
getMediaText self
= liftIO
(fromMaybeJSString <$> (js_getMediaText (unMediaList self)))
foreign import javascript unsafe "$1[\"length\"]" js_getLength ::
JSRef MediaList -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.length Mozilla MediaList.length documentation>
getLength :: (MonadIO m) => MediaList -> m Word
getLength self = liftIO (js_getLength (unMediaList self))
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/MediaList.hs | mit | 3,649 | 44 | 11 | 558 | 901 | 508 | 393 | 59 | 1
-- A module with some code to explore theorems in the monadic lambda calculus
module TP where
import Data.List
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Maybe
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Foldable hiding (concat,any,all)
import Control.Monad.State
import DataTypes
startState :: S
startState = S (-1) Map.empty
-- Util
-- |The 'split' function splits a set (encoded as a list) in all possible ways.
--
-- >>> split [1,2]
-- [([1],[2]),([1,2],[]),([],[1,2]),([2],[1])]
split :: [a] -> [([a],[a])]
split [] = [([],[])]
split [a] = [([],[a]),([a],[])]
split (a : as) = left ++ right where
left = [(a : l,r) | (l,r) <- rec]
right = [(l, a : r) | (l,r) <- rec]
rec = split as
-- |Returns the current counter value and decreases the counter by one.
getAndDec :: NonDeterministicState S Int
getAndDec = do
s <- get
i <- return $ counter s
modify (\x -> x{counter = (i-1)})
return i
-- |Takes a sequent of formulae and generates fresh variables for each formula, wrapping it in a non-deterministic state
toDecorated :: Sequent -> NonDeterministicState S DecoratedSequent
toDecorated (gamma,f) = do
aux <- return $ \x -> do
i <- getAndDec
j <- getAndDec
return $ DF i (V j) x
gamma' <- mapM aux gamma
f' <- aux f
return (gamma',f')
-- |Takes a sequent whose hypotheses are paired with constant lambda terms and generates fresh variables for each formula, wrapping the result in a non-deterministic state and returning a map from the fresh formula identifiers to the original constant terms
toDecoratedWithConstants :: ([(LambdaTerm,Formula)],Formula) -> NonDeterministicState S (DecoratedSequent,Map Int LambdaTerm)
toDecoratedWithConstants (gamma,f) = do
aux <- return $ \(c,x) -> do
i <- getAndDec
j <- getAndDec
return $ (DF i (V j) x,(i,c))
gamma' <- mapM aux gamma
f' <- do
i <- getAndDec
j <- getAndDec
return $ DF i (V j) f
return ((map fst gamma',f'),Map.fromList $ map snd gamma')
-- |Associates two formulae in the variable-formula binding map in the state
associate :: Formula -> Formula -> NonDeterministicState S ()
associate f g = do
s <- get
m <- return $ vars s
modify (\x -> x{vars = Map.insert f g m})
return ()
-- |Looks up the binding of a formula in the variable-formula binding map of the state
getBinding :: Formula -> NonDeterministicState S (Maybe Formula)
getBinding f = aux f [f] where
aux f vs = do
s <- get
m <- return $ vars s
res <- return $ Map.lookup f m
case res of
Nothing -> return Nothing
Just v@(Var _ _ _) -> case Data.List.elem v vs of
False -> aux v (v : vs)
True -> return $ Just f
Just f -> return $ Just f
-- |Tries to unify two formulae: returns 'True' in case of success (and associates the unified formulae) and 'False' otherwise (without changing the state)
unify :: Formula -> Formula -> NonDeterministicState S Bool
unify v1@(Var _ t1 _) v2@(Var _ t2 _)
| t1 == t2 =
do
binding1 <- getBinding v1
binding2 <- getBinding v2
case binding1 of
Nothing -> do
associate v1 v2
return True
Just g -> case binding2 of
Nothing -> do
associate v2 v1
return True
Just f -> return $ f == g
| otherwise = return False
unify v@(Var _ t _) f
| t == (getType f) =
do
binding <- getBinding v
case binding of
Nothing -> do
associate v f
return True
Just g -> return $ g == f
| otherwise = return False
unify f v@(Var _ _ _) = unify v f
unify f g = return $ f == g
-- |Returns all the proofs for a given sequent
proofs :: DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent)
proofs s@(gamma,f) = do
every $ map (\r -> r s) [iR,mR,tR] ++ map (\(r,g) -> r g (delete g gamma,f))
[(r,g) | r <- [i,iL,mL,tL]
, g <- gamma]
-- |The identity rule
i :: DecoratedFormula -> DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent)
i a (hyp,a') | not $ any isLinear (map formula hyp) = do
res <- unify (formula a) (formula a')
case res of
False -> failure
True -> do
i <- getAndDec
x <- return $ V i
return $ Leaf Id ([DF (identifier a) x (formula a)]
, DF (identifier a') x (formula a'))
| otherwise = failure
i _ _ = failure
-- |The left implication rule
iL :: DecoratedFormula -> DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent)
iL f@(DF _ _ (I a b ty lin)) (gamma,c) = do
a_id <- getAndDec
b_id <- getAndDec
t <- getAndDec >>= \i -> return $ V i
x <- getAndDec >>= \j -> return $ V j
splits <- return $ split gamma
proveChildren <- return $ \(g,g') -> do
l <- proofs (g,DF a_id t a)
r <- proofs (DF b_id x b : g',c)
return (l,r)
(l,r) <- every $ map proveChildren splits
(delta,a') <- return $ getVal l
((gamma_with_b), c') <- return $ getVal r
b' <- return $ lookupFormula b_id gamma_with_b
gamma <- return $ delete b' gamma_with_b
y <- getAndDec >>= \i -> return $ V i
return $ Branch ImpL l (DF (identifier f) y (I a b ty lin) : gamma `union` delta
,DF (identifier c') (sub (App y (term a')) (term b') (term c')) (formula c')) r
iL _ _ = failure
-- |The left diamond rule
mL :: DecoratedFormula -> DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent)
mL ma@(DF _ y (M a m1 _ _)) (gamma, f@(DF j _ (M b m2 tyb lin))) | m1 == m2 = do
id_a <- getAndDec
x <- getAndDec >>= \i -> return $ V i
c <- proofs (DF id_a x a : gamma, f)
(gamma_and_a,mb) <- return $ getVal c
a <- return $ lookupFormula id_a gamma_and_a
gamma <- return $ delete a gamma_and_a
return $ Unary MonL (ma : gamma, DF j (y :*: (Lambda (term a) (term mb))) (M b m2 tyb lin)) c
| otherwise = failure
mL _ _ = failure
-- |The left tensor rule
tL :: DecoratedFormula -> DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent)
tL ab@(DF _ y (P a b _ _)) (gamma, c) = do
a_id <- getAndDec
b_id <- getAndDec
f <- getAndDec >>= \i -> return $ V i
g <- getAndDec >>= \i -> return $ V i
child <- proofs (DF a_id f a : DF b_id g b : gamma,c)
(gamma_and_a_and_b,c') <- return $ getVal child
a <- return $ lookupFormula a_id gamma_and_a_and_b
b <- return $ lookupFormula b_id gamma_and_a_and_b
gamma <- return $ delete a $ delete b gamma_and_a_and_b
return $ Unary TensL (ab : gamma, DF (identifier c)
(sub (FirstProjection y)
(term a)
(sub (SecondProjection y)
(term b)
(term c')))
(formula c)) child
tL _ _ = failure
-- |The right implication rule
iR :: DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent)
iR (gamma, DF i _ f@(I a b _ _)) = do
a_id <- getAndDec
b_id <- getAndDec
x <- getAndDec >>= \i -> return $ V i
t <- getAndDec >>= \i -> return $ V i
c <- proofs (DF a_id x a : gamma, DF b_id t b)
(gamma_and_a,b) <- return $ getVal c
a <- return $ lookupFormula a_id gamma_and_a
gamma <- return $ delete a gamma_and_a
return $ Unary ImpR (gamma, DF i (Lambda (term a) (term b)) f) c
iR _ = failure
-- |The right diamond rule
mR :: DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent)
mR (gamma,DF i _ ma@(M a _ _ _)) = do
a_id <- getAndDec
x <- getAndDec >>= \i -> return $ V i
c <- proofs (gamma,DF a_id x a)
(gamma,a) <- return $ getVal c
return $ Unary MonR (gamma,DF i (Eta (term a)) ma) c
mR _ = failure
-- |The right tensor rule
tR :: DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent)
tR (gamma,DF i _ f@(P a b _ _)) = do
a_id <- getAndDec
b_id <- getAndDec
t <- getAndDec >>= \i -> return $ V i
u <- getAndDec >>= \i -> return $ V i
splits <- return $ split gamma
proveChildren <- return $ \(g,g') -> do
l <- proofs (g,DF a_id t a)
r <- proofs (g',DF b_id u b)
return (l,r)
(l,r) <- every $ map proveChildren splits
(gamma,a) <- return $ getVal l
(delta,b) <- return $ getVal r
return $ Branch TensR l (gamma `union` delta, DF i (Pair (term a) (term b)) f) r
tR _ = failure
-- |This function searches for a formula in a list of formulae by comparing their unique ids.
-- It's meant to be used internally by the proof rules above.
-- Raises an error if no formula with the given id is found.
lookupFormula :: Int -> [DecoratedFormula] -> DecoratedFormula
lookupFormula _ [] = error "This will never be reached by the rules"
lookupFormula n (f : rest) | n == (identifier f) = f
| otherwise = lookupFormula n rest
-- |Substitute a term for another inside a third term (should be the substitution of a variable with a term)
sub :: LambdaTerm -> -- the new term
LambdaTerm -> -- the variable/old term
LambdaTerm -> -- the context
       LambdaTerm -- the resulting term
sub _ _ c@(C _) = c
sub new old t@(V _) | t == old = new
| otherwise = t
sub new old t@(Lambda v b) | v == old = t
| otherwise = Lambda v $ sub new old b
sub new old (App f a) = App (sub new old f) (sub new old a)
sub new old (Eta f) = Eta (sub new old f)
sub new old (m :*: k) = (:*:) (sub new old m) (sub new old k)
sub new old (Pair a b) = Pair (sub new old a) (sub new old b)
sub new old (FirstProjection a) = FirstProjection $ sub new old a
sub new old (SecondProjection a) = SecondProjection $ sub new old a
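-- |An illustrative example (not part of the original module): substituting
-- @V 1@ for @V 0@ in an application. The occurrence bound by the lambda is
-- left untouched and only the free occurrence is replaced, so the result is
-- @App (Lambda (V 0) (V 0)) (V 1)@.
exampleSub :: LambdaTerm
exampleSub = sub (V 1) (V 0) (App (Lambda (V 0) (V 0)) (V 0))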
-- |Collects all variables from a proof
collectVars :: BinTree DecoratedSequent -> Set LambdaTerm
collectVars t = Set.fromList $ foldMap aux t where
aux = concat . (map f) . (map term) . j
j (c,f) = f : c
f v@(V _) = [v]
f (C _) = []
f (Lambda v t) = f v ++ f t
f (App g a) = f g ++ f a
f (Eta x) = f x
f (m :*: k) = f m ++ f k
f (Pair a b) = f a ++ f b
f (FirstProjection a) = f a
f (SecondProjection a) = f a
-- |Changes all the negative indices used in the variables to contiguous positive integers
sanitizeVars :: BinTree DecoratedSequent -> BinTree DecoratedSequent
sanitizeVars t = fmap sanitize t where
sanitize (gamma,f) = (map deepSub gamma,deepSub f)
deepSub (DF i lt f) = (DF i (zub lt) f)
zub (V i) = V $ fromJust $ lookup i m
zub c@(C _) = c
zub (Lambda x t) = Lambda (zub x) (zub t)
zub (App f g) = App (zub f) (zub g)
zub (Eta x) = Eta (zub x)
zub (m :*: k) = (zub m) :*: (zub k)
zub (Pair a b) = Pair (zub a) (zub b)
zub (FirstProjection a) = FirstProjection $ zub a
zub (SecondProjection a) = SecondProjection $ zub a
m = zip (map (\(V i) -> i) $ Set.toList $ collectVars t) [0..]
replaceWithConstants :: BinTree DecoratedSequent -> (Map Int LambdaTerm) -> BinTree DecoratedSequent
replaceWithConstants t m = fmap (\n -> replaceWithConstantsInNode n m) t
replaceWithConstantsInNode :: DecoratedSequent -> (Map Int LambdaTerm) -> DecoratedSequent
replaceWithConstantsInNode (gamma,f) m = new where
new = (map fst gamma', deepSub f)
gamma' = map replace gamma
n = map fromJust $ filter isJust $ map snd gamma'
replace df@(DF i v f) = case Map.lookup i m of
Nothing -> (df,Nothing)
Just c -> (DF i c f,Just (v,c))
deepSub (DF i lt f) = (DF i (zub lt) f)
zub v@(V _) = case lookup v n of
Nothing -> v
Just c -> c
zub c@(C _) = c
zub (Lambda x t) = Lambda (zub x) (zub t)
zub (App f g) = App (zub f) (zub g)
zub (Eta x) = Eta (zub x)
zub (m :*: k) = (zub m) :*: (zub k)
zub (Pair a b) = Pair (zub a) (zub b)
zub (FirstProjection a) = FirstProjection $ zub a
zub (SecondProjection a) = SecondProjection $ zub a
alphaEquivalent :: LambdaTerm -> LambdaTerm -> Map Int Int -> Bool
alphaEquivalent c1@(C _) c2@(C _) _ = c1 == c2
alphaEquivalent (V i) (V j) m = case Map.lookup i m of
Just h -> j == h
Nothing -> i == j
alphaEquivalent (Lambda (V i) t) (Lambda (V j) u) m = alphaEquivalent t u (Map.insert i j m)
alphaEquivalent (App t s) (App d z) m = (alphaEquivalent t d m) && (alphaEquivalent s z m)
alphaEquivalent (Eta t) (Eta d) m = alphaEquivalent t d m
alphaEquivalent (t :*: s) (d :*: z) m = (alphaEquivalent t d m) && (alphaEquivalent s z m)
alphaEquivalent (Pair a b) (Pair a' b') m = alphaEquivalent a a' m && alphaEquivalent b b' m
alphaEquivalent (FirstProjection a) (FirstProjection b) m = alphaEquivalent a b m
alphaEquivalent (SecondProjection a) (SecondProjection b) m = alphaEquivalent a b m
alphaEquivalent _ _ _ = False
-- |This function works only under the assumption that all the formulae in the hypothesis are distinct, otherwise the answer is NO!
equivalentDecoratedSequent :: DecoratedSequent -> DecoratedSequent -> Bool
equivalentDecoratedSequent s1 s2 = f1 == f2 && hypEqual && noDuplicates && alphaEquivalent t1 t2 e where
noDuplicates = (length $ Set.toList $ Set.fromList (map formula hyp1)) == length hyp1 &&
(length $ Set.toList $ Set.fromList (map formula hyp2)) == length hyp2
hyp1 = fst s1
hyp2 = fst s2
hypEqual = (Set.fromList (map formula hyp1)) == (Set.fromList (map formula hyp2))
varId (V i) = i
varId _ = -1
m1 = Map.fromList $ map (\x -> (formula x, varId $ term x)) hyp1
m2 = Map.fromList $ map (\x -> (formula x, varId $ term x)) hyp2
e = mixMaps m1 m2
t1 = betaReduce $ monadReduce $ etaReduce $ term $ snd $ s1
t2 = betaReduce $ monadReduce $ etaReduce $ term $ snd $ s2
f1 = formula $ snd $ s1
f2 = formula $ snd $ s2
mixMaps :: Map Formula Int -> Map Formula Int -> Map Int Int
mixMaps m n = Map.fromList $ aux (Map.toList m) where
aux [] = []
aux ((f,i) : rest) = (i,n Map.! f) : aux rest
etaReduce :: LambdaTerm -> LambdaTerm
etaReduce c@(C _) = c
etaReduce v@(V _) = v
etaReduce (App f g) = App (etaReduce f) (etaReduce g)
etaReduce (Eta t) = Eta $ etaReduce t
etaReduce (m :*: k) = (etaReduce m) :*: (etaReduce k)
etaReduce (Pair a b) = Pair (etaReduce a) (etaReduce b)
etaReduce (FirstProjection a) = FirstProjection $ etaReduce a
etaReduce (SecondProjection a) = SecondProjection $ etaReduce a
etaReduce (Lambda (V i) (App f (V j))) | i == j = etaReduce f
| otherwise = Lambda (V i) (App (etaReduce f) (V j))
etaReduce (Lambda x t) = let x' = etaReduce x
t' = etaReduce t
in if t == t' then
Lambda x' t'
else
etaReduce (Lambda x' t')
betaReduce :: LambdaTerm -> LambdaTerm
betaReduce t = aux t Map.empty where
aux c@(C _) _ = c
aux v@(V i) m = case Map.lookup i m of
Nothing -> v
Just t -> t
aux (App (Lambda (V i) body) x) m = aux body (Map.insert i x m)
aux (App f x) m = let f' = aux f m
in if f == f' then
(App f (aux x m))
else
aux (App f' x) m
aux (Lambda x b) m = Lambda (aux x m) (aux b m)
aux (Eta t) m = Eta $ aux t m
aux (n :*: k) m = (aux n m) :*: (aux k m)
aux (Pair a b) m = Pair (aux a m) (aux b m)
aux (FirstProjection a) m = FirstProjection $ aux a m
aux (SecondProjection a) m = SecondProjection $ aux a m
monadReduce :: LambdaTerm -> LambdaTerm
monadReduce ((Eta t) :*: u) = App (monadReduce u) (monadReduce t)
monadReduce (t :*: (Lambda (V i) (Eta (V j)))) | i == j = monadReduce t
| otherwise = (monadReduce t) :*: (Lambda (V i) (Eta (V j)))
monadReduce v@(V _) = v
monadReduce c@(C _) = c
monadReduce (App t u) = App (monadReduce t) (monadReduce u)
monadReduce (Lambda x t) = Lambda (monadReduce x) (monadReduce t)
monadReduce (Eta t) = Eta $ monadReduce t
monadReduce (Pair a b) = Pair (monadReduce a) (monadReduce b)
monadReduce (FirstProjection a) = FirstProjection $ monadReduce a
monadReduce (SecondProjection a) = SecondProjection $ monadReduce a
monadReduce (t :*: u) = let t' = monadReduce t
u' = monadReduce u
in if t == t' && u == u' then
t' :*: u'
else
monadReduce (t' :*: u')
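-- |An illustrative example (not part of the original module): the reduction
-- pipeline used by 'equivalentDecoratedSequent' also normalises monadic
-- administrative redexes. Here the left identity rule (an 'Eta' on the left
-- of ':*:' turns into plain application) followed by beta reduction yields
-- @Eta (V 5)@.
exampleNormalForm :: LambdaTerm
exampleNormalForm =
  betaReduce $ monadReduce $ etaReduce $
    Eta (V 5) :*: Lambda (V 0) (Eta (V 0))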
| gianlucagiorgolo/glue-tp | TP.hs | mit | 16,780 | 0 | 19 | 4,987 | 7,199 | 3,588 | 3,611 | 346 | 12 |
module PhotonMap
(
PhotonMap
, PhotonSurfaceInteraction
, count
, generatePhotonMap
, getLightToViewerAtIntersection
)
where
import Numeric.FastMath ( )
import Control.DeepSeq ( NFData(..), force )
import Control.Monad ( replicateM, liftM )
import Data.KdMap.Static ( KdMap, buildWithDist, inRadius )
import Core ( Point(..), Ray(..), UnitVector
, translate, neg, magnitude, to, calculateReflection, (|*|) )
import Light ( Light, PhotonLightSource, sumLights, scaled )
import Material ( probabilityDiffuseReflection, probabilitySpecularReflection
, diffuseLight, specularLight, brdf )
import Rnd ( Rnd, rndDouble, rndDirectionInHemisphere )
import Scene ( Scene, Intersection(..), allPhotonLightSources, sceneIntersection )
import Volume ( Volume(..) )
data PhotonSurfaceInteraction = PhotonSurfaceInteraction !UnitVector !Light
data LightTransferEvent = EventDiffuse | EventSpecular
data LightTransfer = LightTransfer Ray Light LightTransferEvent
instance NFData PhotonSurfaceInteraction where
rnf (PhotonSurfaceInteraction !v !l) = rnf v `seq` rnf l `seq` ()
data PhotonMap = PhotonMap (KdMap Double Point PhotonSurfaceInteraction) !Int !Double
instance NFData PhotonMap where
rnf (PhotonMap !k !n !s) = rnf k `seq` rnf n `seq` rnf s `seq` ()
generatePhotonMap :: Scene -> Int -> Rnd PhotonMap
generatePhotonMap scene num = do
psis <- generatePhotonSurfaceInxs scene num
return $ force $ PhotonMap (buildWithDist pointToList distSquared psis) (length psis) (1.0 / fromIntegral num)
where
pointToList (Point !x !y !z) = [x, y, z]
distSquared (Point !x1 !y1 !z1) (Point !x2 !y2 !z2) = xd * xd + yd * yd + zd * zd
where
xd = x1 - x2
yd = y1 - y2
zd = z1 - z2
count :: PhotonMap -> Int
count (PhotonMap _ n _) = n
generatePhotonSurfaceInxs :: Scene -> Int -> Rnd [(Point, PhotonSurfaceInteraction)]
generatePhotonSurfaceInxs scene num =
concatM $ mapM (generatePhotonSurfaceInxsForLightSource scene numPerLight) lightSources
where
lightSources = allPhotonLightSources scene
numPerLight = num `div` length lightSources
generatePhotonSurfaceInxsForLightSource :: Scene -> Int -> PhotonLightSource -> Rnd [(Point, PhotonSurfaceInteraction)]
generatePhotonSurfaceInxsForLightSource scene num lightSource =
concatM $ replicateM num $ generateSinglePhotonSurfaceInxn scene lightSource
generateSinglePhotonSurfaceInxn :: Scene -> PhotonLightSource -> Rnd [(Point, PhotonSurfaceInteraction)]
generateSinglePhotonSurfaceInxn scene lightSource = do
(ray, light) <- lightSource
traceLightRay 10 scene (LightTransfer ray light EventDiffuse)
handlePhotonIntersection :: [(Point, PhotonSurfaceInteraction)] -> LightTransferEvent -> (Point, PhotonSurfaceInteraction) -> [(Point, PhotonSurfaceInteraction)]
handlePhotonIntersection !list !event psi =
case event of
EventDiffuse -> psi : list
EventSpecular -> list
traceLightRay :: Int -> Scene -> LightTransfer -> Rnd [(Point, PhotonSurfaceInteraction)]
traceLightRay !limit !scene !incoming@(LightTransfer !incomingRay incomingLight _) =
if limit <= 0 then return []
else case maybeIntersection of
Nothing -> return []
Just ix -> do
maybeOutgoingLight <- computeOutgoingLightRay ix incoming
let !event = maybe EventDiffuse getEvent maybeOutgoingLight
let !photonIntersection = toPhotonIntersection ix
recurse <- maybe (return []) (traceLightRay (limit - 1) scene) maybeOutgoingLight
return $ handlePhotonIntersection recurse event photonIntersection
where
getEvent (LightTransfer _ _ ev) = ev
!maybeIntersection = sceneIntersection scene incomingRay
toPhotonIntersection (Intersection (Ray _ !rd) _ _ !pos) =
(pos, PhotonSurfaceInteraction rd incomingLight)
computeOutgoingLightRay :: Intersection -> LightTransfer -> Rnd (Maybe LightTransfer)
computeOutgoingLightRay (Intersection _ (Volume _ !nrm _ !material) _ !wp) (LightTransfer (Ray _ !incomingRay) !incomingLight _) = do
prob <- rndDouble 0.0 1.0
go prob
where
!pd = probabilityDiffuseReflection material
ps = probabilitySpecularReflection material
go prob | prob < pd = goDiffuse
| prob < pd + ps = goSpecular
| otherwise = return Nothing
goDiffuse = do
dr <- diffuseReflect surfaceNormal
return $ Just $ LightTransfer (Ray movedFromSurface dr)
(diffuseLight material incomingLight)
EventDiffuse
goSpecular =
return $ Just $ LightTransfer (Ray movedFromSurface $ calculateReflection incomingRay surfaceNormal)
(specularLight material incomingLight)
EventSpecular
!surfaceNormal = nrm wp
!movedFromSurface = translate (surfaceNormal |*| epsilon) wp
!epsilon = 0.0001
diffuseReflect :: UnitVector -> Rnd UnitVector
diffuseReflect =
rndDirectionInHemisphere
getLightToViewerAtIntersection :: PhotonMap -> Intersection -> Light
getLightToViewerAtIntersection (PhotonMap !kdmap _ !scale) (Intersection (Ray _ !outgoingVector) (Volume _ !nrm _ !material) _ !wp) =
(sumLights $ map attenuateByDistance nearInteractions) `scaled` scale
where
attenuateByDistance (!pp, !psi) =
brdfForInteraction psi `scaled` coneFilter pp wp maxDistance
brdfForInteraction (PhotonSurfaceInteraction !incomingVector !incomingLight) =
surfaceBrdf incomingLight (neg incomingVector) (neg outgoingVector) surfaceNormal wp
!surfaceNormal = nrm wp
!surfaceBrdf = brdf material
!nearInteractions = inRadius kdmap maxDistance wp
!maxDistance = 10.0
concatM :: Monad m => m [[a]] -> m [a]
concatM =
liftM concat
coneFilter :: Point -> Point -> Double -> Double
coneFilter !pp !wp !maxDistance =
(1.0 - distance / (2.0 * maxDistance)) / maxDistance
where
!distance = magnitude (pp `to` wp)
-- gaussianFilter :: Point -> Point -> Double -> Double
-- gaussianFilter !pp !wp !maxDistance =
-- a * (1.0 - (1.0 - exp (mb * px)) / dv)
-- where
-- !a = 0.918
-- !mb = -1.953
-- !dv = 1.0 - exp mb
-- !ds = magnitudeSquared (pp `to` wp)
-- !px = ds / (2.0 * maxDistance * maxDistance)
| stu-smith/rendering-in-haskell | src/experiment08/PhotonMap.hs | mit | 6,437 | 0 | 16 | 1,459 | 1,773 | 902 | 871 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Y2018.M04.D17.Exercise where
{--
So, yesterday we had a bunch of JSON from various periodicals, but they were
all in the same JSON format, which made it simple to upload them into a common
database.
Today, we have JSON, but in a different format, that we need to upload into
that database, so, let's do it! YES!
Also, the size is different, so it will be compressed this time, FER SHUR!
--}
import qualified Codec.Compression.GZip as GZ
import Data.Aeson
-- below imports available via 1HaskellADay git repository
import Store.SQL.Connection (withConnection)
import Y2018.M04.D16.Exercise
data ViceArticles = VA { arts :: [Vice] }
data Vice = Vice { vid :: Integer, vart :: String }
deriving Show
-- of course, Vice has a hashed id, but oh, well. Use its idx, instead
-- The articles are in this directory:
viceDir :: FilePath
viceDir = "Y2018/M04/D17/compressed/"
-- and the files are:
vices :: [FilePath]
vices = map (("vice-" ++) . (++ ".json")) (words "rands tops")
-- remember to add ".gz" but I did that for ya yesterday.
instance FromJSON ViceArticles where
parseJSON (Object o) = undefined
instance FromJSON Vice where
parseJSON (Object o) = undefined
vice2Art :: Vice -> IxArt
vice2Art v = undefined
-- parse in the vice articles then save them to the articles database.
-- Also, make sure non-ASCII characters are removed from the text, because ick.
-- Of course, first, you need to add "VIC" as a publisher and the vices
-- as file types to the database. See yesterday's exercise and do that.
-- hint: use withConnection ARCHIVE to get a connection to the ARCHIVE database
readVices :: FilePath -> IO [IxArt]
readVices vicedir = undefined -- remember to append ".gz"
-- with the above readVices function, you should be able to call main'' and go!
| geophf/1HaskellADay | exercises/HAD/Y2018/M04/D17/Exercise.hs | mit | 1,834 | 0 | 9 | 336 | 229 | 140 | 89 | 21 | 1 |
-- | Analysis and transformation of SQL queries.
module Database.Selda.Transform where
import Database.Selda.Column
import Database.Selda.SQL
import Database.Selda.Query.Type
import Database.Selda.Types
-- | Remove all dead columns recursively, assuming that the given list of
-- column names contains all names present in the final result.
removeDeadCols :: [ColName] -> SQL -> SQL
removeDeadCols live sql =
case source sql' of
EmptyTable -> sql'
TableName _ -> sql'
Values _ _ -> sql'
RawSql _ -> sql'
Product qs -> sql' {source = Product $ map noDead qs}
Join jt on l r -> sql' {source = Join jt on (noDead l) (noDead r)}
Union union_all l r -> sql' {source = Union union_all (noDead l) (noDead r)}
where
noDead = removeDeadCols live'
sql' = keepCols (implicitlyLiveCols sql ++ live) sql
live' = allColNames sql'
-- | Return the names of all columns in the given top-level query.
-- Subqueries are not traversed.
allColNames :: SQL -> [ColName]
allColNames sql = colNames (cols sql) ++ implicitlyLiveCols sql
-- | Return the names of all non-output (i.e. 'cols') columns in the given
-- top-level query. Subqueries are not traversed.
implicitlyLiveCols :: SQL -> [ColName]
implicitlyLiveCols sql = concat
[ concatMap allNamesIn (restricts sql)
, colNames (groups sql)
, colNames (map snd $ ordering sql)
, colNames (liveExtras sql)
, case source sql of
Join _ on _ _ -> allNamesIn on
_ -> []
]
-- | Get all column names appearing in the given list of (possibly complex)
-- columns.
colNames :: [SomeCol SQL] -> [ColName]
colNames cs = concat
[ [n | Some c <- cs, n <- allNamesIn c]
, [n | Named _ c <- cs, n <- allNamesIn c]
, [n | Named n _ <- cs]
]
-- | Remove all columns from a query's list of outputs, except for the given
--   named columns and any aggregates.
-- If we want to refer to a column in an outer query, it must have a name.
-- If it doesn't, then it's either not referred to by an outer query, or
-- the outer query duplicates the expression, thereby referring directly
-- to the names of its components.
keepCols :: [ColName] -> SQL -> SQL
keepCols live sql = sql {cols = filtered}
where
filtered = filter (`oneOf` live) (cols sql)
oneOf (Some (AggrEx _ _)) _ = True
oneOf (Named _ (AggrEx _ _)) _ = True
oneOf (Some (Col n)) ns = n `elem` ns
oneOf (Named n _) ns = n `elem` ns
oneOf _ _ = False
-- | Build the outermost query from the SQL generation state.
-- Groups are ignored, as they are only used by 'aggregate'.
state2sql :: GenState -> SQL
state2sql (GenState [sql] srs _ _ _) =
sql {restricts = restricts sql ++ srs}
state2sql (GenState ss srs _ _ _) =
SQL (allCols ss) (Product ss) srs [] [] Nothing [] False
-- | Get all output columns from a list of SQL ASTs.
allCols :: [SQL] -> [SomeCol SQL]
allCols sqls = [outCol col | sql <- sqls, col <- cols sql]
where
outCol (Named n _) = Some (Col n)
outCol c = c
| valderman/selda | selda/src/Database/Selda/Transform.hs | mit | 3,098 | 0 | 12 | 799 | 886 | 464 | 422 | 51 | 7 |
-- Copyright (c) 2016-present, SoundCloud Ltd.
-- All rights reserved.
--
-- This source code is distributed under the terms of a MIT license,
-- found in the LICENSE file.
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module Kubernetes.Model.V1.Endpoints
( Endpoints (..)
, kind
, apiVersion
, metadata
, subsets
, mkEndpoints
) where
import Control.Lens.TH (makeLenses)
import Data.Aeson.TH (defaultOptions, deriveJSON,
fieldLabelModifier)
import Data.Text (Text)
import GHC.Generics (Generic)
import Kubernetes.Model.V1.EndpointSubset (EndpointSubset)
import Kubernetes.Model.V1.ObjectMeta (ObjectMeta)
import Prelude hiding (drop, error, max,
min)
import qualified Prelude as P
import Test.QuickCheck (Arbitrary, arbitrary)
import Test.QuickCheck.Instances ()
-- | Endpoints is a collection of endpoints that implement the actual service. Example:\n Name: \"mysvc\",\n Subsets: [\n {\n Addresses: [{\"ip\": \"10.10.1.1\"}, {\"ip\": \"10.10.2.2\"}],\n Ports: [{\"name\": \"a\", \"port\": 8675}, {\"name\": \"b\", \"port\": 309}]\n },\n {\n Addresses: [{\"ip\": \"10.10.3.3\"}],\n Ports: [{\"name\": \"a\", \"port\": 93}, {\"name\": \"b\", \"port\": 76}]\n },\n ]
data Endpoints = Endpoints
{ _kind :: !(Maybe Text)
, _apiVersion :: !(Maybe Text)
, _metadata :: !(Maybe ObjectMeta)
, _subsets :: !([EndpointSubset])
} deriving (Show, Eq, Generic)
makeLenses ''Endpoints
$(deriveJSON defaultOptions{fieldLabelModifier = (\n -> if n == "_type_" then "type" else P.drop 1 n)} ''Endpoints)
instance Arbitrary Endpoints where
arbitrary = Endpoints <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
-- | Use this method to build a Endpoints
mkEndpoints :: [EndpointSubset] -> Endpoints
mkEndpoints xsubsetsx = Endpoints Nothing Nothing Nothing xsubsetsx
| soundcloud/haskell-kubernetes | lib/Kubernetes/Model/V1/Endpoints.hs | mit | 2,506 | 0 | 14 | 728 | 351 | 210 | 141 | 42 | 1 |
{-# LANGUAGE TemplateHaskell, OverloadedStrings #-}
import Network.Wai.Middleware.OAuth2 as OAuth2
import Network.OAuth.OAuth2
import Keys (googleKey)
import Data.ByteString
import Control.Monad (unless)
import System.Exit (exitFailure)
import Test.QuickCheck.All (quickCheckAll)
import Test.QuickCheck (Property)
import Test.QuickCheck.Monadic (assert, monadicIO, run)
import Network.Wai.Test (defaultRequest, request, runSession, simpleBody, simpleHeaders, SResponse)
googleScopeEmail :: QueryParams
googleScopeEmail = [("scope", "email")]
state :: QueryParams
state = [("state", "00000000")]
prop_login :: Property
prop_login = monadicIO $ do
login <- run $ runSession (request defaultRequest) (\_ sendResponse -> sendResponse $ OAuth2.login googleKey (googleScopeEmail ++ state))
run $ print (show $ simpleHeaders login)
assert $ (simpleHeaders login) == locationHeader
where
--build it myself and check against OAuth2 answer
locationHeader = [("Location",oauthOAuthorizeEndpoint googleKey `appendQueryParam` (transform' [("client_id",Just $ oauthClientId googleKey),("response_type",Just "code"),("redirect_uri",oauthCallback googleKey),("scope",Just "email"),("state",Just "00000000")]))]
-- Empty splice so the prop_* definitions above are visible to $quickCheckAll.
return []
main = do
  allPass <- $quickCheckAll -- Run QuickCheck on all prop_ functions
  unless allPass exitFailure
| NerdGGuy/wai-middleware-oauth2 | test/test.hs | mit | 1,345 | 0 | 16 | 178 | 371 | 213 | 158 | 24 | 1 |
{-# LANGUAGE TypeOperators, RecursiveDo, ScopedTypeVariables, TemplateHaskell, QuasiQuotes, OverloadedStrings, ExtendedDefaultRules #-}
module Main where
import Graphics.UI.Gtk
import Graphics.UI.Gtk.General.CssProvider
import Graphics.UI.Gtk.General.StyleContext
import Graphics.UI.Gtk.Abstract.Widget
import Control.FRPNow
import Control.FRPNow.GTK
import Control.FRPNow.GTK.MissingFFI
import Control.Monad
import Control.Applicative
import Control.Concurrent
import Control.Monad.Trans
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Map.Lazy as M
import qualified Data.Set as S
import Text.PhonotacticLearner.PhonotacticConstraints
import Data.Array.IArray
import Data.FileEmbed
import Text.RawString.QQ
import Data.Maybe
import FeatureTableEditor
import LexiconEditor
import GrammarEditor
import LearnerControls
import System.IO
default (T.Text)
ipaft :: FeatureTable String
ipaft = fromJust . csvToFeatureTable id . T.unpack . T.decodeUtf8 $ $(embedFile "app/ft-ipa.csv")
css :: T.Text
css = [r|
#featuretable{
background-color: @theme_base_color;
padding: 5px;
font-size: 80%;
}
#featuretable label {
padding: 0 2px;
}
#featuretable .oddcol {
background-color: mix(@theme_base_color,@theme_bg_color,0.5);
}
#featuretable .segheader {font-weight: bold;}
#featuretable .featheader {font-weight: bold;}
#featuretable .featzero {color: mix(@theme_fg_color, transparent, 0.35);}
|]
main :: IO ()
main = runNowGTK $ do
sync $ hSetEncoding stdout utf8
sync $ hSetBuffering stdout LineBuffering
-- example gtk app
-- initialization code
window <- sync $ windowNew
sync $ set window [windowTitle := "Hayes/Wilson Phonotactic Learner."]
rec (fteditor, dynft) <- createEditableFT (Just window) ipaft
(lexeditor, dynlex) <- createEditableLexicon (Just window) (fmap segsFromFt dynft) lexout
(grammareditor, dyngrammar) <- createLoadableGrammar (Just window) (fmap (M.keysSet . featLookup) dynft) grammarout
(controls,pbar,lexout,grammarout) <- createPhonotacticLearnerWidget dynft dynlex dyngrammar
sync $ do
sp <- cssProviderNew
cssProviderLoadFromString sp css
thescreen <- widgetGetScreen window
styleContextAddProviderForScreen thescreen sp 600
centerpanes <- set' [panedWideHandle := True] =<< createVPaned controls fteditor
rpanes <- set' [panedWideHandle := True] =<< createHPaned centerpanes grammareditor
lpanes <- set' [panedWideHandle := True] =<< createHPaned lexeditor rpanes
box <- createVBox 0 $ do
bstretch lpanes
bpack =<< liftIO (vSeparatorNew)
bpack pbar
containerAdd window box
sync $ window `on` deleteEvent $ liftIO mainQuit >> return False
sync $ widgetShowAll window
| george-steel/maxent-learner | maxent-learner-hw-gui/app/Main.hs | gpl-2.0 | 2,838 | 0 | 17 | 496 | 629 | 334 | 295 | 57 | 1 |
{- |
Module : $Header$
Copyright : (c) C. Maeder, DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (via imports)
-}
module Main where
import Comorphisms.HetLogicGraph
import Comorphisms.LogicGraph
import Logic.Comorphism
import Logic.Logic
import Data.Maybe (mapMaybe)
import qualified Data.Map as Map
main :: IO ()
main = do
testInj_mapSublogicAll
putStrLn ("Size of HetSublogicGraph (n,e): " ++ show (size hetSublogicGraph))
size :: HetSublogicGraph -> (Int, Int)
size hsg = (Map.size $ sublogicNodes hsg,
Map.fold (\ x y -> length x + y) 0 $ comorphismEdges hsg)
testInj_mapSublogic :: (Comorphism cid
lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1
lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2)
=> cid -> Bool
testInj_mapSublogic cid =
all (`elem` all_sublogics (targetLogic cid))
$ mapMaybe (mapSublogic cid) $ all_sublogics $ sourceLogic cid
testInj_mapSublogicAll :: IO ()
testInj_mapSublogicAll = do
putStrLn "Every Comorphism should be followed by True"
let testResults = map (\ (Comorphism c) -> testInj_mapSublogic c)
comorphismList
mapM_ showTest $ zip comorphismList testResults
putStrLn ("Test " ++ if and testResults then "succeeded." else "failed!")
where showTest (acm, res) = putStrLn (show acm ++ " : " ++ show res)
| nevrenato/Hets_Fork | Comorphisms/test/sublogicGraph.hs | gpl-2.0 | 1,644 | 0 | 14 | 375 | 391 | 201 | 190 | 31 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Rewriting.TRS.Raw where
import Autolib.Symbol
import Autolib.TES.Term
import Autolib.TES.Rule
import Autolib.TES.Identifier
import qualified Autolib.TES
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
-- | this is the raw type,
-- needs to be processed because what should be variables
-- will be parsed as nullary symbols by the derived parser
data ( Symbol c, Symbol v ) => TRS v c =
TRS { variablen :: [ v ]
, regeln :: [ Rule ( Term v c ) ]
}
deriving ( Eq, Ord, Typeable )
$(derives [makeReader, makeToDoc] [''TRS])
-- local variables:
-- mode: haskell
-- end;
| florianpilz/autotool | src/Rewriting/TRS/Raw.hs | gpl-2.0 | 661 | 0 | 12 | 140 | 151 | 92 | 59 | 15 | 0 |
module SumAndCount where
-- https://stepic.org/lesson/%D0%9B%D0%BE%D0%BA%D0%B0%D0%BB%D1%8C%D0%BD%D1%8B%D0%B5-%D1%81%D0%B2%D1%8F%D0%B7%D1%8B%D0%B2%D0%B0%D0%BD%D0%B8%D1%8F-%D0%B8-%D0%BF%D1%80%D0%B0%D0%B2%D0%B8%D0%BB%D0%B0-%D0%BE%D1%82%D1%81%D1%82%D1%83%D0%BF%D0%BE%D0%B2-8414/step/8?unit=1553
sum'n'count :: Integer -> (Integer, Integer)
sum'n'count x | x == 0 = (0, 1)
| otherwise = let
helper 0 s c = (s,c)
helper n s c = helper (quot n 10) (s + rem n 10) (c + 1)
                 in helper (abs x) 0 0
| devtype-blogspot-com/Haskell-Examples | SumAndCount/SumAndCount.hs | gpl-3.0 | 549 | 3 | 80 | 116 | 467 | 231 | 236 | 7 | 2
{-# LANGUAGE ScopedTypeVariables, NoMonomorphismRestriction, RecordWildCards #-}
module Main where
import Control.Applicative
import Control.Monad
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.State
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Traversable as T
import qualified Data.HashMap.Lazy as HM
import Data.Maybe
import System.Directory
import System.Environment
import System.FilePath ((</>))
import System.IO
import System.Log.Logger
--
import HEP.Parser.LHE.Type
import HEP.Automation.MadGraph.Model
import HEP.Automation.MadGraph.Model.SimplifiedSUSY
import HEP.Automation.MadGraph.Run
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Type
import HEP.Automation.EventChain.Driver
import HEP.Automation.EventChain.File
import HEP.Automation.EventChain.LHEConn
import HEP.Automation.EventChain.Type.Skeleton
import HEP.Automation.EventChain.Type.Spec
import HEP.Automation.EventChain.Type.Process
import HEP.Automation.EventChain.SpecDSL
import HEP.Automation.EventChain.Simulator
import HEP.Automation.EventChain.Process
import HEP.Automation.EventChain.Process.Generator
import HEP.Automation.EventGeneration.Config
import HEP.Automation.EventGeneration.Type
import HEP.Automation.EventGeneration.Work
import HEP.Storage.WebDAV
--
import qualified Paths_madgraph_auto as PMadGraph
import qualified Paths_madgraph_auto_model as PModel
jets = [1,2,3,4,-1,-2,-3,-4,21]
leptons = [11,13,-11,-13]
neut = 1000022
adms = [9000201,-9000201,9000202,-9000202]
squarksL = [ 1000001, -1000001 -- sdown_L
, 1000002, -1000002 -- sup_L
, 1000003, -1000003 -- sstrange_L
, 1000004, -1000004 -- scharm_L
]
w = [ 24, -24 ]
p_2sq_2w2j2x :: DCross
p_2sq_2w2j2x = x (t proton, t proton, [p_squarkl, p_squarkl])
p_squarkl :: DDecay
p_squarkl = d ( squarksL, [t w, t jets, neut ] )
idx_2sq_2w2j2x :: CrossID ProcSmplIdx
idx_2sq_2w2j2x = mkCrossIDIdx (mkDICross p_2sq_2w2j2x)
map_2sq_2w2j2x :: ProcSpecMap
map_2sq_2w2j2x =
HM.fromList [ (Nothing , MGProc ["define sql = ul ul~ dl dl~ sl sl~ cl cl~"]
["p p > sql sql QED=0"])
, (Just (3, 1000001,[]), MGProc [] ["dl > w- u n1"])
, (Just (3,-1000001,[]), MGProc [] ["dl~ > w+ u~ n1"])
, (Just (3, 1000002,[]), MGProc [] ["ul > w+ d n1"])
, (Just (3,-1000002,[]), MGProc [] ["ul~ > w- d~ n1"])
, (Just (3, 1000003,[]), MGProc [] ["sl > w- c n1"])
, (Just (3,-1000003,[]), MGProc [] ["sl~ > w+ c~ n1"])
, (Just (3, 1000004,[]), MGProc [] ["cl > w+ s n1"])
, (Just (3,-1000004,[]), MGProc [] ["cl~ > w- s~ n1"])
--
, (Just (4, 1000001,[]), MGProc [] ["dl > w- u n1"])
, (Just (4,-1000001,[]), MGProc [] ["dl~ > w+ u~ n1"])
, (Just (4, 1000002,[]), MGProc [] ["ul > w+ d n1"])
, (Just (4,-1000002,[]), MGProc [] ["ul~ > w- d~ n1"])
, (Just (4, 1000003,[]), MGProc [] ["sl > w- c n1"])
, (Just (4,-1000003,[]), MGProc [] ["sl~ > w+ c~ n1"])
, (Just (4, 1000004,[]), MGProc [] ["cl > w+ s n1"])
, (Just (4,-1000004,[]), MGProc [] ["cl~ > w- s~ n1"])
]
modelparam :: Double -> Double -> Double -> ModelParam SimplifiedSUSY
modelparam mneut mgl msq = SimplifiedSUSYParam mneut mgl msq
-- |
mgrunsetup :: Int -> RunSetup
mgrunsetup n =
RS { numevent = n
, machine = LHC7 ATLAS
, rgrun = Auto
, rgscale = 200.0
, match = NoMatch
, cut = NoCut
, pythia = RunPYTHIA
, lhesanitizer = -- NoLHESanitize
LHESanitize (Replace [(9000201,1000022),(-9000201,1000022)])
, pgs = RunPGS (AntiKTJet 0.4,NoTau)
, uploadhep = NoUploadHEP
, setnum = 1
}
worksets = [ (mn,50000,mq,10000) | mn <- [100,200..1200], mq <- [mn+100,mn+200..1200] ]
main :: IO ()
main = do
args <- getArgs
let fp = args !! 0
n1 = read (args !! 1) :: Int
n2 = read (args !! 2) :: Int
updateGlobalLogger "MadGraphAuto" (setLevel DEBUG)
mapM_ (scanwork fp) (drop (n1-1) . take n2 $ worksets )
-- |
getScriptSetup :: FilePath -- ^ sandbox directory
-> FilePath -- ^ mg5base
-> FilePath -- ^ main montecarlo run
-> IO ScriptSetup
getScriptSetup dir_sb dir_mg5 dir_mc = do
dir_mdl <- (</> "template") <$> PModel.getDataDir
dir_tmpl <- (</> "template") <$> PMadGraph.getDataDir
return $
SS { modeltmpldir = dir_mdl
, runtmpldir = dir_tmpl
, sandboxdir = dir_sb
, mg5base = dir_mg5
, mcrundir = dir_mc
}
scanwork :: FilePath -> (Double,Double,Double,Int) -> IO ()
scanwork fp (mneut,mgl,msq,n) = do
homedir <- getHomeDirectory
getConfig fp >>=
maybe (return ()) (\ec -> do
let ssetup = evgen_scriptsetup ec
whost = evgen_webdavroot ec
pkey = evgen_privatekeyfile ec
pswd = evgen_passwordstore ec
Just cr <- getCredential pkey pswd
let wdavcfg = WebDAVConfig { webdav_credential = cr
, webdav_baseurl = whost }
let param = modelparam mneut mgl msq
mgrs = mgrunsetup n
evchainGen SimplifiedSUSY
ssetup
("2sq_2w2j2x","2sq_2w2j2x")
param
map_2sq_2w2j2x p_2sq_2w2j2x
mgrs
let wsetup' = getWorkSetupCombined SimplifiedSUSY ssetup param ("2sq_2w2j2x","2sq_2w2j2x") mgrs
wsetup = wsetup' { ws_storage = WebDAVRemoteDir "montecarlo/admproject/SimplifiedSUSY/scan2w2j2x" }
putStrLn "phase2work start"
phase2work wsetup
putStrLn "phase3work start"
phase3work wdavcfg wsetup
)
-- | Generation phase for one work setup: card preparation, optional LHE sanitizing, PYTHIA, PGS and cleanup.
phase2work :: WorkSetup SimplifiedSUSY -> IO ()
phase2work wsetup = do
r <- flip runReaderT wsetup . runErrorT $ do
ws <- ask
let (ssetup,psetup,param,rsetup) =
((,,,) <$> ws_ssetup <*> ws_psetup <*> ws_param <*> ws_rsetup) ws
cardPrepare
case (lhesanitizer rsetup,pythia rsetup) of
(NoLHESanitize, _) -> return ()
(LHESanitize pid, RunPYTHIA) -> do
sanitizeLHE
runPYTHIA
-- runHEP2LHE
runPGS
runClean
-- updateBanner
(LHESanitize pid, NoPYTHIA) -> do
sanitizeLHE
-- updateBanner
cleanHepFiles
print r
return ()
-- | Upload the generated event files to the WebDAV storage.
phase3work :: WebDAVConfig -> WorkSetup SimplifiedSUSY -> IO ()
phase3work wdav wsetup = do
uploadEventFull NoUploadHEP wdav wsetup
return ()
| wavewave/lhc-analysis-collection | exe/evchainRunSimplifiedWW.hs | gpl-3.0 | 7,017 | 0 | 20 | 2,110 | 2,069 | 1,175 | 894 | 161 | 3 |
module Experimentation.P04 (p04_test) where
import Test.HUnit
import Data.List
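-- P04: find the number of elements of a list, once with the Prelude's
-- 'length' and once by explicit recursion.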
length_nat :: [a] -> Int
length_nat xs = length xs
length_rec :: [a] -> Int
length_rec [] = 0
length_rec (_:as) = 1 + length_rec as
-- Tests
test_length_nat = TestCase $ do
assertEqual "for (length_nat [0..100])" 101 (length_nat [0..100])
assertEqual "for (length_nat [1])" 1 (length_nat [1])
assertEqual "for (length_nat [])" 0 (length_nat [])
test_length_rec = TestCase $ do
assertEqual "for (length_rec [0..100])" 101 (length_rec [0..100])
assertEqual "for (length_rec [1])" 1 (length_rec [1])
assertEqual "for (length_rec [])" 0 (length_rec [])
p04_test = do
runTestTT p04_tests
p04_tests = TestList [
TestLabel "test_length_nat" test_length_nat,
TestLabel "test_length_rec" test_length_rec
]
| adarqui/99problems-hs | Experimentation/P04.hs | gpl-3.0 | 799 | 0 | 11 | 125 | 258 | 130 | 128 | 21 | 1 |
{- ============================================================================
| Copyright 2011 Matthew D. Steele <[email protected]> |
| |
| This file is part of Fallback. |
| |
| Fallback is free software: you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation, either version 3 of the License, or (at your option) |
| any later version. |
| |
| Fallback is distributed in the hope that it will be useful, but WITHOUT |
| ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for |
| more details. |
| |
| You should have received a copy of the GNU General Public License along |
| with Fallback. If not, see <http://www.gnu.org/licenses/>. |
============================================================================ -}
module Fallback.Scenario.Triggers
(startingArea, startingMark, scenarioTriggers, initialProgress,
getAreaDevice, getAreaEntrance, getAreaExits, getAreaLinks, getAreaTerrain,
getAreaTriggers, getMonsterScript, getRegionBackground,
ScriptedBattle(..), getScriptedBattle)
where
import Fallback.Scenario.Compile
import Fallback.Scenario.Triggers.Corenglen (compileCorenglen)
import Fallback.Scenario.Triggers.FrozenPass (compileFrozenPass)
import Fallback.Scenario.Triggers.Globals
import Fallback.Scenario.Triggers.Holmgare (compileHolmgare)
import Fallback.Scenario.Triggers.Icehold (compileIcehold)
import Fallback.Scenario.Triggers.IcyConfluence (compileIcyConfluence)
import Fallback.Scenario.Triggers.IronMine (compileIronMine)
import Fallback.Scenario.Triggers.MountainPath (compileMountainPath)
import Fallback.Scenario.Triggers.PerilousRoad (compilePerilousRoad)
import Fallback.Scenario.Triggers.SewerCaves (compileSewerCaves)
import Fallback.Scenario.Triggers.StoneBridge (compileStoneBridge)
import Fallback.Scenario.Triggers.Tragorda (compileTragorda)
import Fallback.Scenario.Triggers.WhistlingWoods (compileWhistlingWoods)
import Fallback.State.Creature (MonsterTownAI(..))
import Fallback.State.Progress (Progress)
import Fallback.State.Tags
-------------------------------------------------------------------------------
startingArea :: AreaTag
startingArea = MountainPath
startingMark :: String
startingMark = "Start"
initialProgress :: Progress
initialProgress = scenarioInitialProgress scenarioTriggers
-------------------------------------------------------------------------------
scenarioTriggers :: ScenarioTriggers
scenarioTriggers = compileScenario $ do
globals <- compileGlobals
demonPitOpen <- newGlobalVar 928347 False
---------------------------------- Regions ----------------------------------
compileRegion Longvale (const "regions/Longvale.png")
compileRegion Svengaard (const "regions/Svengaard.png")
compileRegion Tahariam $ \party -> if getVar demonPitOpen party
then "regions/Tahariam2.png"
else "regions/Tahariam1.png"
compileRegion Bailagua (const "regions/Bailagua.png")
compileRegion Emitsuibom (const "regions/Emitsuibom.png")
-- OtherFloors is a dummy region containing e.g. 2nd floors of areas from
-- other regions. The background image doesn't really matter.
  compileRegion OtherFloors (const "regions/Longvale.png")
----------------------------------- Areas -----------------------------------
compileArea Valhalla Nothing $ do
onStartDaily 409487 $ do
addUnlockedDoors globals
simpleEnemy_ 660632 "DemonWolf1" DaemonWolf MindlessAI
simpleEnemy_ 660633 "DemonWolf2" DaemonWolf ChaseAI
simpleEnemy_ 660634 "Wolf" Wolf (PatrolAI "Wolf" "WolfPatrol")
simpleEnemy_ 978292 "Bat" CaveBat MindlessAI
compileMountainPath globals
compileCorenglen globals
compileFrozenPass globals
compileHolmgare globals
compileSewerCaves globals
compilePerilousRoad globals
compileStoneBridge globals
compileTragorda globals
compileWhistlingWoods globals
compileIcyConfluence globals
compileArea Marata Nothing $ do
makeExit IcyConfluence ["ToIcyConfluence"] "FromIcyConfluence"
makeExit IronMine ["ToIronMine"] "FromIronMine"
makeExit NorthernTundra ["ToNorthernTundra"] "FromNorthernTundra"
onStartDaily 109833 $ do
addUnlockedDoors globals
compileIronMine globals
compileArea NorthernTundra Nothing $ do
makeExit Marata ["ToMarata"] "FromMarata"
makeExit Duskwood ["ToDuskwood"] "FromDuskwood"
compileArea Duskwood Nothing $ do
makeExit WhistlingWoods ["ToWhistlingWoods"] "FromWhistlingWoods"
makeExit Icehold ["ToIcehold"] "FromIcehold"
makeExit NorthernTundra ["ToNorthernTundra"] "FromNorthernTundra"
makeExit Tragorda ["ToTragorda"] "FromTragorda"
compileIcehold globals
compileArea BurningMaze Nothing $ return ()
compileArea Gazerpit Nothing $ return ()
compileArea ArcaneLab Nothing $ return ()
compileArea InnerLab Nothing $ return ()
-------------------------------------------------------------------------------
| mdsteele/fallback | src/Fallback/Scenario/Triggers.hs | gpl-3.0 | 5,637 | 0 | 14 | 1,228 | 810 | 413 | 397 | 77 | 2 |
{-# LANGUAGE ConstraintKinds
,DataKinds
,FlexibleInstances
,KindSignatures
,MultiParamTypeClasses
,TemplateHaskell
,TypeFamilies
,TypeSynonymInstances
,TypeOperators
,UndecidableInstances
#-}
{-|
Module : Data.NonZero.Nat
Copyright : (c) Samuel A. Yallop, 2015
Maintainer : [email protected]
Stability : experimental
Represent non-zero natural numbers at the type level.
This module is separate from NonZero.Natural so that we
can avoid exporting unwanted Term-level 'One' and 'Suc's
which we must declare to get DataKinds to derive the wanted
Type-level constructors.
Ideally we would be able to write something like:
@
kind Nat
= One
| Suc Nat
@
which is what we are simulating.
-}
module Data.NonZero.Nat
(Nat -- Type/Kind
,One -- Type :: Nat
,Suc -- Type :: Nat
,Plus,(:+:)
,Mult,(:*:)
,Minus,(:-:)
,(:<=:)(..)
,toNat
) where
import Language.Haskell.TH
-- | 'Nat' is used as the *kind* of natural numbers excluding a zero.
--
-- E.G.
--
-- @
--
-- One
--
-- Suc One
--
-- Suc Suc One
--
-- @
--
-- Are all types of kind 'Nat'.
data Nat
= One
| Suc Nat
-- | @ One :: Nat @
type One = 'One
-- | @ Suc :: Nat -> Nat @
type Suc = 'Suc
-- | Addition of 'Nat's.
type family Plus (n :: Nat) (m :: Nat) :: Nat where
Plus One m = Suc m
Plus (Suc n) m = Suc (Plus n m)
type n :+: m = Plus n m
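-- | Truncated subtraction of 'Nat's; since there is no zero,
-- @Minus One One@ is defined as 'One'.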
type family Minus (n :: Nat) (m :: Nat) :: Nat where
Minus (Suc n) (Suc m) = Minus n m
Minus (Suc n) One = n
Minus One One = One
type n :-: m = Minus n m
type family Mult (n :: Nat) (m :: Nat) :: Nat where
Mult One m = m
Mult (Suc n) m = Plus m (Mult n m)
type n :*: m = Mult n m
-- | Decide LessThanOrEqual between two Nats.
type family (n :: Nat) :<= (m :: Nat) :: Bool where
One :<= n = True
Suc n :<= One = False
Suc n :<= Suc m = n :<= m
-- | 'n' is less than or equal to 'm'.
type (n :: Nat) :<=: (m :: Nat) = (n :<= m) ~ True
-- | TemplateHaskell type for conveniently writing 'Nat' types.
--
-- E.G.
--
-- @ $(toNat 3) @ ~> @ :: Suc (Suc One) @
toNat :: Integer -> Q Type
toNat 1 = [t| 'One |]
toNat n = [t| 'Suc $(toNat (n-1)) |]
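-- Example usage (a sketch; 'Three' and 'Four' are illustrative aliases):
--
-- > type Three = $(toNat 3)
-- > type Four = $(toNat 4)
-- >
-- > -- Plus Three One reduces to Four, and Three :<=: Four holds.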
| syallop/NonZero | Data/NonZero/Nat.hs | gpl-3.0 | 2,256 | 0 | 8 | 644 | 496 | 304 | 192 | 46 | 1 |
-- Implementation of the collatz sequence in Haskell
import Debug.Trace
collatz :: Integer -> Integer
collatz 1 = trace "N=1" 1
collatz n = if even n
            then trace ("N=" ++ show n ++ " is even") (collatz (div n 2))
            else trace ("N=" ++ show n ++ " is not even") (collatz (3*n+1))
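-- Example (GHCi): forcing @collatz 6@ prints the trace messages for
-- 6, 3, 10, 5, 16, 8, 4, 2 and 1 as evaluation proceeds, and returns 1.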
| situx/Misc | Haskell/collatz.hs | gpl-3.0 | 282 | 2 | 10 | 62 | 114 | 58 | 56 | 6 | 2 |
{-# LANGUAGE TemplateHaskell, TypeFamilies, TypeApplications #-}
module Lamdu.Sugar.Convert.Binder.Params
( convertLamParams, convertEmptyParams
, convertBinderToFunction
, convertToRecordParams
, StoredLam(..), slLam, slLambdaProp
, NewParamPosition(..), addFieldParam
, isParamAlwaysUsedWithGetField
, mkVarInfo
) where
import qualified Control.Lens.Extended as Lens
import Control.Monad.Once (OnceT)
import Control.Monad.Transaction (MonadTransaction, getP, setP)
import qualified Data.List.Extended as List
import Data.Maybe.Extended (unsafeUnjust)
import Data.Property (MkProperty', modP)
import qualified Data.Set as Set
import Hyper
import Hyper.Syntax (FuncType(..), funcIn)
import Hyper.Syntax.Nominal (NominalInst(..))
import Hyper.Syntax.Row (RowExtend(..), FlatRowExtends(..), freExtends, freRest)
import qualified Hyper.Syntax.Row as Row
import Hyper.Type.Functor (F)
import Hyper.Type.Prune (Prune(..), _Unpruned)
import qualified Lamdu.Calc.Lens as ExprLens
import qualified Lamdu.Calc.Term as V
import qualified Lamdu.Calc.Type as T
import qualified Lamdu.Data.Anchors as Anchors
import qualified Lamdu.Data.Ops as DataOps
import qualified Lamdu.Data.Ops.Subexprs as SubExprs
import Lamdu.Expr.IRef (ValI, HRef)
import qualified Lamdu.Expr.IRef as ExprIRef
import qualified Lamdu.Expr.UniqueId as UniqueId
import qualified Lamdu.Sugar.Config as Config
import Lamdu.Sugar.Convert.Binder.Types (BinderKind(..))
import qualified Lamdu.Sugar.Convert.Input as Input
import Lamdu.Sugar.Convert.Monad (ConvertM(..))
import qualified Lamdu.Sugar.Convert.Monad as ConvertM
import qualified Lamdu.Sugar.Convert.TId as ConvertTId
import qualified Lamdu.Sugar.Convert.TaggedList as ConvertTaggedList
import qualified Lamdu.Sugar.Convert.Tag as ConvertTag
import Lamdu.Sugar.Internal
import qualified Lamdu.Sugar.Internal.EntityId as EntityId
import Lamdu.Sugar.Types
import Revision.Deltum.IRef (IRef)
import Revision.Deltum.Transaction (Transaction)
import Lamdu.Prelude
type T = Transaction
data FieldParam = FieldParam
{ fpTag :: T.Tag
, fpFieldType :: Pure # T.Type
}
data StoredLam m = StoredLam
{ _slLam :: V.TypedLam V.Var (HCompose Prune T.Type) V.Term # Ann (HRef m)
, _slLambdaProp :: HRef m # V.Term
}
Lens.makeLenses ''StoredLam
mkStoredLam ::
V.TypedLam V.Var (HCompose Prune T.Type) V.Term # Ann (Input.Payload m) ->
Input.Payload m # V.Term -> StoredLam m
mkStoredLam lam pl =
StoredLam
(hmap (Proxy @(Recursively HFunctor) #> hflipped %~ hmap (const (^. Input.stored))) lam)
(pl ^. Input.stored)
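-- | Payloads of uses of the variable that are neither in function position
-- of an application nor already the argument of a hole application; these
-- are the uses that 'wrapUnappliedUsesOfVar' wraps with holes when the
-- function's parameters change.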
unappliedUsesOfVar :: V.Var -> Ann a # V.Term -> [a # V.Term]
unappliedUsesOfVar var (Ann pl (V.BLeaf (V.LVar v)))
| v == var = [pl]
unappliedUsesOfVar var (Ann _ (V.BApp (App f x))) =
rf <> rx
where
rf | Lens.has ExprLens.valVar f = []
| otherwise = unappliedUsesOfVar var f
rx | Lens.has ExprLens.valHole f && Lens.has ExprLens.valVar x = []
| otherwise = unappliedUsesOfVar var x
unappliedUsesOfVar var x =
hfoldMap
( \case
HWitness V.W_Term_Term -> unappliedUsesOfVar var
_ -> const []
) (x ^. hVal)
wrapUnappliedUsesOfVar :: Monad m => V.Var -> Ann (HRef m) # V.Term -> T m ()
wrapUnappliedUsesOfVar var = traverse_ DataOps.applyHoleTo . unappliedUsesOfVar var
argsOfCallTo :: V.Var -> Ann a # V.Term -> [a # V.Term]
argsOfCallTo var (Ann _ (V.BApp (App (Ann _ (V.BLeaf (V.LVar v))) x)))
| v == var = [x ^. hAnn]
argsOfCallTo var x =
hfoldMap
( \case
HWitness V.W_Term_Term -> argsOfCallTo var
_ -> const []
) (x ^. hVal)
changeCallArgs ::
Monad m =>
(ValI m -> T m (ValI m)) -> Ann (HRef m) # V.Term -> V.Var -> T m ()
changeCallArgs change v var =
do
argsOfCallTo var v & traverse_ (\x -> x ^. ExprIRef.iref & change >>= x ^. ExprIRef.setIref)
wrapUnappliedUsesOfVar var v
-- | If the lam is bound to a variable, we can fix all uses of the
-- variable. When it isn't, we may need to fix the lam itself
fixLamUsages ::
Monad m =>
ConvertM m
((ValI m -> T m (ValI m)) -> BinderKind m -> StoredLam m -> T m ())
fixLamUsages =
ConvertM.typeProtectedSetToVal
<&> \protectedSetToVal fixOp binderKind storedLam ->
case binderKind of
BinderKindDef defI ->
changeCallArgs fixOp (storedLam ^. slLam . V.tlOut) (ExprIRef.globalId defI)
BinderKindLet redexLam ->
changeCallArgs fixOp (redexLam ^. V.tlOut) (redexLam ^. V.tlIn)
BinderKindLambda ->
protectedSetToVal prop (prop ^. ExprIRef.iref) & void
where
prop = storedLam ^. slLambdaProp
writeNewParamList ::
Monad m =>
[T.Tag] ->
T m (F (IRef m) # HCompose Prune T.Type)
writeNewParamList tags =
hcomposed _Unpruned . T._TRecord . _HCompose #
foldl extend (newTerm (hcomposed _Unpruned # T.REmpty)) tags
& newTerm
& ExprIRef.writeRecursively
<&> (^. hAnn . _1)
where
newTerm = Ann (ExprIRef.WriteNew :*: Const ())
extend rest f =
hcomposed _Unpruned . T._RExtend #
RowExtend f (_HCompose # newTerm (_HCompose # Pruned)) (_HCompose # rest)
& newTerm
addFieldParam ::
Monad m =>
ConvertM m
(T m (ValI m) -> BinderKind m -> StoredLam m -> (T.Tag -> [T.Tag]) -> T.Tag -> T m ())
addFieldParam =
fixLamUsages
<&>
\fixUsages mkArg binderKind storedLam mkTags tag ->
do
let t = storedLam ^. slLam . V.tlInType
case t ^. hVal . _HCompose of
Pruned ->
writeNewParamList (mkTags tag)
>>= t ^. hAnn . ExprIRef.setIref
Unpruned (HCompose (T.TRecord (HCompose r))) ->
do
fieldType <- _HCompose # Pruned & ExprIRef.newValI
hcomposed _Unpruned . T._RExtend #
RowExtend tag (_HCompose # fieldType) (_HCompose # (r ^. hAnn . ExprIRef.iref))
& ExprIRef.newValI
>>= r ^. hAnn . ExprIRef.setIref
_ -> error "adding field to type that isn't a record!"
let addFieldToCall argI =
do
newArg <- mkArg
RowExtend tag newArg argI
& V.BRecExtend & ExprIRef.newValI
fixUsages addFieldToCall binderKind storedLam
getFieldOnVar :: Lens.Traversal' (Pure # V.Term) (V.Var, T.Tag)
getFieldOnVar =
_Pure . V._BApp . inApp
where
inApp f (V.App (Pure (V.BLeaf (V.LGetField t))) (Pure (V.BLeaf (V.LVar v)))) =
f (v, t) <&> pack
inApp _ other = pure other
pack (v, t) = V.App (Pure (V.BLeaf (V.LGetField t))) (Pure (V.BLeaf (V.LVar v)))
getFieldParamsToHole ::
Monad m =>
T.Tag -> V.TypedLam V.Var (HCompose Prune T.Type) V.Term # Ann (HRef m) -> T m ()
getFieldParamsToHole tag (V.TypedLam param _paramTyp lamBody) =
SubExprs.onMatchingSubexprs SubExprs.toHole (getFieldOnVar . Lens.only (param, tag)) lamBody
getFieldParamsToParams ::
Monad m =>
V.TypedLam V.Var (HCompose Prune T.Type) V.Term # Ann (HRef m) -> T.Tag -> T m ()
getFieldParamsToParams (V.TypedLam param _paramTyp lamBody) tag =
SubExprs.onMatchingSubexprs (toParam . (^. ExprIRef.iref))
(getFieldOnVar . Lens.only (param, tag)) lamBody
where
toParam bodyI = ExprIRef.writeValI bodyI $ V.BLeaf $ V.LVar param
fixCallArgRemoveField :: Monad m => T.Tag -> ValI m -> T m (ValI m)
fixCallArgRemoveField tag argI =
ExprIRef.readValI argI
>>= \case
V.BRecExtend (RowExtend t v restI)
| t == tag -> pure restI
| otherwise ->
do
newRestI <- fixCallArgRemoveField tag restI
when (newRestI /= restI) $
ExprIRef.writeValI argI $
V.BRecExtend $ RowExtend t v newRestI
pure argI
_ -> pure argI
fixCallToSingleArg ::
Monad m => T.Tag -> ValI m -> T m (ValI m)
fixCallToSingleArg tag argI =
ExprIRef.readValI argI
>>= \case
V.BRecExtend (RowExtend t v restI)
| t == tag -> pure v
| otherwise -> fixCallToSingleArg tag restI
_ -> pure argI
delFieldParamAndFixCalls ::
Monad m =>
BinderKind m -> [T.Tag] -> FieldParam -> StoredLam m ->
ConvertM m (T m ())
delFieldParamAndFixCalls binderKind tags fp storedLam =
fixLamUsages
<&> \fixUsages ->
do
case (mNewTags, storedParamType ^. hVal . _HCompose) of
(Just newTags, Pruned) ->
writeNewParamList newTags
>>= storedParamType ^. hAnn . ExprIRef.setIref
(Just{}, Unpruned (HCompose (T.TRecord (HCompose r)))) ->
r & hflipped %~ hmap (\_ i -> ExprIRef.ExistingRef (i ^. ExprIRef.iref) :*: Const ())
& removeField
& ExprIRef.writeRecursively
<&> (^. hAnn . _1)
>>= r ^. hAnn . ExprIRef.setIref
(Just{}, Unpruned _) -> error "removing field from type that isn't a record!"
(Nothing, _) ->
_HCompose # Pruned & ExprIRef.newValI
>>= storedParamType ^. hAnn . ExprIRef.setIref
getFieldParamsToHole tag (storedLam ^. slLam)
traverse_ onLastTag mLastTag
fixUsages fixRecurseArg binderKind storedLam
where
removeField (Ann a (HCompose (Unpruned (HCompose (T.RExtend (RowExtend f t rest))))))
| f == fpTag fp = rest ^. _HCompose
| otherwise =
rest & _HCompose %~ removeField
& RowExtend f t
& Ann a . (hcomposed _Unpruned . T._RExtend #)
    removeField (Ann _ _) = error "expected field not present!"
storedParamType = storedLam ^. slLam . V.tlInType
onLastTag lastTag =
do
getFieldParamsToParams (storedLam ^. slLam) lastTag
setP (Anchors.assocTag (storedLam ^. slLam . V.tlIn)) lastTag
tag = fpTag fp
fixRecurseArg =
maybe (fixCallArgRemoveField tag)
fixCallToSingleArg mLastTag
(mNewTags, mLastTag) =
case List.delete tag tags of
[x] -> (Nothing, Just x)
xs -> (Just xs, Nothing)
fieldParamInfo ::
Monad m =>
BinderKind m -> [T.Tag] -> FieldParam -> StoredLam m ->
TagRef InternalName (OnceT (T m)) (T m) ->
ConvertM m (TaggedItem InternalName (OnceT (T m)) (T m) (LhsField InternalName EvalPrep))
fieldParamInfo binderKind tags fp storedLam tag =
do
postProcess <- ConvertM.postProcessAssert
add <- addFieldParam
let resultInfo () newTag =
ConvertTag.TagResultInfo
(EntityId.ofTaggedEntity param newTag)
(add DataOps.newHole binderKind storedLam (: tags) newTag >> postProcess)
addNext <-
ConvertTag.replace (nameWithContext Nothing param) (Set.fromList tags) (pure ()) resultInfo
>>= ConvertM . lift
del <- delFieldParamAndFixCalls binderKind tags fp storedLam
vinfo <- mkVarInfo (fpFieldType fp)
nest <- Lens.view (ConvertM.scConfig . Config.sugarsEnabled . Config.destructureNested)
pure TaggedItem
{ _tiTag = tag
, _tiAddAfter = addNext
, _tiDelete = del <* postProcess
, _tiValue =
LhsField
{ _fParam =
FuncParam
{ _fpAnnotation = EvalPrep (fpFieldType fp) inst
, _fpUsages = []
, _fpVarInfo = vinfo
}
, _fSubFields = guard nest *> subFields
}
}
where
inst = tag ^. tagRefTag . tagInstance
param = storedLam ^. slLam . V.tlIn
subFields =
fpFieldType fp
^? _Pure . T._TRecord . T.flatRow . Lens.filteredBy (freRest . _Pure . T._REmpty) . freExtends
<&> (^@.. Lens.itraversed)
<&> Lens.mapped %~ makeSubField
makeSubField (subTag, subTyp) =
( Tag
{ _tagName = nameWithContext Nothing inst subTag
, _tagInstance = subId
, _tagVal = subTag
}
, LhsField
{ _fParam =
FuncParam
{ _fpAnnotation = EvalPrep subTyp subId
, _fpUsages = []
, _fpVarInfo = VarGeneric -- TODO: Shouldn't matter!
}
, _fSubFields = Nothing -- Don't support further nesting now
}
)
where
subId = EntityId.ofTaggedEntity inst subTag
changeGetFieldTags ::
Monad m =>
V.Var -> T.Tag -> T.Tag -> Ann (HRef m) # V.Term -> T m ()
changeGetFieldTags param prevTag chosenTag x =
case x ^. hVal of
V.BApp (V.App (Ann a (V.BLeaf (V.LGetField t))) (Ann _ (V.BLeaf (V.LVar v))))
| v == param && t == prevTag ->
V.LGetField chosenTag & V.BLeaf & ExprIRef.writeValI (a ^. ExprIRef.iref)
| otherwise -> pure ()
V.BLeaf (V.LVar v)
| v == param -> DataOps.applyHoleTo (x ^. hAnn) & void
b ->
htraverse_
( \case
HWitness V.W_Term_Term -> changeGetFieldTags param prevTag chosenTag
_ -> const (pure ())
) b
setFieldParamTag ::
Monad m =>
Maybe (MkProperty' (T m) PresentationMode) -> BinderKind m ->
StoredLam m -> [T.Tag] -> T.Tag -> ConvertM m (T.Tag -> T m ())
setFieldParamTag mPresMode binderKind storedLam prevTagList prevTag =
(,) <$> fixLamUsages <*> ConvertM.postProcessAssert
<&>
\(fixUsages, postProcess) chosenTag ->
do
traverse_ (`modP` (<&> Lens.filteredBy (Lens.only prevTag) .~ chosenTag)) mPresMode
case storedParamType ^. hVal . _HCompose of
Pruned ->
writeNewParamList (prevTagList <&> Lens.filteredBy (Lens.only prevTag) .~ chosenTag)
>>= storedParamType ^. hAnn . ExprIRef.setIref
Unpruned (HCompose (T.TRecord (HCompose r))) ->
r & hflipped %~ hmap (\_ i -> ExprIRef.ExistingRef (i ^. ExprIRef.iref) :*: Const ())
& changeField
& ExprIRef.writeRecursively
<&> (^. hAnn . _1)
>>= r ^. hAnn . ExprIRef.setIref
where
changeField (Ann a (HCompose (Unpruned (HCompose (T.RExtend (RowExtend f t rest)))))) =
( if prevTag == f
then RowExtend chosenTag t rest
else rest & _HCompose %~ changeField & RowExtend f t
) & Ann a . (hcomposed _Unpruned . T._RExtend #)
              changeField (Ann _ _) = error "expected field not present!"
_ -> error "changing field in type that isn't a record!"
let fixArg argI (V.BRecExtend recExtend)
| recExtend ^. Row.eKey == prevTag =
argI <$
ExprIRef.writeValI argI
(V.BRecExtend (recExtend & Row.eKey .~ chosenTag))
| otherwise =
argI <$
( changeFieldToCall (recExtend ^. Row.eRest)
<&> (\x -> recExtend & Row.eRest .~ x)
<&> V.BRecExtend
>>= ExprIRef.writeValI argI
)
fixArg argI _ =
DataOps.newHole
<&> (`V.App` argI) <&> V.BApp
>>= ExprIRef.newValI
changeFieldToCall argI = ExprIRef.readValI argI >>= fixArg argI
fixUsages changeFieldToCall binderKind storedLam
changeGetFieldTags
(storedLam ^. slLam . V.tlIn) prevTag chosenTag
(storedLam ^. slLam . V.tlOut)
postProcess
where
storedParamType = storedLam ^. slLam . V.tlInType
lhsFieldTags :: Lens.Getting _ (LhsField InternalName v) T.Tag
lhsFieldTags = fSubFields . Lens._Just . Lens.folded . (_1 . tagVal <> _2 . lhsFieldTags)
convertRecordParams ::
Monad m =>
Maybe (MkProperty' (T m) PresentationMode) -> BinderKind m -> [FieldParam] -> StoredLam m ->
ConvertM m (LhsNames InternalName (OnceT (T m)) (T m) EvalPrep)
convertRecordParams mPresMode binderKind fieldParams storedLam =
do
ps <- traverse mkParam fieldParams
let allRecursiveFields = ps ^.. traverse . (tiTag . tagRefTag . tagVal <> tiValue . lhsFieldTags)
let hasDups = List.sort allRecursiveFields & List.group & Lens.has (traverse . Lens.ix 1)
let fixedParams
| hasDups = ps <&> tiValue . fSubFields .~ Nothing
| otherwise = ps
postProcess <- ConvertM.postProcessAssert
add <- addFieldParam
let resultInfo () tag =
ConvertTag.TagResultInfo
(EntityId.ofTaggedEntity param tag)
(add DataOps.newHole binderKind storedLam (: (fieldParams <&> fpTag)) tag >> postProcess)
addFirstSelection <-
ConvertTag.replace (nameWithContext Nothing param) (Set.fromList tags) (pure ()) resultInfo >>= ConvertM . lift
ConvertTaggedList.convert addFirstSelection fixedParams & LhsRecord & pure
where
tags = fieldParams <&> fpTag
param = storedLam ^. slLam . V.tlIn
mkParam fp =
do
setField <- setFieldParamTag mPresMode binderKind storedLam tagList tag
let resultInfo () = ConvertTag.TagResultInfo <$> EntityId.ofTaggedEntity param <*> setField
ConvertTag.ref tag (Just (ConvertTag.NameContext Nothing (UniqueId.toUUID param)))
(Set.delete tag (Set.fromList tagList)) (pure ()) resultInfo
>>= ConvertM . lift
>>= fieldParamInfo binderKind tags fp storedLam
where
tag = fpTag fp
tagList = fieldParams <&> fpTag
removeCallsToVar ::
Monad m =>
V.Var -> Ann (HRef m) # V.Term -> T m ()
removeCallsToVar funcVar x =
do
SubExprs.onMatchingSubexprs changeRecursion
( _Pure . V._BApp . V.appFunc
. _Pure . V._BLeaf . V._LVar . Lens.only funcVar
) x
wrapUnappliedUsesOfVar funcVar x
where
changeRecursion prop =
ExprIRef.readValI (prop ^. ExprIRef.iref)
>>= \case
V.BApp (V.App f _) -> (prop ^. ExprIRef.setIref) f
_ -> error "assertion: expected BApp"
makeDeleteLambda :: Monad m => BinderKind m -> StoredLam m -> ConvertM m (T m ())
makeDeleteLambda binderKind (StoredLam (V.TypedLam paramVar _paramTyp lamBodyStored) lambdaProp) =
ConvertM.typeProtectedSetToVal
<&> \protectedSetToVal ->
do
SubExprs.getVarsToHole paramVar lamBodyStored
case binderKind of
BinderKindDef defI ->
removeCallsToVar
(ExprIRef.globalId defI) lamBodyStored
BinderKindLet redexLam ->
removeCallsToVar
(redexLam ^. V.tlIn) (redexLam ^. V.tlOut)
BinderKindLambda -> pure ()
let lamBodyI = lamBodyStored ^. hAnn . ExprIRef.iref
protectedSetToVal lambdaProp lamBodyI & void
convertVarToGetField ::
Monad m =>
T.Tag -> V.Var -> Ann (HRef m) # V.Term -> T m ()
convertVarToGetField tagForVar paramVar =
SubExprs.onGetVars (convertVar . (^. ExprIRef.iref)) paramVar
where
convertVar bodyI =
V.App
<$> ExprIRef.newValI (V.BLeaf (V.LGetField tagForVar))
<*> ExprIRef.newValI (V.BLeaf (V.LVar paramVar))
<&> V.BApp
>>= ExprIRef.writeValI bodyI
wrapArgWithRecord ::
Monad m => T m (ValI m) -> T.Tag -> T.Tag -> ValI m -> T m (ValI m)
wrapArgWithRecord mkNewArg oldParam newParam oldArg =
do
newArg <- mkNewArg
ExprIRef.newValI (V.BLeaf V.LRecEmpty)
>>= ExprIRef.newValI . V.BRecExtend . RowExtend newParam newArg
>>= ExprIRef.newValI . V.BRecExtend . RowExtend oldParam oldArg
data NewParamPosition = NewParamBefore | NewParamAfter
convertToRecordParams ::
Monad m =>
ConvertM m
(T m (ValI m) -> BinderKind m -> StoredLam m -> NewParamPosition -> T.Tag ->
T m ())
convertToRecordParams =
fixLamUsages <&>
\fixUsages mkNewArg binderKind storedLam newParamPosition newParam ->
do
let paramVar = storedLam ^. slLam . V.tlIn
oldParam <-
getP (Anchors.assocTag paramVar)
>>=
\x ->
if x == Anchors.anonTag
then DataOps.genNewTag
else pure x
-- the associated tag becomes an actual field in the new
-- params record, remove the duplicate associated tag so that
-- the params record is not named the same as the first param
setP (Anchors.assocTag paramVar) Anchors.anonTag
case newParamPosition of
NewParamBefore -> [newParam, oldParam]
NewParamAfter -> [oldParam, newParam]
& Lens.itraverse_ (flip DataOps.setTagOrder)
let t = storedLam ^. slLam . V.tlInType
newParamType <- _HCompose # Pruned & ExprIRef.newValI
hcomposed _Unpruned # T.REmpty
& ExprIRef.newValI
>>= extend oldParam (t ^. hAnn . ExprIRef.iref)
>>= extend newParam newParamType
<&> (hcomposed _Unpruned . T._TRecord . _HCompose #)
>>= ExprIRef.newValI
>>= t ^. hAnn . ExprIRef.setIref
convertVarToGetField oldParam paramVar
(storedLam ^. slLam . V.tlOut)
fixUsages (wrapArgWithRecord mkNewArg oldParam newParam)
binderKind storedLam
where
extend tag typ rest =
hcomposed _Unpruned . T._RExtend #
RowExtend tag (_HCompose # typ) (_HCompose # rest)
& ExprIRef.newValI
lamParamType :: Input.Payload m # V.Term -> Pure # T.Type
lamParamType lamExprPl =
unsafeUnjust "Lambda value not inferred to a function type?!" $
lamExprPl ^? Input.inferredType . _Pure . T._TFun . funcIn
mkVarInfo :: MonadTransaction n m => Pure # T.Type -> m VarInfo
mkVarInfo (Pure T.TFun{}) = pure VarFunction
mkVarInfo (Pure T.TVar{}) = pure VarGeneric
mkVarInfo (Pure (T.TRecord (Pure T.REmpty))) = pure VarUnit
mkVarInfo (Pure T.TRecord{}) = pure VarRecord
mkVarInfo (Pure (T.TVariant (Pure T.REmpty))) = pure VarVoid
mkVarInfo (Pure T.TVariant{}) = pure VarVariant
mkVarInfo (Pure (T.TInst (NominalInst tid _))) =
ConvertTId.convert tid
<&> VarNominal . (tidName %~ (^. inTag))
convertNonRecordParam ::
Monad m =>
BinderKind m ->
V.TypedLam V.Var (HCompose Prune T.Type) V.Term # Ann (Input.Payload m) ->
Input.Payload m # V.Term ->
ConvertM m (LhsNames InternalName (OnceT (T m)) (T m) EvalPrep)
convertNonRecordParam binderKind lam@(V.TypedLam param _ _) lamExprPl =
do
nullParamSugar <-
Lens.view (ConvertM.scConfig . Config.sugarsEnabled . Config.nullaryParameter)
let typ = lamParamType lamExprPl
varInfo <- mkVarInfo typ
tag <- ConvertTag.taggedEntity (Just varInfo) param >>= ConvertM . lift
del <- makeDeleteLambda binderKind storedLam
postProcess <- ConvertM.postProcessAssert
oldParam <- Anchors.assocTag param & getP
let mkAddParam pos
| oldParam == Anchors.anonTag = EntityId.ofTaggedEntity param oldParam & NeedToPickTagToAddNext & pure
| otherwise =
do
addParamAfter <- convertToRecordParams ?? DataOps.newHole ?? binderKind ?? storedLam ?? pos
let resultInfo () =
ConvertTag.TagResultInfo <$> EntityId.ofTaggedEntity param <*> (addParamAfter <&> (<* postProcess))
ConvertTag.replace (nameWithContext Nothing param) (Set.singleton oldParam) (pure ()) resultInfo
>>= ConvertM . lift <&> AddNext
addPrev <- mkAddParam NewParamBefore
addNext <- mkAddParam NewParamAfter
LhsVar Var
{ _vParam =
FuncParam
{ _fpAnnotation =
EvalPrep
{ _eType = typ
, _eEvalId = tag ^. oTag . tagRefTag . tagInstance
}
, _fpVarInfo = varInfo
, _fpUsages =
-- TODO: Replace varRefsOfLambda mechanism with one that traverses the sugar,
-- So it goes to actual first use after reordering by sugar.
lamExprPl ^. Input.varRefsOfLambda
}
, _vTag = tag
, _vAddPrev = addPrev
, _vAddNext = addNext
, _vDelete = del <* postProcess
, _vIsNullParam =
nullParamSugar
&& Lens.has (_Pure . T._TRecord . _Pure . T._REmpty) typ
&& null (lamExprPl ^. Input.varRefsOfLambda)
} & pure
where
storedLam = mkStoredLam lam lamExprPl
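-- | Whether every use of the lambda's parameter appears directly under a
-- get-field; only then is the parameter presented as record field
-- parameters (see 'convertLamParams').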
isParamAlwaysUsedWithGetField :: V.TypedLam V.Var (HCompose Prune T.Type) V.Term # Ann a -> Bool
isParamAlwaysUsedWithGetField (V.TypedLam param _paramTyp bod) =
go False bod
where
go isGetFieldChild expr =
case expr ^. hVal of
V.BLeaf (V.LVar v) | v == param -> isGetFieldChild
V.BApp (V.App (Ann _ (V.BLeaf V.LGetField{})) r) -> go True r
x ->
hfoldMap @_ @[Bool]
( \case
HWitness V.W_Term_Term -> (:[]) . go False
HWitness V.W_Term_HCompose_Prune_Type -> const []
) x
& and
convertLamParams ::
Monad m =>
V.TypedLam V.Var (HCompose Prune T.Type) V.Term # Ann (Input.Payload m) ->
Input.Payload m # V.Term ->
ConvertM m (LhsNames InternalName (OnceT (T m)) (T m) EvalPrep)
convertLamParams lambda lambdaPl =
do
sugarLhsRecord <- Lens.view (ConvertM.scConfig . Config.sugarsEnabled . Config.destructure)
case lambdaPl ^. Input.inferredType . _Pure of
T.TFun (FuncType (Pure (T.TRecord composite)) _)
| sugarLhsRecord
, FlatRowExtends fieldsMap (Pure T.REmpty) <- composite ^. T.flatRow
, let fields = fieldsMap ^@.. Lens.itraversed
, List.isLengthAtLeast 2 fields
, isParamAlwaysUsedWithGetField lambda
, let fieldParams = fields <&> uncurry FieldParam
-> convertRecordParams Nothing BinderKindLambda fieldParams (mkStoredLam lambda lambdaPl)
_ -> convertNonRecordParam BinderKindLambda lambda lambdaPl
convertVarToCalls ::
Monad m => T m (ValI m) -> V.Var -> Ann (HRef m) # V.Term -> T m ()
convertVarToCalls mkArg var =
SubExprs.onMatchingSubexprs (\x -> x ^. ExprIRef.iref & change >>= x ^. ExprIRef.setIref)
(_Pure . V._BLeaf . V._LVar . Lens.only var)
where
change x = mkArg >>= ExprIRef.newValI . V.BApp . V.App x
convertBinderToFunction ::
(HasCallStack, Monad m) =>
T m (ValI m) -> BinderKind m -> Ann (HRef m) # V.Term ->
T m (V.Var, HRef m # V.Term)
convertBinderToFunction mkArg binderKind x =
do
(newParam, newValP) <- DataOps.lambdaWrap (x ^. hAnn)
case binderKind of
BinderKindDef defI ->
convertVarToCalls mkArg (ExprIRef.globalId defI) x
BinderKindLet redexLam ->
convertVarToCalls mkArg
(redexLam ^. V.tlIn) (redexLam ^. V.tlOut)
BinderKindLambda -> error "Lambda will never be an empty-params binder"
pure (newParam, newValP)
convertEmptyParams ::
Monad m =>
BinderKind m -> Ann (Input.Payload m) # V.Term -> ConvertM m (Transaction m EntityId)
convertEmptyParams binderKind x =
ConvertM.postProcessAssert
<&>
\postProcess ->
do
(newParam, _) <-
x & hflipped %~ hmap (const (^. Input.stored))
& convertBinderToFunction DataOps.newHole binderKind
postProcess
EntityId.ofTaggedEntity newParam Anchors.anonTag & pure
| lamdu/lamdu | src/Lamdu/Sugar/Convert/Binder/Params.hs | gpl-3.0 | 28,470 | 0 | 26 | 8,922 | 8,797 | 4,420 | 4,377 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Disks.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified persistent disk. Deleting a disk removes its data
-- permanently and is irreversible. However, deleting a disk does not
-- delete any snapshots previously made from the disk. You must separately
-- delete snapshots.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.disks.delete@.
module Network.Google.Resource.Compute.Disks.Delete
(
-- * REST Resource
DisksDeleteResource
-- * Creating a Request
, disksDelete
, DisksDelete
-- * Request Lenses
, ddRequestId
, ddProject
, ddDisk
, ddZone
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.disks.delete@ method which the
-- 'DisksDelete' request conforms to.
type DisksDeleteResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
"disks" :>
Capture "disk" Text :>
QueryParam "requestId" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Operation
-- | Deletes the specified persistent disk. Deleting a disk removes its data
-- permanently and is irreversible. However, deleting a disk does not
-- delete any snapshots previously made from the disk. You must separately
-- delete snapshots.
--
-- /See:/ 'disksDelete' smart constructor.
data DisksDelete =
DisksDelete'
{ _ddRequestId :: !(Maybe Text)
, _ddProject :: !Text
, _ddDisk :: !Text
, _ddZone :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DisksDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddRequestId'
--
-- * 'ddProject'
--
-- * 'ddDisk'
--
-- * 'ddZone'
disksDelete
:: Text -- ^ 'ddProject'
-> Text -- ^ 'ddDisk'
-> Text -- ^ 'ddZone'
-> DisksDelete
disksDelete pDdProject_ pDdDisk_ pDdZone_ =
DisksDelete'
{ _ddRequestId = Nothing
, _ddProject = pDdProject_
, _ddDisk = pDdDisk_
, _ddZone = pDdZone_
}
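-- Example (a sketch; the identifiers are made up and lens operators such as
-- '&' and '?~' are assumed to be in scope):
--
-- > disksDelete "my-project" "my-disk" "us-central1-a"
-- >   & ddRequestId ?~ "123e4567-e89b-12d3-a456-426614174000"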
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
ddRequestId :: Lens' DisksDelete (Maybe Text)
ddRequestId
= lens _ddRequestId (\ s a -> s{_ddRequestId = a})
-- | Project ID for this request.
ddProject :: Lens' DisksDelete Text
ddProject
= lens _ddProject (\ s a -> s{_ddProject = a})
-- | Name of the persistent disk to delete.
ddDisk :: Lens' DisksDelete Text
ddDisk = lens _ddDisk (\ s a -> s{_ddDisk = a})
-- | The name of the zone for this request.
ddZone :: Lens' DisksDelete Text
ddZone = lens _ddZone (\ s a -> s{_ddZone = a})
instance GoogleRequest DisksDelete where
type Rs DisksDelete = Operation
type Scopes DisksDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient DisksDelete'{..}
= go _ddProject _ddZone _ddDisk _ddRequestId
(Just AltJSON)
computeService
where go
= buildClient (Proxy :: Proxy DisksDeleteResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/Disks/Delete.hs | mpl-2.0 | 4,674 | 0 | 17 | 1,090 | 558 | 336 | 222 | 82 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Discovery.APIs.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieve the list of APIs supported at this endpoint.
--
-- /See:/ <https://developers.google.com/discovery/ APIs Discovery Service Reference> for @discovery.apis.list@.
module Network.Google.Resource.Discovery.APIs.List
(
-- * REST Resource
APIsListResource
-- * Creating a Request
, apisList
, APIsList
-- * Request Lenses
, alPreferred
, alName
) where
import Network.Google.Discovery.Types
import Network.Google.Prelude
-- | A resource alias for @discovery.apis.list@ method which the
-- 'APIsList' request conforms to.
type APIsListResource =
"discovery" :>
"v1" :>
"apis" :>
QueryParam "preferred" Bool :>
QueryParam "name" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] DirectoryList
-- | Retrieve the list of APIs supported at this endpoint.
--
-- /See:/ 'apisList' smart constructor.
data APIsList = APIsList'
{ _alPreferred :: !Bool
, _alName :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'APIsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'alPreferred'
--
-- * 'alName'
apisList
:: APIsList
apisList =
APIsList'
{ _alPreferred = False
, _alName = Nothing
}
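-- Example (a sketch; lens operators are assumed to be in scope): list only
-- the preferred version of APIs named "compute":
--
-- > apisList & alPreferred .~ True & alName ?~ "compute"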
-- | Return only the preferred version of an API.
alPreferred :: Lens' APIsList Bool
alPreferred
= lens _alPreferred (\ s a -> s{_alPreferred = a})
-- | Only include APIs with the given name.
alName :: Lens' APIsList (Maybe Text)
alName = lens _alName (\ s a -> s{_alName = a})
instance GoogleRequest APIsList where
type Rs APIsList = DirectoryList
type Scopes APIsList = '[]
requestClient APIsList'{..}
= go (Just _alPreferred) _alName (Just AltJSON)
discoveryService
where go
= buildClient (Proxy :: Proxy APIsListResource)
mempty
| rueshyna/gogol | gogol-discovery/gen/Network/Google/Resource/Discovery/APIs/List.hs | mpl-2.0 | 2,769 | 0 | 13 | 670 | 379 | 227 | 152 | 56 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Classroom.Courses.Teachers.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a teacher of a course. This method returns the following error
-- codes: * \`PERMISSION_DENIED\` if the requesting user is not permitted
-- to delete teachers of this course or for access errors. * \`NOT_FOUND\`
-- if no teacher of this course has the requested ID or if the course does
-- not exist. * \`FAILED_PRECONDITION\` if the requested ID belongs to the
-- primary teacher of this course.
--
-- /See:/ <https://developers.google.com/classroom/ Google Classroom API Reference> for @classroom.courses.teachers.delete@.
module Network.Google.Resource.Classroom.Courses.Teachers.Delete
(
-- * REST Resource
CoursesTeachersDeleteResource
-- * Creating a Request
, coursesTeachersDelete
, CoursesTeachersDelete
-- * Request Lenses
, ctdtXgafv
, ctdtUploadProtocol
, ctdtCourseId
, ctdtAccessToken
, ctdtUploadType
, ctdtUserId
, ctdtCallback
) where
import Network.Google.Classroom.Types
import Network.Google.Prelude
-- | A resource alias for @classroom.courses.teachers.delete@ method which the
-- 'CoursesTeachersDelete' request conforms to.
type CoursesTeachersDeleteResource =
"v1" :>
"courses" :>
Capture "courseId" Text :>
"teachers" :>
Capture "userId" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Deletes a teacher of a course. This method returns the following error
-- codes: * \`PERMISSION_DENIED\` if the requesting user is not permitted
-- to delete teachers of this course or for access errors. * \`NOT_FOUND\`
-- if no teacher of this course has the requested ID or if the course does
-- not exist. * \`FAILED_PRECONDITION\` if the requested ID belongs to the
-- primary teacher of this course.
--
-- /See:/ 'coursesTeachersDelete' smart constructor.
data CoursesTeachersDelete =
CoursesTeachersDelete'
{ _ctdtXgafv :: !(Maybe Xgafv)
, _ctdtUploadProtocol :: !(Maybe Text)
, _ctdtCourseId :: !Text
, _ctdtAccessToken :: !(Maybe Text)
, _ctdtUploadType :: !(Maybe Text)
, _ctdtUserId :: !Text
, _ctdtCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CoursesTeachersDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ctdtXgafv'
--
-- * 'ctdtUploadProtocol'
--
-- * 'ctdtCourseId'
--
-- * 'ctdtAccessToken'
--
-- * 'ctdtUploadType'
--
-- * 'ctdtUserId'
--
-- * 'ctdtCallback'
coursesTeachersDelete
:: Text -- ^ 'ctdtCourseId'
-> Text -- ^ 'ctdtUserId'
-> CoursesTeachersDelete
coursesTeachersDelete pCtdtCourseId_ pCtdtUserId_ =
CoursesTeachersDelete'
{ _ctdtXgafv = Nothing
, _ctdtUploadProtocol = Nothing
, _ctdtCourseId = pCtdtCourseId_
, _ctdtAccessToken = Nothing
, _ctdtUploadType = Nothing
, _ctdtUserId = pCtdtUserId_
, _ctdtCallback = Nothing
}
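-- Example (a sketch; the course identifier is made up): remove the
-- requesting user as a teacher by passing the string literal "me":
--
-- > coursesTeachersDelete "1234567890" "me"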
-- | V1 error format.
ctdtXgafv :: Lens' CoursesTeachersDelete (Maybe Xgafv)
ctdtXgafv
= lens _ctdtXgafv (\ s a -> s{_ctdtXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ctdtUploadProtocol :: Lens' CoursesTeachersDelete (Maybe Text)
ctdtUploadProtocol
= lens _ctdtUploadProtocol
(\ s a -> s{_ctdtUploadProtocol = a})
-- | Identifier of the course. This identifier can be either the
-- Classroom-assigned identifier or an alias.
ctdtCourseId :: Lens' CoursesTeachersDelete Text
ctdtCourseId
= lens _ctdtCourseId (\ s a -> s{_ctdtCourseId = a})
-- | OAuth access token.
ctdtAccessToken :: Lens' CoursesTeachersDelete (Maybe Text)
ctdtAccessToken
= lens _ctdtAccessToken
(\ s a -> s{_ctdtAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ctdtUploadType :: Lens' CoursesTeachersDelete (Maybe Text)
ctdtUploadType
= lens _ctdtUploadType
(\ s a -> s{_ctdtUploadType = a})
-- | Identifier of the teacher to delete. The identifier can be one of the
-- following: * the numeric identifier for the user * the email address of
-- the user * the string literal \`\"me\"\`, indicating the requesting user
ctdtUserId :: Lens' CoursesTeachersDelete Text
ctdtUserId
= lens _ctdtUserId (\ s a -> s{_ctdtUserId = a})
-- | JSONP
ctdtCallback :: Lens' CoursesTeachersDelete (Maybe Text)
ctdtCallback
= lens _ctdtCallback (\ s a -> s{_ctdtCallback = a})
instance GoogleRequest CoursesTeachersDelete where
type Rs CoursesTeachersDelete = Empty
type Scopes CoursesTeachersDelete =
'["https://www.googleapis.com/auth/classroom.rosters"]
requestClient CoursesTeachersDelete'{..}
= go _ctdtCourseId _ctdtUserId _ctdtXgafv
_ctdtUploadProtocol
_ctdtAccessToken
_ctdtUploadType
_ctdtCallback
(Just AltJSON)
classroomService
where go
= buildClient
(Proxy :: Proxy CoursesTeachersDeleteResource)
mempty
| brendanhay/gogol | gogol-classroom/gen/Network/Google/Resource/Classroom/Courses/Teachers/Delete.hs | mpl-2.0 | 6,075 | 0 | 18 | 1,354 | 791 | 466 | 325 | 115 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.SQL.BackupRuns.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a resource containing information about a backup run.
--
-- /See:/ <https://cloud.google.com/sql/docs/reference/latest Cloud SQL Administration API Reference> for @sql.backupRuns.get@.
module Network.Google.Resource.SQL.BackupRuns.Get
(
-- * REST Resource
BackupRunsGetResource
-- * Creating a Request
, backupRunsGet
, BackupRunsGet
-- * Request Lenses
, brgProject
, brgId
, brgInstance
) where
import Network.Google.Prelude
import Network.Google.SQLAdmin.Types
-- | A resource alias for @sql.backupRuns.get@ method which the
-- 'BackupRunsGet' request conforms to.
type BackupRunsGetResource =
"sql" :>
"v1beta4" :>
"projects" :>
Capture "project" Text :>
"instances" :>
Capture "instance" Text :>
"backupRuns" :>
Capture "id" (Textual Int64) :>
QueryParam "alt" AltJSON :> Get '[JSON] BackupRun
-- | Retrieves a resource containing information about a backup run.
--
-- /See:/ 'backupRunsGet' smart constructor.
data BackupRunsGet = BackupRunsGet'
{ _brgProject :: !Text
, _brgId :: !(Textual Int64)
, _brgInstance :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BackupRunsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'brgProject'
--
-- * 'brgId'
--
-- * 'brgInstance'
backupRunsGet
:: Text -- ^ 'brgProject'
-> Int64 -- ^ 'brgId'
-> Text -- ^ 'brgInstance'
-> BackupRunsGet
backupRunsGet pBrgProject_ pBrgId_ pBrgInstance_ =
BackupRunsGet'
{ _brgProject = pBrgProject_
, _brgId = _Coerce # pBrgId_
, _brgInstance = pBrgInstance_
}
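-- Example (a sketch; the identifiers are made up):
--
-- > backupRunsGet "my-project" 42 "my-instance"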
-- | Project ID of the project that contains the instance.
brgProject :: Lens' BackupRunsGet Text
brgProject
= lens _brgProject (\ s a -> s{_brgProject = a})
-- | The ID of this Backup Run.
brgId :: Lens' BackupRunsGet Int64
brgId
= lens _brgId (\ s a -> s{_brgId = a}) . _Coerce
-- | Cloud SQL instance ID. This does not include the project ID.
brgInstance :: Lens' BackupRunsGet Text
brgInstance
= lens _brgInstance (\ s a -> s{_brgInstance = a})
instance GoogleRequest BackupRunsGet where
type Rs BackupRunsGet = BackupRun
type Scopes BackupRunsGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/sqlservice.admin"]
requestClient BackupRunsGet'{..}
= go _brgProject _brgInstance _brgId (Just AltJSON)
sQLAdminService
where go
= buildClient (Proxy :: Proxy BackupRunsGetResource)
mempty
| rueshyna/gogol | gogol-sqladmin/gen/Network/Google/Resource/SQL/BackupRuns/Get.hs | mpl-2.0 | 3,547 | 0 | 16 | 861 | 483 | 286 | 197 | 73 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.NetworkEndpointGroups.ListNetworkEndpoints
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the network endpoints in the specified network endpoint group.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.networkEndpointGroups.listNetworkEndpoints@.
module Network.Google.Resource.Compute.NetworkEndpointGroups.ListNetworkEndpoints
(
-- * REST Resource
NetworkEndpointGroupsListNetworkEndpointsResource
-- * Creating a Request
, networkEndpointGroupsListNetworkEndpoints'
, NetworkEndpointGroupsListNetworkEndpoints'
-- * Request Lenses
, neglneReturnPartialSuccess
, neglneOrderBy
, neglneProject
, neglneZone
, neglnePayload
, neglneNetworkEndpointGroup
, neglneFilter
, neglnePageToken
, neglneMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.networkEndpointGroups.listNetworkEndpoints@ method which the
-- 'NetworkEndpointGroupsListNetworkEndpoints'' request conforms to.
type NetworkEndpointGroupsListNetworkEndpointsResource
=
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
"networkEndpointGroups" :>
Capture "networkEndpointGroup" Text :>
"listNetworkEndpoints" :>
QueryParam "returnPartialSuccess" Bool :>
QueryParam "orderBy" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
NetworkEndpointGroupsListEndpointsRequest
:>
Post '[JSON]
NetworkEndpointGroupsListNetworkEndpoints
-- | Lists the network endpoints in the specified network endpoint group.
--
-- /See:/ 'networkEndpointGroupsListNetworkEndpoints'' smart constructor.
data NetworkEndpointGroupsListNetworkEndpoints' =
NetworkEndpointGroupsListNetworkEndpoints''
{ _neglneReturnPartialSuccess :: !(Maybe Bool)
, _neglneOrderBy :: !(Maybe Text)
, _neglneProject :: !Text
, _neglneZone :: !Text
, _neglnePayload :: !NetworkEndpointGroupsListEndpointsRequest
, _neglneNetworkEndpointGroup :: !Text
, _neglneFilter :: !(Maybe Text)
, _neglnePageToken :: !(Maybe Text)
, _neglneMaxResults :: !(Textual Word32)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'NetworkEndpointGroupsListNetworkEndpoints'' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'neglneReturnPartialSuccess'
--
-- * 'neglneOrderBy'
--
-- * 'neglneProject'
--
-- * 'neglneZone'
--
-- * 'neglnePayload'
--
-- * 'neglneNetworkEndpointGroup'
--
-- * 'neglneFilter'
--
-- * 'neglnePageToken'
--
-- * 'neglneMaxResults'
networkEndpointGroupsListNetworkEndpoints'
:: Text -- ^ 'neglneProject'
-> Text -- ^ 'neglneZone'
-> NetworkEndpointGroupsListEndpointsRequest -- ^ 'neglnePayload'
-> Text -- ^ 'neglneNetworkEndpointGroup'
-> NetworkEndpointGroupsListNetworkEndpoints'
networkEndpointGroupsListNetworkEndpoints' pNeglneProject_ pNeglneZone_ pNeglnePayload_ pNeglneNetworkEndpointGroup_ =
NetworkEndpointGroupsListNetworkEndpoints''
{ _neglneReturnPartialSuccess = Nothing
, _neglneOrderBy = Nothing
, _neglneProject = pNeglneProject_
, _neglneZone = pNeglneZone_
, _neglnePayload = pNeglnePayload_
, _neglneNetworkEndpointGroup = pNeglneNetworkEndpointGroup_
, _neglneFilter = Nothing
, _neglnePageToken = Nothing
, _neglneMaxResults = 500
}
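-- Example (a sketch; the names are made up, @payload@ stands for a
-- 'NetworkEndpointGroupsListEndpointsRequest' value built elsewhere, and
-- lens operators are assumed to be in scope): fetch at most 100 endpoints
-- per page:
--
-- > networkEndpointGroupsListNetworkEndpoints'
-- >     "my-project" "us-central1-a" payload "my-neg"
-- >   & neglneMaxResults .~ 100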
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
neglneReturnPartialSuccess :: Lens' NetworkEndpointGroupsListNetworkEndpoints' (Maybe Bool)
neglneReturnPartialSuccess
= lens _neglneReturnPartialSuccess
(\ s a -> s{_neglneReturnPartialSuccess = a})
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
neglneOrderBy :: Lens' NetworkEndpointGroupsListNetworkEndpoints' (Maybe Text)
neglneOrderBy
= lens _neglneOrderBy
(\ s a -> s{_neglneOrderBy = a})
-- | Project ID for this request.
neglneProject :: Lens' NetworkEndpointGroupsListNetworkEndpoints' Text
neglneProject
= lens _neglneProject
(\ s a -> s{_neglneProject = a})
-- | The name of the zone where the network endpoint group is located. It
-- should comply with RFC1035.
neglneZone :: Lens' NetworkEndpointGroupsListNetworkEndpoints' Text
neglneZone
= lens _neglneZone (\ s a -> s{_neglneZone = a})
-- | Multipart request metadata.
neglnePayload :: Lens' NetworkEndpointGroupsListNetworkEndpoints' NetworkEndpointGroupsListEndpointsRequest
neglnePayload
= lens _neglnePayload
(\ s a -> s{_neglnePayload = a})
-- | The name of the network endpoint group from which you want to generate a
-- list of included network endpoints. It should comply with RFC1035.
neglneNetworkEndpointGroup :: Lens' NetworkEndpointGroupsListNetworkEndpoints' Text
neglneNetworkEndpointGroup
= lens _neglneNetworkEndpointGroup
(\ s a -> s{_neglneNetworkEndpointGroup = a})
-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
neglneFilter :: Lens' NetworkEndpointGroupsListNetworkEndpoints' (Maybe Text)
neglneFilter
= lens _neglneFilter (\ s a -> s{_neglneFilter = a})
-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
neglnePageToken :: Lens' NetworkEndpointGroupsListNetworkEndpoints' (Maybe Text)
neglnePageToken
= lens _neglnePageToken
(\ s a -> s{_neglnePageToken = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
neglneMaxResults :: Lens' NetworkEndpointGroupsListNetworkEndpoints' Word32
neglneMaxResults
= lens _neglneMaxResults
(\ s a -> s{_neglneMaxResults = a})
. _Coerce
instance GoogleRequest
NetworkEndpointGroupsListNetworkEndpoints'
where
type Rs NetworkEndpointGroupsListNetworkEndpoints' =
NetworkEndpointGroupsListNetworkEndpoints
type Scopes
NetworkEndpointGroupsListNetworkEndpoints'
=
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient
NetworkEndpointGroupsListNetworkEndpoints''{..}
= go _neglneProject _neglneZone
_neglneNetworkEndpointGroup
_neglneReturnPartialSuccess
_neglneOrderBy
_neglneFilter
_neglnePageToken
(Just _neglneMaxResults)
(Just AltJSON)
_neglnePayload
computeService
where go
= buildClient
(Proxy ::
Proxy
NetworkEndpointGroupsListNetworkEndpointsResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/NetworkEndpointGroups/ListNetworkEndpoints.hs | mpl-2.0 | 10,120 | 0 | 23 | 2,243 | 992 | 588 | 404 | 158 | 1 |
{- |
Internal module for pretty-printing showable Haskell values.
-}
--
-- Copyright (c) 2009-2022 Stefan Wehr - http://www.stefanwehr.de
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA
--
module Test.Framework.PrettyHaskell (
prettyHaskell, prettyHaskell', prettyHaskellTests
) where
import qualified Data.List as List
import Language.Haskell.Parser
import Language.Haskell.Pretty
import Test.HUnit
import Test.Framework.Utils
prettyHaskell :: Show a => a -> String
prettyHaskell x =
case prettyHaskell' x of
Just s -> s
Nothing -> "FALLBACK: " ++ show x
prettyHaskell' :: Show a => a -> Maybe String
prettyHaskell' x =
fmap (postProcess (show x)) (prettyHaskell'' x )
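-- prettyHaskell'' wraps the shown value in a dummy module binding
-- ("module M where TOP = <value>"), re-parses it with
-- Language.Haskell.Parser.parseModule and pretty-prints the parse tree;
-- postProcess then drops everything up to the "= " of that binding and
-- re-indents the remaining lines, falling back to the plain 'show' output
-- if parsing or post-processing fails.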
prettyHaskell'' :: Show a => a -> Maybe String
prettyHaskell'' x =
let str = show x
code = "module M where TOP = " ++ str
in case parseModule code of
ParseOk x -> Just (prettyPrint x)
ParseFailed _ _ -> Nothing
postProcess :: String -> String -> String
postProcess fallback s =
case dropWhile (\l -> not ('=' `elem` l)) (lines s) of
[] -> fallback
(l:ls) ->
case List.span (/= '=') l of
(prefix, '=':' ':suffix) ->
let indentLen = length prefix + 2
in strip $ unlines (suffix : (map (drop indentLen) ls))
_ -> fallback
prettyHaskellTests =
[("testPrettyHaskell", testPrettyHaskell)]
data MySuperHero
= MySuperHero
{ msh_age :: Int
, msh_name :: String
, msh_address :: String
, msh_fun :: Int
}
deriving (Show)
data MySuperSuperHero
= MySuperSuperHero
{ mssh_isHere :: Bool
, mssh_hero :: MySuperHero
}
deriving (Show)
testPrettyHaskell =
do assertPretty "Just 1" (Just 1)
let hero =
MySuperHero
{ msh_age = 35
, msh_name = "FOO"
, msh_address = "address"
, msh_fun = 1
}
assertPretty
("MySuperHero{msh_age = 35, msh_name = \"FOO\",\n" ++
" msh_address = \"address\", msh_fun = 1}")
hero
assertPretty
("MySuperSuperHero{mssh_isHere = True,\n" ++
" mssh_hero =\n" ++
" MySuperHero{msh_age = 35, msh_name = \"FOO\",\n" ++
" msh_address = \"address\", msh_fun = 1}}")
(MySuperSuperHero { mssh_isHere = True, mssh_hero = hero })
where
assertPretty s x =
assertEqual (s ++ " /=\n" ++ prettyHaskell x)
s (prettyHaskell x)
| skogsbaer/HTF | Test/Framework/PrettyHaskell.hs | lgpl-2.1 | 3,307 | 0 | 20 | 984 | 652 | 355 | 297 | 67 | 3 |
-- |
-- Module      : Network.Entity
-- Copyright : (c) Alexandru Scvortov 2008
-- License : LGPL (see LICENSE file)
-- Maintainer : [email protected]
--
module Network.Entity
( Entity(..)
) where
-- | A network entity.
--
-- e.g. a Peer, a Tracker; anything with an IP and a port, really
class Entity a where
-- | returns the IP of the entity
getIP :: a -> String
-- | returns the port of the entity
getPort :: a -> Int
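-- A minimal usage sketch (illustrative only; 'Peer' is a hypothetical type
-- that is not defined in this package):
--
-- > data Peer = Peer { peerIP :: String, peerPort :: Int }
-- >
-- > instance Entity Peer where
-- >     getIP   = peerIP
-- >     getPort = peerPort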
| scvalex/ltorrent | Network/Entity.hs | lgpl-3.0 | 460 | 0 | 7 | 124 | 53 | 35 | 18 | 5 | 0 |
-- The main module of the application. Performs GLFW-specific initialization and other setup.
module Main ( main ) where
import Control.Applicative
import Control.Concurrent
import Control.Exception
import Control.Monad
import Control.Monad.Trans
import qualified Graphics.UI.GLFW as GLFW
import System.Environment
import System.IO
import qualified ApplicationModel as AM
import qualified FixedStepManager as FSM
import qualified Renderer as R
-- | The error handler to be called when a GLFW error occurs.
errorHandler :: GLFW.ErrorCallback
errorHandler error description = do
hPutStrLn stderr $ (show error) ++ ": " ++ description
-- | Scans a key which is being pressed.
scanKeyPress
:: GLFW.Window -- ^ the window handle
-> GLFW.Key -- ^ a code of a key which is scanned
-> IO Bool -- ^ True indicates the key is being pressed
scanKeyPress window key = (== GLFW.KeyState'Pressed) <$> (GLFW.getKey window key)
-- | Updates the application model.
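-- A/D move the rectangle one unit left or right and W/X one unit up or down
-- per update step; opposite keys held together cancel out.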
update
:: GLFW.Window -- ^ the window handle
-> AM.RectangleData -- ^ data before updating
-> IO AM.RectangleData -- ^ data after updating
update window (AM.RectangleData x y) = do
l <- (toValue (-1)) <$> (scanKeyPress window GLFW.Key'A)
r <- (toValue 1) <$> (scanKeyPress window GLFW.Key'D)
u <- (toValue 1) <$> (scanKeyPress window GLFW.Key'W)
d <- (toValue (-1)) <$> (scanKeyPress window GLFW.Key'X)
return $ AM.RectangleData (x + l + r) (y + u + d)
where
toValue v True = v
toValue _ False = 0
-- | The rendering loop.
renderingLoop
:: GLFW.Window -- ^ the window handle
-> (AM.RectangleData -> IO ()) -- ^ rendering action
-> IO ()
renderingLoop window render = do
GLFW.setTime 0
FSM.runStepManager (1/60) (loop (AM.RectangleData 0 0))
where
loop rd = ((lift . GLFW.windowShouldClose) window) >>= (flip unless) (go rd)
getTime = GLFW.getTime >>= maybe (throwIO $ userError "getTime") (\t -> return t)
go rd = do
t <- lift getTime
fp <- FSM.checkNextAction t
case fp of
FSM.None -> do
(lift . threadDelay) 10 -- Suspends to reduce the CPU usage.
loop rd
FSM.Update -> do
rd' <- FSM.doUpdate (update window rd)
loop rd'
FSM.Drawing -> do
FSM.doDrawing (render rd)
(lift . GLFW.swapBuffers) window
lift GLFW.pollEvents
loop rd
-- | The steps performed after the window is created.
afterCreateWindow
:: GLFW.Window -- ^ the window handle
-> IO ()
afterCreateWindow window = do
GLFW.makeContextCurrent $ Just window
GLFW.swapInterval 1
desc <- R.initialize
renderingLoop window (R.render desc)
R.terminate desc
GLFW.destroyWindow window
-- | The entry point of the application.
main :: IO ()
main = do
progName <- getProgName
GLFW.setErrorCallback $ Just errorHandler
GLFW.init
GLFW.windowHint $ GLFW.WindowHint'ContextVersionMajor 3
GLFW.windowHint $ GLFW.WindowHint'ContextVersionMinor 3
GLFW.windowHint $ GLFW.WindowHint'OpenGLProfile GLFW.OpenGLProfile'Core
GLFW.createWindow 500 500 progName Nothing Nothing >>= maybe (return ()) afterCreateWindow
GLFW.terminate
| fujiyan/toriaezuzakki | haskell/glfw/keyboard/Rectangle.hs | bsd-2-clause | 3,276 | 0 | 17 | 810 | 919 | 459 | 460 | 76 | 3 |
{-# LANGUAGE BangPatterns #-}
{-| RPC test program.
-}
{-
Copyright (C) 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
import Control.Concurrent
import Control.Monad
import System.Console.GetOpt
import System.Environment
import System.IO
import Text.JSON (decode)
import Text.Printf
import Ganeti.BasicTypes
import Ganeti.Common
import Ganeti.Config
import Ganeti.Errors
import Ganeti.JSON
import Ganeti.Objects
import qualified Ganeti.Path as P
import Ganeti.Rpc
import Ganeti.Utils
-- | Command line options structure.
data Options = Options
{ optRpc :: String -- ^ RPC to execute
, optDataFile :: FilePath -- ^ Path to the RPC serialised form
, optVerbose :: Bool -- ^ Verbosity level
, optStats :: Bool -- ^ Whether to show timing stats
, optCount :: Int -- ^ Count of (multi) RPCs to do
, optBatch :: Int -- ^ How many (multi) RPCs to run in parallel
, optShowHelp :: Bool -- ^ Just show the help
, optShowComp :: Bool -- ^ Just show the completion info
, optShowVer :: Bool -- ^ Just show the program version
} deriving Show
-- | Default values for the command line options.
defaultOptions :: Options
defaultOptions = Options
{ optRpc = "version"
, optDataFile = "rpc.json"
, optVerbose = False
, optStats = False
, optCount = 1
, optBatch = 1
, optShowHelp = False
, optShowComp = False
, optShowVer = False
}
instance StandardOptions Options where
helpRequested = optShowHelp
verRequested = optShowVer
compRequested = optShowComp
requestHelp o = o { optShowHelp = True }
requestVer o = o { optShowVer = True }
requestComp o = o { optShowComp = True }
-- | The rpcs we support. Sadly this duplicates the RPC list.
data KnownRpc = KRInstanceInfo RpcCallInstanceInfo
| KRAllInstancesInfo RpcCallAllInstancesInfo
| KRInstanceList RpcCallInstanceList
| KRNodeInfo RpcCallNodeInfo
| KRVersion RpcCallVersion
| KRStorageList RpcCallStorageList
| KRTestDelay RpcCallTestDelay
| KRExportList RpcCallExportList
deriving (Show)
-- | The command line options.
options :: [GenericOptType Options]
options =
[ (Option "r" ["rpc"]
(ReqArg (\ r o -> Ok o { optRpc = r }) "RPC")
"the rpc to use [version]",
OptComplChoices [])
, (Option "f" ["data-file"]
(ReqArg (\ f o -> Ok o { optDataFile = f }) "FILE")
"the rpc serialised form [\"rpc.json\"]",
OptComplFile)
, (Option "v" ["verbose"]
(NoArg (\ opts -> Ok opts { optVerbose = True}))
"show more information when executing RPCs",
OptComplNone)
, (Option "t" ["stats"]
(NoArg (\ opts -> Ok opts { optStats = True}))
"show timing information summary",
OptComplNone)
, (Option "c" ["count"]
(reqWithConversion (tryRead "reading count")
(\count opts -> Ok opts { optCount = count }) "NUMBER")
"Count of (multi) RPCs to execute [1]",
OptComplInteger)
, (Option "b" ["batch"]
(reqWithConversion (tryRead "reading batch size")
(\batch opts -> Ok opts { optBatch = batch }) "NUMBER")
"Parallelisation factor for RPCs [1]",
OptComplInteger)
, oShowHelp
, oShowComp
, oShowVer
]
-- | Arguments we expect
arguments :: [ArgCompletion]
arguments = [ArgCompletion OptComplOneNode 1 Nothing]
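-- Example invocation (the node name is hypothetical):
--
--   rpc-test -r version -f rpc.json -c 100 -b 10 node1.example.com
--
-- runs the "version" RPC, deserialised from rpc.json, 100 times against
-- node1.example.com with at most 10 calls in flight at a time.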
-- | Log a message.
logMsg :: MVar () -> String -> IO ()
logMsg outmvar text =
withMVar outmvar $ \_ -> do
let p = if null text || last text /= '\n'
then putStrLn
else putStr
p text
hFlush stdout
-- | Parses an RPC.
parseRpc :: String -> String -> Result KnownRpc
parseRpc "instance_info" f =
fromJResult "parsing rpc" (decode f) >>= Ok . KRInstanceInfo
parseRpc "all_instances_info" f =
fromJResult "parsing rpc" (decode f) >>= Ok . KRAllInstancesInfo
parseRpc "instance_list" f =
fromJResult "parsing rpc" (decode f) >>= Ok . KRInstanceList
parseRpc "node_info" f =
fromJResult "parsing rpc" (decode f) >>= Ok . KRNodeInfo
parseRpc "version" f =
fromJResult "parsing rpc" (decode f) >>= Ok . KRVersion
parseRpc "storage_list" f =
fromJResult "parsing rpc" (decode f) >>= Ok . KRStorageList
parseRpc "test_delay" f =
fromJResult "parsing rpc" (decode f) >>= Ok . KRTestDelay
parseRpc "export_list" f =
fromJResult "parsing rpc" (decode f) >>= Ok . KRExportList
parseRpc s _ = Bad $ "Unknown rpc '" ++ s ++ "'"
-- | Executes an RPC. These duplicate definitions are needed due to the
-- polymorphism of 'executeRpcCall', and the binding of the result
-- based on the input rpc call.
execRpc :: [Node] -> KnownRpc -> IO [[String]]
execRpc n (KRInstanceInfo v) = formatRpcRes `fmap` executeRpcCall n v
execRpc n (KRAllInstancesInfo v) = formatRpcRes `fmap` executeRpcCall n v
execRpc n (KRInstanceList v) = formatRpcRes `fmap` executeRpcCall n v
execRpc n (KRNodeInfo v) = formatRpcRes `fmap` executeRpcCall n v
execRpc n (KRVersion v) = formatRpcRes `fmap` executeRpcCall n v
execRpc n (KRStorageList v) = formatRpcRes `fmap` executeRpcCall n v
execRpc n (KRTestDelay v) = formatRpcRes `fmap` executeRpcCall n v
execRpc n (KRExportList v) = formatRpcRes `fmap` executeRpcCall n v
-- | Helper to format the RPC result such that it can be printed by
-- 'printTable'.
formatRpcRes :: (Show b) => [(Node, ERpcError b)] -> [[String]]
formatRpcRes = map (\(n, r) -> [nodeName n, either explainRpcError show r])
-- | Main function.
main :: IO ()
main = do
cmd_args <- getArgs
(opts, args) <-
parseOpts defaultOptions cmd_args "rpc-test" options arguments
rpc <- parseRpc (optRpc opts) `liftM` readFile (optDataFile opts) >>=
exitIfBad "parsing RPC"
cfg_file <- P.clusterConfFile
  cfg <- loadConfig cfg_file >>= exitIfBad "Can't load configuration"
nodes <- exitIfBad "Can't find node" . errToResult $
mapM (getNode cfg) args
token <- newEmptyMVar -- semaphore for batch calls
outmvar <- newMVar () -- token for stdout non-interleaving
let logger = if optVerbose opts
then logMsg outmvar
else const $ return ()
let batch = [1..optBatch opts]
count = optCount opts
rpcs = count * length nodes
logger $ printf "Will execute %s multi-ops and %s RPCs"
(show count) (show rpcs)
tstart <- getCurrentTimeUSec
_ <- forkIO $ mapM_ (\_ -> putMVar token ()) batch
mapM_ (\idx -> do
let str_idx = show idx
logger $ "Acquiring token for run " ++ str_idx
_ <- takeMVar token
forkIO $ do
start <- getCurrentTimeUSec
logger $ "Start run " ++ str_idx
!results <- execRpc nodes rpc
stop <- getCurrentTimeUSec
let delta = (fromIntegral (stop - start)::Double) / 1000
putMVar token ()
let stats = if optVerbose opts
then printf "Done run %d in %7.3fmsec\n" idx delta
else ""
table = printTable "" ["Node", "Result"]
results [False, False]
logMsg outmvar $ stats ++ table
) [1..count]
mapM_ (\_ -> takeMVar token) batch
_ <- takeMVar outmvar
when (optStats opts) $ do
tstop <- getCurrentTimeUSec
let delta = (fromIntegral (tstop - tstart) / 1000000)::Double
printf "Total runtime: %9.3fs\n" delta :: IO ()
printf "Total mult-ops: %9d\n" count :: IO ()
printf "Total single RPCs: %9d\n" rpcs :: IO ()
printf "Multi-ops/sec: %9.3f\n" (fromIntegral count / delta) :: IO ()
printf "RPCs/sec: %9.3f\n" (fromIntegral rpcs / delta) :: IO ()
| apyrgio/snf-ganeti | src/rpc-test.hs | bsd-2-clause | 9,085 | 0 | 22 | 2,309 | 2,082 | 1,096 | 986 | 176 | 3 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{- |
Module : Kiosk.Backend.Data.ReportTemplateSpec
Description : Tests for Report Generation
Copyright : Plow Technologies LLC
License : MIT License
Maintainer : Scott Murphy
Stability : experimental
Portability : portable
These are tests for report generation as
it specifically relates to data templates
-}
module Kiosk.Backend.Data.ReportTemplateSpec (spec,main,convertToKioskForm) where
import Control.Applicative ((<$>))
import Data.String (IsString)
-- import Control.Lens
import Control.Lens
import Data.Aeson
import qualified Data.ByteString.Lazy as L
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time
import Data.Time.Clock.POSIX
-- import Kiosk.Backend.Data.DataTemplate
import Data.Map.Lazy (Map)
import qualified Data.Map.Lazy as M
import Kiosk.Backend.Data.DataTemplate
import Kiosk.Backend.Data.ReportTemplate
import Kiosk.Backend.Form
import Language.Haskell.TH
import Codec.Xlsx
import Data.Monoid ((<>))
-- import ReportTemplate.Internal hiding (spec)
import ReportTemplate.Report
import System.Locale (defaultTimeLocale)
import System.Time
import Test.Hspec
import Test.QuickCheck
main :: IO ()
main = hspec spec
spec :: SpecWith ()
spec = do
describe (nameBase 'makeCellDoubleFromInputDouble) $ do
it "Gets an Input double and transfers it to a CellDouble" $ do
True `shouldBe` True
describe (nameBase 'makeCellTextFromInputText) $ do
it "Gets an InputText and transfers it to a CellText" $ do
True `shouldBe` True
-- a template is described from here:
makeCobaltExcelTemplate :: XlsxReportTemplate
makeCobaltExcelTemplate = buildReportTemplate preambleTemplate rowTemplate
preambleTemplate :: XlsxPreambleTemplateList
preambleTemplate = [("Report Prepared For", const $ getCompanyName (1,1))
]
where
formatTimestampDate context _ = def & cellValue ?~ CellText "Current Time not active"
-- formatTimestampDate context _ = makeCellMapFromUTCTime "%c" (2,2) . _xlsxCurrentTime $ context
rowTemplate:: XlsxRowTemplateList
rowTemplate = [ ("Water Hauling Number",getWaterHauler )
,("Lease Name",getLeaseName)
,("Description", getDescription)
,("Truck Number",getTruckNumber)
,("Customer Ticket Number", getCustomerTicketNumber)
]
where
getDescription = const $ makeCellTextWithCellTemplate descriptionTemplate descriptionList
descriptionList = ["Truck #", "Name of Lease", "Water Hauling Permit #"]
getWaterHauler = const $ makeCellTextFromInputText "Water Hauling Permit #"
getLeaseName = const $ makeCellTextFromInputText "Name of Lease"
getTruckNumber = const $ makeCellTextFromInputText "Truck #"
getCustomerTicketNumber = const $ makeCellTextFromInputText "Customer Ticket #"
descriptionTemplate [field1, field2, field3] = "Field1 Is: " <> field1 <>
"Field 2 is:" <> field2 <>
"Field 3 is:" <> field3
descriptionTemplate _ = "Wrong Number of arguments"
-- | Report Inspection
displayReportValues = do
report <- generateReport
sequence $ (foldrTableByRowWithIndex printAndMoveOn (return ())) <$>
(toListOf (reportRows._ReportTableRowIndex._2) report)
where
printAndMoveOn k rowVal m = do
m
print k
print rowVal
-- | Generate report
makeXlsxFile = do
ct <- getPOSIXTime
xl <- generateReportXlsx
L.writeFile "example.xlsx" $ fromXlsx ct xl
generateReportXlsx :: IO Xlsx
generateReportXlsx = do
  sheet <- generateReportSheet
  return $ def & atSheet "ex" ?~ sheet
generateReportSheet = renderSpreadsheet <$> generateReport
generateReport :: IO XlsxReport
generateReport = do
ct <- getCurrentTime
dtes <- generate generateDataTemplatesWithData
let forms@(oneForm:_) = convertToKioskForm <$> currentCobaltForms
reportTemplate = makeCobaltExcelTemplate
report = buildXlsxReport reportTemplate (XlsxContext "") oneForm dtes
return report
generateDataTemplatesWithData = do
txt <- T.pack <$> arbitrary
let targetDataTemplates = (fromFormToDataTemplate.convertToKioskForm <$> currentCobaltForms )
transformedDataTemplates = targetDataTemplates & (traverse .
_templateItems .
traverse .
_templateValue .
_InputTypeText .
                                                  getInputText) .~ "an arbitrary thing"
return transformedDataTemplates
-- | Form Generation (Cobalt Version)
convertToKioskForm :: CobaltWaterHaulingCompany -> Form
convertToKioskForm waterHaulingCompany = Form cobaltEnvironmentalSolutions cobaltAddress cobaltLogo defaultPhone [createWaterHauler waterHaulingName] cobaltFormBody
where
waterHaulingName = _whcCompanyName $ waterHaulingCompany
newtype UUID = UUID { _getUUID :: Text}
deriving (Read,Eq,Show,IsString,ToJSON,FromJSON,Ord)
data CobaltWaterHaulingCompany = CobaltWaterHaulingCompany { _whcFormId:: Maybe FormId
, _whcCompanyName :: CompanyName
, _whcGetUUID :: UUID }
deriving (Eq,Ord)
cobaltEnvironmentalSolutions :: Company
cobaltEnvironmentalSolutions = Company "Cobalt Environmental Solutions LLC" [CompanyWidth $ WidthAttribute (12::Int) ]
cobaltAddress:: Address
cobaltAddress= Address "PO Box 130 Wilson, Oklahoma 73463\n886-849-5483\n" [AddressWidth $ WidthAttribute (12::Int)]
cobaltLogo :: Logo
cobaltLogo = Logo "" [LogoPath . PathAttribute $ "Cobalt.png"]
createWaterHauler :: CompanyName -> Constant
createWaterHauler whc = Constant (T.pack.show $ whc) [ ConstantAttributeType "'Water Hauling Company'"
, ConstantAttributeIndexable $ IndexableAttribute True ]
newtype FormId = FormId {_getFormId :: Integer}
deriving (Read,Eq,Show,Num,ToJSON,FromJSON,Ord)
cobaltFormBody :: [Row]
cobaltFormBody = [ truckNumberRow
, permitNumberRow
, customerTicketNumberRow
, leaseInfoRow
, leaseOperatorRow
, leaseNameRow
, waterTypeAndAmountRow
, dateRow
, timeInRow
, driverNameRow
, signatureRow]
where
truckNumberRow = generateInputRowText "Truck #"
permitNumberRow = generateInputRowText "Water Hauling Permit #"
customerTicketNumberRow = generateInputRowText "Customer Ticket #"
leaseInfoRow = generateLabelRow "Lease Information"
leaseOperatorRow = leaseOperatorDropdownRow
leaseNameRow = generateInputRowText "Name of Lease"
waterTypeAndAmountRow = waterTypeRadioRow
dateRow = generateInputRowDate "Date"
timeInRow = generateInputRowTime "Time In"
driverNameRow = generateInputRowText "Driver's Name"
signatureRow = generateInputRowSignature "Driver Signature"
waterTypeRadioRow :: Row
waterTypeRadioRow = Row [waterTypeRadio] []
waterTypeRadio :: Item
waterTypeRadio = Item [ItemRadio . generateRadio "Type of Water Hauled" $ options ] []
where
options = [generateOption "Produced Water"
,generateOption "Pit Water"
,generateOption "Fresh Water"
,generateOption "Flowback Water" ]
leaseOperatorDropdownRow :: Row
leaseOperatorDropdownRow = Row [leaseOperatorItem] []
where
leaseOperatorItem = Item [ItemDropdown leaseOperatorDropdown] []
dropdownOptions :: [Option]
dropdownOptions = generateOption <$> leaseOperators
leaseOperatorDropdown :: Dropdown
leaseOperatorDropdown = Dropdown (Label "Lease Operator" [])
dropdownOptions
(Just fullDefaultInputText )
generateLabelRow :: Text -> Row
generateLabelRow labelText = Row [generateLabelItem labelText] []
-- Input Text
generateInputRowText :: Text -> Row
generateInputRowText labelText = Row [generateInputItemText labelText] []
generateInputItemText :: Text -> Item
generateInputItemText labelText = Item [ItemLabel . generateLabel $ labelText
, ItemInput fullDefaultInputText] []
fullDefaultInputText :: Input
fullDefaultInputText = Input fullDefaultInputTypeText fullDefaultInputAttributesList
fullDefaultInputTypeText :: InputType
fullDefaultInputTypeText = InputTypeText $ InputText (""::Text)
-- Input Date
generateInputRowDate :: Text -> Row
generateInputRowDate labelDate = Row [generateInputItemDate labelDate] []
generateInputItemDate :: Text -> Item
generateInputItemDate labelDate = Item [ItemLabel . generateLabel $ labelDate
, ItemAutoInput . AutoInput $ fullDefaultInputDate] []
fullDefaultInputDate :: Input
fullDefaultInputDate = Input fullDefaultInputTypeDate [InputType InputTypeAttributeDate]
fullDefaultInputTypeDate :: InputType
fullDefaultInputTypeDate = InputTypeDate $ (InputDate "")
-- Input Time
generateInputRowTime :: Text -> Row
generateInputRowTime labelTime = Row [generateInputItemTime labelTime] []
generateInputItemTime :: Text -> Item
generateInputItemTime labelTime = Item [ItemLabel . generateLabel $ labelTime
, ItemAutoInput . AutoInput $ fullDefaultInputTime] []
fullDefaultInputTime :: Input
fullDefaultInputTime = Input fullDefaultInputTypeTime [InputType InputTypeAttributeTime]
fullDefaultInputTypeTime :: InputType
fullDefaultInputTypeTime = InputTypeTime $ (InputTime "")
-- Input Signature
generateInputRowSignature :: Text -> Row
generateInputRowSignature labelText = Row [generateInputItemSignature labelText] []
generateInputItemSignature :: Text -> Item
generateInputItemSignature labelText = Item [ItemLabel . generateLabel $ labelText
, ItemInput fullDefaultInputSignature] []
fullDefaultInputSignature :: Input
fullDefaultInputSignature = Input fullDefaultInputTypeSignature [InputType InputTypeAttributeSignature]
generateLabelItem :: Text -> Item
generateLabelItem labelText = Item [ItemLabel . generateLabel $ labelText ] []
leaseOperators :: [Text]
leaseOperators = ["XTO Energy","Continental Resources","Citation Oil and Gas","Other","Brady's Welding & Machine Shop","WFW Production","Mustang Fuel","SSB Production","LINN Energy","Keith F Walker","GLB","Mack Energy","Nubs","Ardmore Production","Dehart","Southern Oklahoma Production","Silver Creek","Brady Welding & Machine Shop","Coastal Plains","Thunder Oil & Gas","Atlas Pipeline","Cantrell Energy","Kingery Energy","Williford Resources","Mark Shidler","WFD Oil","Yale Oil","Star Oil & Co.","TEF","T&B Oil Co."]
fullDefaultInputTypeSignature :: InputType
fullDefaultInputTypeSignature = InputTypeSignature $ Signature ""
fullDefaultInputAttributesList :: [InputAttribute]
fullDefaultInputAttributesList = [tAttr, ixAttr]
where
ixAttr = InputIndexable $ IndexableAttribute True
tAttr = InputType $ InputTypeAttributeText
generateLabel :: Text -> Label
generateLabel labelText = Label labelText [LabelWidth $ WidthAttribute (12::Int)]
generateRadio :: Text -> [Option] -> Radio
generateRadio labelText options = Radio (generateLabel labelText) [] options [fullDefaultOptionQualifier]
-- | Radio
fullDefaultOptionQualifier :: OptionQualifier
fullDefaultOptionQualifier = OptionQualifier fullDefaultQualifierChoices []
fullDefaultQualifierChoices :: [QualifierChoices]
fullDefaultQualifierChoices = [ QualifierLabel ( Label "Amount" [])
, QualifierInput fullDefaultQualifierInput]
fullDefaultQualifierInput :: Input
fullDefaultQualifierInput = Input dit dia
where
dit = InputTypeDouble . InputDouble $ 0.0
dia = [tAttr, ixAttr,minAttr,maxAttr]
minAttr = InputMinDouble $ MinAttributeDouble (0.0::Double)
maxAttr = InputMaxDouble $ MaxAttributeDouble (150.0::Double)
ixAttr = InputIndexable $ IndexableAttribute True
tAttr = InputType $ InputTypeAttributeDouble
generateOption :: Text -> Option
generateOption optionText = Option optionText []
data CompanyName = BigStarTrucking
| BulletEnergyServices
| CandJTrucking
| BigMacTankTrucks
| BradyWeldingandMachineShop
| KleenOilfieldServices
| BandCBackhoeandTransports
| ForsytheOilfield
| HullsOilfield
| SouthCentralOilfieldServices
| TopOTexas
| MitchellTankTruckServices
| FluidServices
| DavenportOilfieldServices
| TestCompany
| SoonerStar
| NexStream
| HullEnvironmentalServices
| Arkhoma
| ZeroSeven
| HammTankAndTrucking
deriving (Eq,Ord)
instance Show CompanyName where
show (BigStarTrucking) = "Big Star Trucking"
show (BulletEnergyServices) = "Bullet Energy Services"
show (CandJTrucking) = "C and J Trucking"
show (BigMacTankTrucks) = "Big Mac Trucks"
show (BradyWeldingandMachineShop) = "Bradly Welding and Machine Shop"
show (KleenOilfieldServices) = "Kleen Oilfield Services"
show (BandCBackhoeandTransports) = "B and C Backhoe and Transports"
show (ForsytheOilfield ) = "Forsythe Oilfield"
show (HullsOilfield) = "Hulls Oilfield"
show (SouthCentralOilfieldServices) = "South Central Oilfield Services"
show (TopOTexas) = "Top-O-Texas"
show (MitchellTankTruckServices) = "Mitchell Tank Truck Services"
show (FluidServices) = "Fluid Services"
show (DavenportOilfieldServices) = "Davenport Oilfield Services"
show (TestCompany ) = "Test Company"
show (SoonerStar ) = "Sooner Star"
show (NexStream ) = "NexStream"
show (Arkhoma ) = "Arkhoma"
show (HullEnvironmentalServices) = "Hull Environmental Services"
show (ZeroSeven) = "07 Energy"
show (HammTankAndTrucking) = "Hamm Tank and Trucking Service, LLC"
exampleUUID :: UUID
exampleUUID = "a2e3609e-154d-4e60-80e0-c77189098617"
currentCobaltForms :: [CobaltWaterHaulingCompany]
currentCobaltForms = [ CobaltWaterHaulingCompany (Just 0) BigStarTrucking exampleUUID
, CobaltWaterHaulingCompany (Just 1) BulletEnergyServices exampleUUID
, CobaltWaterHaulingCompany (Just 2) CandJTrucking exampleUUID
, CobaltWaterHaulingCompany (Just 3) BigMacTankTrucks exampleUUID
, CobaltWaterHaulingCompany (Just 4) BradyWeldingandMachineShop exampleUUID
, CobaltWaterHaulingCompany (Just 5) KleenOilfieldServices exampleUUID
, CobaltWaterHaulingCompany (Just 6) BandCBackhoeandTransports exampleUUID
, CobaltWaterHaulingCompany (Just 7) ForsytheOilfield exampleUUID
, CobaltWaterHaulingCompany (Just 8) HullsOilfield exampleUUID
, CobaltWaterHaulingCompany (Just 9) SouthCentralOilfieldServices exampleUUID
, CobaltWaterHaulingCompany (Just 10) TopOTexas exampleUUID
, CobaltWaterHaulingCompany (Just 11) MitchellTankTruckServices exampleUUID
, CobaltWaterHaulingCompany (Just 12) FluidServices exampleUUID
, CobaltWaterHaulingCompany (Just 13) DavenportOilfieldServices exampleUUID
, CobaltWaterHaulingCompany (Just 14) TestCompany exampleUUID
, CobaltWaterHaulingCompany (Just 15) SoonerStar exampleUUID
, CobaltWaterHaulingCompany (Just 16) NexStream exampleUUID
, CobaltWaterHaulingCompany (Just 17) Arkhoma exampleUUID
, CobaltWaterHaulingCompany (Just 18) HullEnvironmentalServices exampleUUID
, CobaltWaterHaulingCompany (Just 19) ZeroSeven exampleUUID
, CobaltWaterHaulingCompany (Just 20) HammTankAndTrucking exampleUUID]
| plow-technologies/cobalt-kiosk-data-template | test/Kiosk/Backend/Data/ReportTemplateSpec.hs | bsd-3-clause | 17,251 | 0 | 17 | 4,687 | 3,117 | 1,697 | 1,420 | 287 | 1 |
-- Copyright (c) 2017, Travis Bemann
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- o Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
--
-- o Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
--
-- o Neither the name of the copyright holder nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-- ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-- POSSIBILITY OF SUCH DAMAGE.
{-# LANGUAGE OverloadedStrings, OverloadedLabels, PatternSynonyms, BangPatterns #-}
module Main (Main.main) where
import Robots.Genetic.HunterKiller.Types
import Robots.Genetic.HunterKiller.Utility
import Robots.Genetic.HunterKiller.Intrinsics
import Robots.Genetic.HunterKiller.Params
import Robots.Genetic.HunterKiller.Load
import Robots.Genetic.HunterKiller.Save
import Robots.Genetic.HunterKiller.Combat
import Robots.Genetic.HunterKiller.Render
import Control.Concurrent (forkIO,
forkOS,
threadDelay)
import Control.Concurrent.MVar (MVar,
newEmptyMVar,
putMVar,
takeMVar)
import Control.Concurrent.STM (STM,
atomically)
import Control.Concurrent.STM.TQueue (TQueue,
newTQueueIO,
writeTQueue,
readTQueue,
tryReadTQueue)
import System.Exit (exitWith,
exitFailure,
ExitCode(..))
import qualified Data.Sequence as Seq
import Data.Sequence ((><))
import qualified Data.Text as Text
import Data.Text.IO (hPutStr,
readFile)
import System.IO (stderr,
openFile,
hClose,
IOMode(..))
import System.Environment (getArgs,
getProgName)
import Control.Exception (catch,
IOException,
SomeException)
import qualified GI.Gtk as Gtk
import qualified GI.Gdk as Gdk
import GI.Cairo.Structs.Context (Context(..))
import qualified Graphics.Rendering.Cairo as Cairo
import Data.GI.Base
import Control.Monad.Trans.Reader (ReaderT(..))
import Foreign.Ptr (castPtr)
import Graphics.Rendering.Cairo.Types (Cairo(..))
import Graphics.Rendering.Cairo.Internal (Render(..))
import Control.Monad.IO.Class (liftIO)
import Text.Printf (printf)
import Data.Functor ((<$>))
import Text.Read (readMaybe)
import Data.IORef (IORef,
newIORef,
readIORef,
writeIORef)
import System.Random (StdGen,
newStdGen)
import Data.Foldable (foldl',
toList)
import GI.GLib (idleAdd,
pattern PRIORITY_DEFAULT,
pattern PRIORITY_HIGH)
import GI.Gdk.Objects.Window
import qualified System.Clock as Clock
import Prelude hiding (hPutStr,
readFile)
import Control.DeepSeq (NFData(..),
deepseq)
-- | The main action.
main :: IO ()
main = do
inputs <- getInputs
case inputs of
Right (exprs, params, savePath) ->
setup exprs $ params { robotParamsAutoSavePath = savePath }
Left errorText -> do
hPutStr stderr errorText
exitFailure
-- | Get inputs.
getInputs :: IO (Either Text.Text (Seq.Seq RobotExpr, RobotParams, FilePath))
getInputs = do
args <- getArgs
case args of
[worldPath, worldCopiesText, paramsPath, savePath] -> do
paramsText <-
catch (Right <$> readFile paramsPath)
(\e -> return . Left . Text.pack $ show (e :: IOException))
case paramsText of
Right paramsText -> do
worldText <-
catch (Right <$> readFile worldPath)
(\e -> return . Left . Text.pack $ show (e :: IOException))
case worldText of
Right worldText ->
let errorOrParams = loadParams paramsText in
case errorOrParams of
Right params ->
let errorOrExprs = loadWorld specialConstEntries worldText in
case errorOrExprs of
Right exprs ->
case readMaybe worldCopiesText of
Just worldCopies ->
if worldCopies >= 1
then return $ Right
(foldl' (><) Seq.empty $
Seq.replicate worldCopies exprs,
params, savePath)
else return $ Left
"number of copies must be greater than zero\n"
Nothing -> return $ Left "invalid number of copies\n"
Left errorText ->
return . Left . Text.pack $
printf "%s: unable to load world: %s\n"
worldPath errorText
Left errorText ->
return . Left . Text.pack $
printf "%s: unable to load params: %s\n"
paramsPath errorText
Left errorText ->
return . Left . Text.pack $ printf "%s: %s\n" worldPath errorText
Left errorText ->
return . Left . Text.pack $ printf "%s: %s\n" paramsPath errorText
_ -> do progName <- getProgName
return . Left . Text.pack $
printf "Usage: %s WORLD-FILE COUNT PARAMS-FILE SAVE-FILE\n"
progName
-- | Set up the UI and prepare for running.
setup :: Seq.Seq RobotExpr -> RobotParams -> IO ()
setup exprs params = do
controlQueue <- newTQueueIO
exitQueue <- newTQueueIO
Gtk.init Nothing
window <- Gtk.windowNew Gtk.WindowTypeToplevel
Gtk.setWindowTitle window "Botwars"
Gtk.onWidgetDestroy window $ do
Gtk.mainQuit
atomically $ writeTQueue controlQueue RobotExit
vbox <- Gtk.boxNew Gtk.OrientationVertical 10
Gtk.boxSetHomogeneous vbox False
canvas <- Gtk.drawingAreaNew
Gtk.widgetSetSizeRequest canvas 920 920
worldRef <- newIORef Nothing
Gtk.onWidgetDraw canvas $ \(Context fp) -> do
withManagedPtr fp $ \p ->
(`runReaderT` Cairo (castPtr p)) $ runRender $ do
w <- liftIO $ fromIntegral <$> Gtk.widgetGetAllocatedWidth canvas
h <- liftIO $ fromIntegral <$> Gtk.widgetGetAllocatedHeight canvas
world <- liftIO $ readIORef worldRef
case world of
Just world -> drawWorld world w h
Nothing -> return ()
return True
Gtk.boxPackStart vbox canvas True True 0
buttonBox <- Gtk.buttonBoxNew Gtk.OrientationHorizontal
Gtk.buttonBoxSetLayout buttonBox Gtk.ButtonBoxStyleCenter
backwardButton <- Gtk.buttonNew
stopButton <- Gtk.buttonNew
startButton <- Gtk.buttonNew
forwardButton <- Gtk.buttonNew
saveButton <- Gtk.buttonNew
Gtk.buttonSetLabel backwardButton "<<"
Gtk.buttonSetLabel stopButton "Stop"
Gtk.buttonSetLabel startButton "Start"
Gtk.buttonSetLabel forwardButton ">>"
Gtk.buttonSetLabel saveButton "Save"
Gtk.onButtonClicked backwardButton $
atomically $ writeTQueue controlQueue RobotBackward
Gtk.onButtonClicked stopButton $
atomically $ writeTQueue controlQueue RobotStop
Gtk.onButtonClicked startButton $
atomically $ writeTQueue controlQueue RobotStart
Gtk.onButtonClicked forwardButton $
atomically $ writeTQueue controlQueue RobotForward
Gtk.onButtonClicked saveButton $ do
fileChooser <- Gtk.fileChooserNativeNew (Just "Save As World")
(Just window) Gtk.FileChooserActionSave Nothing
Nothing
result <- toEnum <$> fromIntegral <$> Gtk.nativeDialogRun fileChooser
case result of
Gtk.ResponseTypeAccept -> do
filename <- Gtk.fileChooserGetFilename fileChooser
case filename of
Just filename -> do
            let !message = filename `deepseq` RobotSave filename
atomically $ writeTQueue controlQueue message
Nothing -> return ()
_ -> return ()
Gtk.boxPackStart buttonBox backwardButton False False 0
Gtk.boxPackStart buttonBox stopButton False False 0
Gtk.boxPackStart buttonBox startButton False False 0
Gtk.boxPackStart buttonBox forwardButton False False 0
Gtk.boxPackStart buttonBox saveButton False False 0
Gtk.boxPackEnd vbox buttonBox False False 0
Gtk.containerAdd window vbox
gen <- newStdGen
Gtk.widgetShowAll window
forkOS Gtk.main
forkIO $ do
time <- Clock.getTime Clock.Monotonic
let play =
RobotPlay { robotPlayCyclesPerSecond =
robotParamsMaxCyclesPerSecond params,
robotPlayRunning = False,
robotPlayReverse = False,
robotPlayIndex = 0,
robotPlayDoStep = RobotNoStep }
mainLoop (initCont exprs params gen) canvas worldRef controlQueue
exitQueue time play
exitStatus <- atomically $ readTQueue exitQueue
exitWith exitStatus
-- | Execute the main loop of the genetically-programmed robot fighting arena.
mainLoop :: RobotCont -> Gtk.DrawingArea -> IORef (Maybe RobotWorld) ->
TQueue RobotControl -> TQueue ExitCode -> Clock.TimeSpec ->
RobotPlay -> IO ()
mainLoop cont canvas worldRef controlQueue exitQueue nextTime play = do
let params = robotContParams cont
control <- atomically $ tryReadTQueue controlQueue
case control of
Just RobotExit -> atomically $ writeTQueue exitQueue ExitSuccess
Just (RobotSave path) -> do
case robotContWorld cont of
Just world -> do
message <- saveWorldToFile path world
case message of
Left errorText -> hPutStr stderr errorText
Right () -> return ()
Nothing -> return ()
mainLoop cont canvas worldRef controlQueue exitQueue nextTime play
Just control ->
let play' = changePlay control play params
in mainLoop cont canvas worldRef controlQueue exitQueue nextTime play'
Nothing -> do
let displayInfo =
robotPlayRunning play || (robotPlayDoStep play /= RobotNoStep)
(cont', world, play) <- nextState cont play
writeIORef worldRef (world `seq` Just world)
Gdk.threadsAddIdle PRIORITY_HIGH $ do
window <- Gtk.widgetGetWindow canvas
case window of
Just window -> Gdk.windowInvalidateRect window Nothing True
Nothing -> return ()
return False
if displayInfo
then do
let robotDisplay =
Text.concat
(toList (fmap (\robot ->
Text.pack $ printf "%d " (robotIndex robot))
(robotWorldRobots world)))
shotDisplay =
Text.concat
(toList (fmap (\shot ->
Text.pack $ printf "%d " (shotRobotIndex shot))
(robotWorldShots world)))
putStr $ printf "Robots: %sShots: %s\n" robotDisplay shotDisplay
else return ()
time <- Clock.getTime Clock.Monotonic
let cyclesPerSecond =
if robotPlayRunning play
then robotPlayCyclesPerSecond play
else robotParamsMaxCyclesPerSecond params
maxDelay =
Clock.fromNanoSecs . floor $ 1000000000.0 / cyclesPerSecond
nextTime' = nextTime + maxDelay
if time < nextTime'
then threadDelay . fromIntegral $
(Clock.toNanoSecs (nextTime' - time)) `div` 1000
else return ()
let nextTime'' =
if time - nextTime > (Clock.fromNanoSecs . floor $
2000000000.0 / cyclesPerSecond)
then time
else nextTime'
mainLoop cont' canvas worldRef controlQueue exitQueue nextTime'' play
-- | Change the playback state.
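-- Start and Stop toggle 'robotPlayRunning'.  While running, Forward and
-- Backward double or halve the playback speed, flipping the playback
-- direction once the speed would fall below 1/16 of the configured maximum;
-- while stopped, they queue a single forward or backward step.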
changePlay :: RobotControl -> RobotPlay -> RobotParams -> RobotPlay
changePlay RobotStart play _ = play { robotPlayRunning = True }
changePlay RobotStop play _ = play { robotPlayRunning = False }
changePlay RobotForward play params =
if robotPlayRunning play
then if robotPlayReverse play
then
let newCyclesPerSecond = robotPlayCyclesPerSecond play / 2.0
in if newCyclesPerSecond >=
robotParamsMaxCyclesPerSecond params / 16.0
then play { robotPlayCyclesPerSecond = newCyclesPerSecond }
else play { robotPlayReverse = False,
robotPlayCyclesPerSecond =
robotParamsMaxCyclesPerSecond params / 16.0 }
else
let newCyclesPerSecond = robotPlayCyclesPerSecond play * 2.0
in if newCyclesPerSecond <= robotParamsMaxCyclesPerSecond params
then play { robotPlayCyclesPerSecond = newCyclesPerSecond }
else play { robotPlayCyclesPerSecond =
robotParamsMaxCyclesPerSecond params }
else play { robotPlayDoStep = RobotStepForward }
changePlay RobotBackward play params =
if robotPlayRunning play
then if robotPlayReverse play
then
let newCyclesPerSecond = robotPlayCyclesPerSecond play * 2.0
in if newCyclesPerSecond <= robotParamsMaxCyclesPerSecond params
then play { robotPlayCyclesPerSecond = newCyclesPerSecond }
else play { robotPlayCyclesPerSecond =
robotParamsMaxCyclesPerSecond params }
else
let newCyclesPerSecond = robotPlayCyclesPerSecond play / 2.0
in if newCyclesPerSecond >= robotParamsMaxCyclesPerSecond params / 16.0
then play { robotPlayCyclesPerSecond = newCyclesPerSecond }
else play { robotPlayReverse = True,
robotPlayCyclesPerSecond =
robotParamsMaxCyclesPerSecond params / 16.0 }
else play { robotPlayDoStep = RobotStepBackward }
changePlay _ _ _ = error "impossible"
-- | Get a new continuity, world, and play control state.
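-- 'robotPlayIndex' is 0 at the live head of the simulation and negative
-- while rewound into 'robotContPrevWorlds' (an offset from the end of the
-- stored history); playing or stepping forward at (or just behind) the head
-- executes a fresh cycle, otherwise the stored world at that offset is
-- replayed.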
nextState :: RobotCont -> RobotPlay -> IO (RobotCont, RobotWorld, RobotPlay)
nextState cont play =
if robotPlayRunning play
then if not $ robotPlayReverse play
then if robotPlayIndex play >= -1
then do
let (event, cont') = executeCycle cont
play' = play { robotPlayIndex = 0 }
world <- case event of
RobotWorldCycle world -> return world
RobotRoundDone world autoSave -> do
doRoundDone world autoSave $ robotContParams cont
return world
return (cont', world, play')
else
let index = robotPlayIndex play + 1
prevWorlds = robotContPrevWorlds cont
world =
case Seq.lookup (Seq.length prevWorlds + index)
prevWorlds of
Just world -> world
Nothing -> error "impossible"
play' = play { robotPlayIndex = index }
in return (cont, world, play')
else
let prevWorlds = robotContPrevWorlds cont in
if robotPlayIndex play >= -(Seq.length prevWorlds - 1)
then
let index = robotPlayIndex play - 1
world =
case Seq.lookup (Seq.length prevWorlds + index)
prevWorlds of
Just world -> world
Nothing -> error "impossible"
play' = play { robotPlayIndex = index }
in return (cont, world, play')
else
let play' = play { robotPlayRunning = False,
robotPlayReverse = False }
world =
case Seq.lookup (Seq.length prevWorlds +
robotPlayIndex play') prevWorlds of
Just world -> world
Nothing -> error "impossible"
in return (cont, world, play')
else
case robotPlayDoStep play of
RobotStepForward ->
if robotPlayIndex play >= -1
then do
let (event, cont') = executeCycle cont
play' = play { robotPlayIndex = 0,
robotPlayDoStep = RobotNoStep }
world <- case event of
RobotWorldCycle world -> return world
RobotRoundDone world autoSave -> do
doRoundDone world autoSave $ robotContParams cont
return world
return (cont', world, play')
else
let index = robotPlayIndex play + 1
prevWorlds = robotContPrevWorlds cont
world =
case Seq.lookup (Seq.length prevWorlds + index)
prevWorlds of
Just world -> world
Nothing -> error "impossible"
play' = play { robotPlayIndex = index,
robotPlayDoStep = RobotNoStep }
in return (cont, world, play')
RobotStepBackward ->
let prevWorlds = robotContPrevWorlds cont in
if robotPlayIndex play >= -(Seq.length prevWorlds - 1)
then
let index = robotPlayIndex play - 1
world =
case Seq.lookup (Seq.length prevWorlds + index)
prevWorlds of
Just world -> world
Nothing -> error "impossible"
play' = play { robotPlayIndex = index,
robotPlayDoStep = RobotNoStep }
in return (cont, world, play')
else
let play' = play { robotPlayDoStep = RobotNoStep } in
if robotPlayIndex play' < 0 then
let world =
case Seq.lookup (Seq.length prevWorlds +
robotPlayIndex play') prevWorlds of
Just world -> world
Nothing -> error "impossible"
in return (cont, world, play)
else
case robotContWorld cont of
Just world -> return (cont, world, play')
Nothing -> do
let (event, cont') = executeCycle cont
play'' = play' { robotPlayIndex = 0 }
world <- case event of
RobotWorldCycle world -> return world
RobotRoundDone world autoSave -> do
doRoundDone world autoSave $ robotContParams cont
return world
return (cont', world, play'')
RobotNoStep ->
if robotPlayIndex play >= 0
then
case robotContWorld cont of
Just world -> return (cont, world, play)
Nothing -> do
let (event, cont') = executeCycle cont
world <- case event of
RobotWorldCycle world -> return world
RobotRoundDone world autoSave -> do
doRoundDone world autoSave $ robotContParams cont
return world
return (cont', world, play)
else do
let prevWorlds = robotContPrevWorlds cont
world =
case Seq.lookup (Seq.length prevWorlds + robotPlayIndex play)
prevWorlds of
Just world -> world
Nothing -> error "impossible"
return (cont, world, play)
-- | Handle round done.
doRoundDone :: RobotWorld -> RobotAutoSave -> RobotParams -> IO ()
doRoundDone world autoSave params = do
saveWorldToFile (robotParamsBackupSavePath params) world >> return ()
doAutoSave (robotParamsAutoSavePath params) autoSave
-- | Do autosave if needed.
doAutoSave :: FilePath -> RobotAutoSave -> IO ()
doAutoSave path autoSave = do
let round = robotAutoSaveRound autoSave
case robotAutoSaveWorld autoSave of
Just world -> do
message <- saveWorldToFile (printf "%s.%d" path round) world
case message of
Left errorText -> hPutStr stderr errorText
Right () -> return ()
Nothing -> return ()
case robotAutoSaveRobot autoSave of
Just robot -> do
message <- saveRobotToFile (printf "%s.%d.individual" path round) robot
case message of
Left errorText -> hPutStr stderr errorText
Right () -> return ()
Nothing -> return ()
-- | Save a world.
saveWorldToFile :: FilePath -> RobotWorld -> IO (Either Text.Text ())
saveWorldToFile path world = do
let worldText =
saveWorld specialConstEntries . fmap robotExpr $ robotWorldRobots world
saveFile <- catch (Right <$> openFile path WriteMode)
(\e -> return . Left . Text.pack $ show (e :: IOException))
case saveFile of
Right saveFile -> do
hPutStr saveFile worldText
hClose saveFile
return $ Right ()
Left errorText -> do
return $ Left errorText
-- | Save a robot.
saveRobotToFile :: FilePath -> Robot -> IO (Either Text.Text ())
saveRobotToFile path expr = do
let worldText = saveWorld specialConstEntries . Seq.singleton $ robotExpr expr
saveFile <- catch (Right <$> openFile path WriteMode)
(\e -> return . Left . Text.pack $ show (e :: IOException))
case saveFile of
Right saveFile -> do
hPutStr saveFile worldText
hClose saveFile
return $ Right ()
Left errorText -> do
return $ Left errorText
| tabemann/botwars | src/Main.hs | bsd-3-clause | 22,547 | 16 | 39 | 7,414 | 5,188 | 2,585 | 2,603 | 469 | 24 |
----------------------------------------------------------------------------
-- |
-- Module : Imported1
-- Copyright : (c) Sergey Vinokurov 2015
-- License : BSD3-style (see LICENSE)
-- Maintainer : [email protected]
----------------------------------------------------------------------------
{-# LANGUAGE TypeOperators #-}
module Imported1 where
foo :: a -> a
foo x = x
bar :: a -> a
bar x = x
($$) :: a -> a -> a
x $$ _ = x
data (:$$:) a b =
(:$$$:) a b
| sergv/tags-server | test-data/0001module_with_imports/Imported1.hs | bsd-3-clause | 482 | 1 | 8 | 94 | 91 | 55 | 36 | 10 | 1 |
module OpenSet where
import Data.List hiding (union)
import Data.Set as Set
dup x = (x, x)
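-- | Enumerate every topology on the finite set formed from the input list's
-- elements: every family of subsets that contains the empty set and the
-- whole set and is closed under pairwise union and intersection.  Each
-- topology is returned as a list of open sets, and each open set as a list
-- of its elements.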
openSets :: Ord a => [a] -> [[[a]]]
openSets x = toList $ Set.map (toList.(Set.map toList)) $ Set.filter isOpen candidates
where
(e, u) = (fromList [], fromList x)
conpact = fromList [e, u]
candidates = Set.map (union conpact) $ powerSet (powerSet u Set.\\ conpact)
isOpen o = Set.fold (\a b -> p a && b) True ps
where
p (a, b) = uncurry (&&) (intersection a b `member` o, union a b `member` o)
ps = Set.filter (uncurry (<)) $ uncurry cartesianProduct $ dup (o Set.\\ conpact)
main :: IO ()
main = mapM_ go $ zip [1..] $ openSets [0,1,2]
where
go (i, ln) = putStrLn $ show i ++ " : " ++ show ln
| cutsea110/aop | src/OpenSet.hs | bsd-3-clause | 790 | 1 | 13 | 237 | 388 | 207 | 181 | 15 | 1 |
module Rules.Eq where
import Derivation
import Goal
import Rules.Utils
import Tactic
import Term
-- H >> (a = b in A) = (a' = b' in A') in U(i)
-- H >> A = A' in U(i)
-- H >> a = a' in A
-- H >> b = b' in A'
-- Uses: EQ_EQ
eqEQ :: PrlTactic
eqEQ (Goal ctx t) =
case t of
Eq (Eq m1 n1 a1) (Eq m2 n2 a2) (Uni i) ->
return $ Result
{ resultGoals = [ Goal ctx (Eq a1 a2 (Uni i))
, Goal ctx (Eq m1 m2 a1)
, Goal ctx (Eq n1 n2 a1)
]
, resultEvidence = \d ->
case d of
[d1, d2, d3] -> EQ_EQ d1 d2 d3
_ -> error "Eq.EQ: Invalid evidence!"
}
_ -> fail "Eq.EQ does not apply."
-- H >> tt = tt in (a = b in A)
-- H >> a = b in A
-- Uses: EQ_MEM_EQ
eqMEMEQ :: PrlTactic
eqMEMEQ (Goal ctx t) =
case t of
Eq TT TT (Eq m n a) -> return $ Result
{ resultGoals = [ Goal ctx (Eq m n a) ]
, resultEvidence = \d -> case d of
[d] -> EQ_MEM_EQ d
_ -> error "Eq.MEMEQ: Invalid evidence!"
}
_ -> fail "Eq.MEMEQ does not apply."
-- H >> a = b in A
-- H >> b = a in A
-- Uses: EQ_SYM
eqSYM :: PrlTactic
eqSYM (Goal ctx t) =
case t of
Eq m n a -> return $ Result
{ resultGoals = [ Goal ctx (Eq n m a) ]
, resultEvidence = \d -> case d of
[d] -> EQ_SYM d
_ -> error "Eq.SYM: Invalid evidence!"
}
_ -> fail "Eq.SYM does not apply."
-- H >> [a/x]C
-- H, x : A >> C in U(i)
-- H >> a = b in A
-- H >> [b/x]C
-- Uses: EQ_SUBST
-- Note that first supplied term should be a = b in A and
-- the second one should be C.
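-- For instance (an illustrative sketch, not taken from the source): to
-- rewrite a goal H >> [a/x]C using a proof of a = b in A, one would call
-- eqSUBST i (Eq a b A) c, where the occurrences of the substituted term in
-- the pattern c are marked by bound variable 0; the three subgoals above
-- are produced and their derivations are combined with EQ_SUBST.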
eqSUBST :: Universe -> Term -> Term -> PrlTactic
eqSUBST uni eq pat (Goal ctx t) =
case eq of
Eq m n a | subst m 0 pat == t -> return $ Result
{ resultGoals = [ Goal (a <:> ctx) (Eq pat pat (Uni uni))
, Goal ctx eq
, Goal ctx (subst n 0 pat)
]
, resultEvidence = \d -> case d of
[d1, d2, d3] -> EQ_SUBST uni pat d1 d2 d3
_ -> error "Eq.SUBST: Invalid evidence!"
}
_ -> fail "Eq.SUBST does not apply."
| thsutton/cha | lib/Rules/Eq.hs | bsd-3-clause | 2,365 | 0 | 17 | 1,016 | 669 | 354 | 315 | 48 | 3 |
{-# LANGUAGE FlexibleContexts #-}
module Language.Lambda.Semantics.Named.BigStep.Tests
(
bigStepTests
) where
import Prelude hiding (fromInteger, toInteger)
import Data.Char
import Bound.Unwrap as BU
import Test.Tasty
import qualified Test.Tasty.QuickCheck as QC
import qualified Test.Tasty.SmallCheck as SC
import qualified Test.SmallCheck.Series as SC
import Test.Tasty.HUnit
import Language.Lambda.Syntax.Named.Exp
import Language.Lambda.Semantics.Named.BigStep
import Language.Lambda.Syntax.Named.Testdata
renderFresh :: Printer String
renderFresh (n,i) = n ++ show i
normalOrder = mkNormalOrder renderFresh
callByValue = mkCallByValue renderFresh
callByName = mkCallByName renderFresh
bigStepTests = testGroup "tests" [
identityTests
, logicTests
, arithmeticTests
, letrecTests
]
-- -----------------------------------------------------------------------------
identityTests = testGroup "IdentityTests"
[ testCase "i_ 1" $
normalOrder i_ @=? i_
, testCase "i_ 2" $
normalOrder (i_ # i_) @=? i_
, testCase "i_ 3" $
normalOrder (i_ # k_) @=? k_
]
-- -----------------------------------------------------------------------------
logicTests = testGroup "LogicTests"
[ if_Tests
, not_Tests
, and_Tests
, or_Tests
, imp_Tests
, iff_Tests
, logicLawsTests
]
if_Tests = testGroup "if_Tests"
[ testCase "if_ 1" $
one_ @=? normalOrder (if_ # tru_ # one_ # zro_)
, testCase "if_ 2" $
zro_ @=? normalOrder (if_ # fls_ # one_ # zro_)
]
not_Tests = testGroup "not_Tests"
[ testCase "not_ 1" $
fls_ @=? normalOrder (not_ # tru_)
, testCase "not_ 2" $
tru_ @=? normalOrder (not_ # fls_)
]
and_Tests = testGroup "and_Tests"
[ testCase "and_ 1" $
tru_ @=? normalOrder (and_ # tru_ # tru_)
, testCase "and_ 2" $
fls_ @=? normalOrder (and_ # tru_ # fls_)
, testCase "and_ 3" $
fls_ @=? normalOrder (and_ # fls_ # tru_)
, testCase "and_4 " $
fls_ @=? normalOrder (and_ # fls_ # fls_)
]
or_Tests = testGroup "or_Tests"
[ testCase "or_ 1" $
tru_ @=? normalOrder (or_ # tru_ # tru_)
, testCase "or_ 2" $
tru_ @=? normalOrder (or_ # tru_ # fls_)
, testCase "or_ 3" $
tru_ @=? normalOrder (or_ # fls_ # tru_)
, testCase "or_ 4" $
fls_ @=? normalOrder (or_ # fls_ # fls_)
]
imp_Tests = testGroup "imp_Tests"
[ testCase "imp_ 1" $
tru_ @=? normalOrder (imp_ # tru_ # tru_)
, testCase "imp_ 2" $
fls_ @=? normalOrder (imp_ # tru_ # fls_)
, testCase "imp_ 3" $
tru_ @=? normalOrder (imp_ # fls_ # tru_)
, testCase "imp_ " $
tru_ @=? normalOrder (imp_ # fls_ # fls_)
]
iff_Tests = testGroup "iff_Tests"
[ testCase "iff_ 1" $
tru_ @=? normalOrder (iff_ # tru_ # tru_)
, testCase "iff_ 2" $
fls_ @=? normalOrder (iff_ # tru_ # fls_)
, testCase "iff_ 3" $
fls_ @=? normalOrder (iff_ # fls_ # tru_)
, testCase "iff_ 4" $
tru_ @=? normalOrder (iff_ # fls_ # fls_)
]
logicLawsTests = testGroup "Laws of Logic"
[ deMorganTests
]
deMorganTests = testGroup "De Morgan"
[ SC.testProperty "De Morgan 1" $
\p q ->
let p_ = fromBool p
q_ = fromBool q
in normalOrder (not_ # (and_ # p_ # q_)) ==
normalOrder (or_ # (not_ # p_) # (not_ # q_))
, SC.testProperty "De Morgan 2" $
\p q ->
let p_ = fromBool p
q_ = fromBool q
in normalOrder (not_ # (or_ # p_ # q_)) ==
normalOrder (and_ # (not_ # p_) # (not_ # q_))
]
-- -----------------------------------------------------------------------------
arithmeticTests = testGroup "ArithmeticTests"
[ iszro_Tests
, scc_Tests
, prd_Tests
, scc_prd_RelationTests
, pls_Tests
, mlt_Tests
, add_mlt_RelationTests
, pow_Tests
, leqnat_Tests
, eqnat_Tests
, fac_Tests
]
iszro_Tests = testGroup "iszro_Tests"
[ testCase "iszro_ 1" $
normalOrder (iszro_ # zro_) @=? tru_
, testCase "iszro_ 2" $
normalOrder (iszro_ # (scc_ # zro_)) @=? fls_
]
scc_Tests = testGroup "scc_Tests"
[ testCase "scc_ 1" $
normalOrder (scc_ # zro_) @=? one_
, QC.testProperty "scc_ 2" $
QC.forAll (interval 0 250) $
\n -> normalOrder (scc_ # (unsafeFromInt n)) == unsafeFromInt (succ n)
, scProp "scc_ 3" 100 $
\n -> let pos = SC.getNonNegative n :: Int
in normalOrder (scc_ # (unsafeFromInt pos))
== (unsafeFromInt (succ pos))
]
prd_Tests = testGroup "prd_Tests"
[ testCase "prd_ 1" $
normalOrder (prd_ # zro_) @=? zro_
, testCase "prd_ 2" $
normalOrder (prd_ # one_) @=? zro_
]
scc_prd_RelationTests = testGroup "scc_prd_RelationTests"
[ QC.testProperty "scc_ prd_ 1 (inverse)" $
QC.forAll (interval 0 250) $
\n -> let cn = unsafeFromInt n
in normalOrder (prd_ # (scc_ # cn)) == cn
, scProp "scc_ prd_ 2 (inverse)" 50 $
\n -> let cn = fromPositive n
in normalOrder (prd_ # (scc_ # cn)) == cn
, scProp "scc_ prd_ 3 (inverse)" 50 $
\n -> let cn = fromPositive n
in normalOrder (scc_ # (prd_ # cn)) == cn
]
pls_Tests = testGroup "pls_Tests"
[ scProp "pls_ 1 (`zro_` right neutral)" 50 $
\n -> let cn = fromPositive n
in normalOrder (add_ # cn # zro_) == cn
, scProp "pls_ 2 (`zro_` left neutral)" 50 $
\n -> let cn = fromPositive n
in normalOrder (add_ # zro_ # cn) == cn
, scProp "sub_ 1 (`zro_` right neutral)" 50 $
\n -> let cn = fromPositive n
in normalOrder (sub_ # cn # zro_) == cn
, scProp "add_ sub_ (inverse)" 30 $
\n -> let cn = fromPositive n
in normalOrder (sub_ # cn # (add_ # cn # zro_)) == zro_
]
mlt_Tests = testGroup "mtl_Tests"
[ scProp "mlt_ 1" 30 $
\n -> let cn = fromPositive n
in normalOrder (mlt_ # cn # zro_) == zro_
, scProp "mlt_ 2" 30 $
\n -> let cn = fromPositive n
in normalOrder (mlt_ # zro_ # cn) == zro_
]
add_mlt_RelationTests = testGroup "add_mlt_RelationTests"
[ scProp "add_ mlt_ (distributivity)" 3 $
\n m o -> let cn = fromPositive n
cm = fromPositive m
co = fromPositive o
in normalOrder (mlt_ # (add_ # cn # cm) # co)
== normalOrder (add_ # (mlt_ # cn # co) # (mlt_ # cm # co))
]
pow_Tests = testGroup "pow_Tests"
[ scProp "pow 1" 10 $
\n -> let cn = fromPositive n
in normalOrder (pow_ # cn # zro_) == one_
, scProp "pow 2" 10 $
\n -> let cn = fromPositive n
in normalOrder (pow_ # cn # one_ )
== cn
, scProp "pow 3" 4 $
\n -> let cn = fromPositive n
two_ = scc_ # one_
in normalOrder (pow_ # cn # two_ )
== normalOrder (mlt_ # cn # cn)
]
leqnat_Tests = testGroup "leqnat_Tests"
[ scProp "leqnat 1: reflexivity" 10 $
\n -> let cn = fromPositive n
in normalOrder (leqnat_ # cn # cn )
== tru_
, scProp "leqnat 2: antisymmetry" 3 $
\n m -> let cn = fromPositive n
cm = fromPositive m
premis = and_ # (leqnat_ # cn # cm ) # (leqnat_ # cm # cn)
conclusion = fromBool (cn == cm)
in normalOrder (imp_ # premis # conclusion) == tru_
, scProp "leqnat 3: transitivity" 3 $
\n m o -> let cn = fromPositive n
cm = fromPositive m
co = fromPositive o
premis = and_ # (leqnat_ # cn # cm ) # (leqnat_ # cm # co)
conclusion = leqnat_ # cn # cm
in normalOrder (imp_ # premis # conclusion) == tru_
]
eqnat_Tests = testGroup "eqnat_Tests"
[ scProp "eqnat 1 (reflexivity)" 10 $
\n -> let cn = fromPositive n
in normalOrder (eqnat_ # cn # cn )
== tru_
, scProp "eqnat 2 (symmetry)" 3 $
\n m -> let cn = fromPositive n
cm = fromPositive m
premis = eqnat_ # cn # cm
conclusion = eqnat_ # cm # cn
in normalOrder (imp_ # premis # conclusion) == tru_
, scProp "eqnat 3 (transitivity)" 3 $
    \n m o -> let cn         = fromPositive n
                  cm         = fromPositive m
                  co         = fromPositive o
                  premis     = and_ # (eqnat_ # cn # cm ) # (eqnat_ # cm # co)
                  conclusion = eqnat_ # cn # co
              in normalOrder (imp_ # premis # conclusion) == tru_
]
fac_Tests = testGroup "fac_Tests"
[ testCase "fac 0 1" $
normalOrder (fac_ # zro_) @=? one_
, testCase "fac 1 1" $
normalOrder (fac_ # one_) @=? one_
, testCase "fac 2 2" $
normalOrder (fac_ # n2_) @=? (normalOrder n2_)
, scProp "fac_ golden" 3 $
\n -> let cn = SC.getNonNegative n
in normalOrder (fac_ # (unsafeFromInt cn)) == unsafeFromInt (fac cn)
]
where
fac x = if x == 0 then 1 else x * fac (pred x)
-- -----------------------------------------------------------------------------
letrecTests = testGroup "Let Tests"
[ testCase "Let 1 (identity)" $
let ltc = Let ("i", i_) (Var"i")
in normalOrder ltc @=? i_
]
-- -----------------------------------------------------------------------------
-- -----------------------------------------------------------------------------
-- helper functions
unsafeFromInt :: Int -> Exp String
unsafeFromInt = maybe (error "unsafeFromInt") id . fromInt
fromInt :: Int -> Maybe (Exp String)
fromInt i | i < 0 = Nothing
| otherwise = Just $ "f" ! "x" ! go i
where
go 0 = Var "x"
go i = Var "f" # go (pred i)
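-- For example, fromInt 2 builds the Church numeral \f -> \x -> f (f x),
-- and fromInt (-1) is Nothing.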
toInt :: Exp String -> Maybe Int
toInt (Lam f (Lam x body)) = go body
where
go (Var x) = Just 0
go (Var f `App` arg) = succ <$> go arg
go _ = Nothing
toInt _ = Nothing
unsafeToInt :: Exp String -> Int
unsafeToInt = maybe (error "unsafeToInt") id . toInt
fromBool :: Bool -> Exp String
fromBool True = tru_
fromBool False = fls_
toBool :: Exp String -> Maybe Bool
toBool exp | exp == tru_ = Just True
| exp == fls_ = Just False
| otherwise = Nothing
unsafeToBool :: Exp String -> Bool
unsafeToBool = maybe (error "unsafeToBool") id . toBool
fromChar :: Char -> Exp String
fromChar = unsafeFromInt . ord
toChar :: Exp String -> Maybe Char
toChar = fmap chr . toInt
unsafeToChar :: Exp String -> Char
unsafeToChar = maybe (error "unsafeToChar") id . toChar
-- -----------------------------------------------------------------------------
scProp :: SC.Testable IO a => String -> SC.Depth -> a -> TestTree
scProp s d = SC.testProperty s . SC.changeDepth (const d)
fromNonNegative :: SC.NonNegative Int -> Exp String
fromNonNegative = unsafeFromInt . SC.getNonNegative
fromPositive :: SC.Positive Int -> Exp String
fromPositive = unsafeFromInt . SC.getPositive
interval :: (Enum a, Num a) => a -> a -> QC.Gen a
interval l u = QC.oneof . fmap return $ [l .. u]
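-- For example, 'interval 0 250' picks one value from [0 .. 250], each with
-- equal probability.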
| julmue/UntypedLambda | test/Language/Lambda/Semantics/Named/BigStep/Tests.hs | bsd-3-clause | 11,361 | 0 | 18 | 3,565 | 3,697 | 1,881 | 1,816 | 273 | 3 |
--------------------------------------------------------------------------------
-- |
-- Module : Sequence.Location
-- Copyright : (c) [2009..2010] Trevor L. McDonell
-- License : BSD
--
-- Locate sequences fragments from index keys
--
--------------------------------------------------------------------------------
module Sequence.Location (SKey, lookup) where
import Mass
import Sequence.Fragment
import Util.Misc
import Prelude hiding (lookup)
import Numeric.Search.Range
import qualified Data.ByteString.Lazy as L
import qualified Data.Vector.Generic as G
--------------------------------------------------------------------------------
-- Database Search
--------------------------------------------------------------------------------
type SKey = Int
--
-- Find the index of the first element in the sorted array whose value is
-- strictly greater than the given search element. Binary search, O(log n).
--
searchVector :: (Ord a, G.Vector v a) => v a -> a -> Maybe Int
searchVector vec x =
searchFromTo (\i -> vec G.! i > x) 0 (G.length vec - 1)
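--
-- For example, with a sorted vector [1,3,5] and search element 3 the result
-- is Just 2: index 2 holds the first value strictly greater than 3.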
--
-- Locate a particular sequence in the database
--
lookup :: SequenceDB -> SKey -> Maybe Fragment
lookup db k = do
-- Index of the sequence this fragment derives from
--
seqIdx <- searchVector (G.tail (dbFragSeg db)) (fromIntegral k)
-- Extract the supporting information for the fragment
--
let (res,c,n) = dbFrag db G.! k
[a,b] = G.toList $ G.slice seqIdx 2 (dbIonSeg db)
hdr = dbHeader db G.! seqIdx
aa = G.toList $ G.slice (fromIntegral c) (fromIntegral (n-c+1)) (dbIon db)
ca = if c > a then dbIon db G.! (fromIntegral c-1) else c2w '-'
na = if n < b-1 then dbIon db G.! (fromIntegral n+1) else c2w '-'
return $ Fragment (res + massH + massH2O) hdr (L.pack $ [ca,c2w '.'] ++ aa ++ [c2w '.',na])
| tmcdonell/hfx | src/haskell/Sequence/Location.hs | bsd-3-clause | 1,869 | 0 | 16 | 383 | 498 | 275 | 223 | 22 | 3 |
module Playground01 where
import Data.List
simple :: t -> t
simple x = x
calcChange :: (Num t, Ord t) => t -> t -> t
calcChange owed given = if change > 0
then change
else 0
where change = given - owed
doublePlusTwo :: Num t => t -> t
doublePlusTwo x = doubleX + 2
where doubleX = x * 2
yorga :: Integer
yorga = 9
doubleMe :: Num a => a -> a
doubleMe x = x * 2
-- (\x -> x) "hi"
body :: Ord t => t -> t -> t
body sumSquare squareSum = if sumSquare > squareSum
then sumSquare
else squareSum
body' :: Ord a => a -> a -> a
body' = (\sumSquare squareSum ->
if sumSquare > squareSum
then sumSquare
else squareSum)
body'' :: Num a => a -> a -> a
body'' = (\x y -> x + y)
body''' :: Num a => a -> a -> a
body''' x y = x + y
sumSquareOrSquareSum :: (Ord a, Num a) => a -> a -> a
sumSquareOrSquareSum x y = body' (x^2 + y^2) ((x+y)^2)
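-- e.g. sumSquareOrSquareSum 2 3 == 25, because (2+3)^2 = 25 beats 2^2 + 3^2 = 13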
sumSquareOrSquareSum' :: (Num t, Ord t) => t -> t -> t
sumSquareOrSquareSum' x y = let sumSquare = (x^2 + y^2)
squareSum = ((x+y)^2)
in
if sumSquare > squareSum
then sumSquare
else squareSum
overwrite :: Num t => t1 -> t
overwrite x = let x = 2
in
let x = 3
in
let x = 4
in
x
k :: Integer
k = 10
add :: Integer -> Integer
add y = y + k
add' :: Num a => a -> a
add' y = (\k -> y + k) 3
add'' :: Num a => t -> a
add'' y = (\y ->
(\k -> y + k ) 1
) 2
--Q2.2
counter :: Num a => t -> a
counter x = let x = x + 1
in
let x = x + 1
in
x
counter' :: Num a => a -> a
counter' x = (\x -> x + 1) x
counter'' :: Num a => a -> a
counter'' x = (\x ->
(\x -> x + 1) x + 1
) x
--listing 3.3
ifEven :: Integral t => (t -> t) -> t -> t
ifEven myFunction x = if even x
then myFunction x
else x
inc :: Num a => a -> a
inc n = n + 1
double :: Num a => a -> a
double n = n * 2
square :: Num a => a -> a
square n = n ^ 2
ifEvenInc :: Integral t => t -> t
ifEvenInc n = ifEven inc n
ifEvenDouble :: Integral t => t -> t
ifEvenDouble n = ifEven double n
ifEvenSquare :: Integral t => t -> t
ifEvenSquare n = ifEven square n
-- example using a lambda
ifEvenLambdaDouble :: Integral k => k -> k
ifEvenLambdaDouble k' = ifEven (\x -> x * 2) k'
--QUICK CHECK 3.1 Write a lambda function for cubing x and pass it to ifEven
ifEvenLambdaCube :: Integral a => a -> a
ifEvenLambdaCube k' = ifEven (\x -> x ^ 3) k'
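-- e.g. applying it to 4 gives 64, while 3 is returned unchanged (odd input)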
--listing 3.4
names :: [([Char], [Char])]
names = [("Ian", "Curtis"),
("Bernard","Sumner"),
("Peter", "Hook"),
("Stephen","Morris")]
-- Listing 3.4 names
sortedNames :: [(String, String)]
sortedNames = sort names
-- lastNamesComparator :: Ord(a,a) => a -> a -> Ordering
lastNamesComparator :: Ord a1 => (a2, a1) -> (a, a1) -> Ordering
--lastNamesComparator :: Ord a1 => ((a2, a1), (a, a1)) -> Ordering
lastNamesComparator name1 name2 =
let lastName1 = snd name1
in
let lastName2 = snd name2
in
if lastName1 > lastName2
then GT
else if lastName1 < lastName2
then LT
else EQ
-- Listing 3.5 compareLastNames
compareLastNames :: Ord a1 => (a2, a1) -> (a, a1) -> Ordering
compareLastNames name1 name2 = if lastName1 > lastName2
then GT
else if lastName1 < lastName2
then LT
else EQ
where lastName1 = snd name1
lastName2 = snd name2
-- example
-- compareLastNames ((\n -> head n) names) ((\n -> head n) names)
-- usage
-- sortBy compareLastNames names
-- QUICK CHECK 3.2 In compareLastNames
-- you didn’t handle the case of having two last names that are the same
-- but with different first names. Modify the compareLastNamesfunction to compare first names and use it to fix
-- compareLastNames.
compareLastNames' :: Ord a => ([a], [a]) -> ([a], [a]) -> Ordering
compareLastNames' name1 name2 = if complexName1 > complexName2
then GT
else if complexName1 < complexName2
then LT
else EQ
where lastName1 = snd name1
lastName2 = snd name2
firstName1 = fst name1
firstName2 = fst name2
complexName1 = lastName1 ++ firstName1
complexName2 = lastName2 ++ firstName2
| stefanocerruti/haskell-primer-alpha | src/Playground01.hs | bsd-3-clause | 5,208 | 0 | 12 | 2,245 | 1,530 | 817 | 713 | 118 | 3 |
-- |
-- Module : Network.Machine.Protocol.SMTP
-- Copyright : Lodvær 2015
-- License : BSD3
--
-- Maintainer : Lodvær <[email protected]>
-- Stability : provisional
-- Portability : unknown
--
-- SMTP machines.
module Network.Machine.Protocol.SMTP where
-- TODO
| lodvaer/machines-network | src/Network/Machine/Protocol/SMTP.hs | bsd-3-clause | 285 | 0 | 3 | 59 | 19 | 17 | 2 | 1 | 0 |
module Paths_simple (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
version :: Version
version = Version [0,1,0,0] []
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/home/chuck/Documents/simple/.stack-work/install/x86_64-linux/lts-6.17/7.10.3/bin"
libdir = "/home/chuck/Documents/simple/.stack-work/install/x86_64-linux/lts-6.17/7.10.3/lib/x86_64-linux-ghc-7.10.3/simple-0.1.0.0-3KtLGDpFPvjB6VnwWh4q2E"
datadir = "/home/chuck/Documents/simple/.stack-work/install/x86_64-linux/lts-6.17/7.10.3/share/x86_64-linux-ghc-7.10.3/simple-0.1.0.0"
libexecdir = "/home/chuck/Documents/simple/.stack-work/install/x86_64-linux/lts-6.17/7.10.3/libexec"
sysconfdir = "/home/chuck/Documents/simple/.stack-work/install/x86_64-linux/lts-6.17/7.10.3/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "simple_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "simple_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "simple_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "simple_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "simple_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| Chuck-Aguilar/haskell-opencv-work | .stack-work/dist/x86_64-linux/Cabal-1.22.5.0/build/autogen/Paths_simple.hs | bsd-3-clause | 1,617 | 0 | 10 | 177 | 362 | 206 | 156 | 28 | 1 |
{-# LANGUAGE ForeignFunctionInterface #-}
module Network.UV
( Loop
, defaultLoop
, createLoop
, run
) where
import Foreign.C
import Foreign.Ptr
import Network.UV.TCP
import Network.UV.Internal.UV
foreign import ccall unsafe "uv_loop_new" c_uv_loop_new
:: IO (Ptr a)
foreign import ccall unsafe "uv_default_loop" c_uv_default_loop
:: IO (Ptr a)
foreign import ccall unsafe "uv_run" c_uv_run
:: Ptr a -> IO CInt
-- | Get the default loop.
--
-- This function always returns the same loop.
defaultLoop :: IO Loop
defaultLoop = do
ptr <- c_uv_default_loop
return $ Loop ptr
-- | Create a new loop.
createLoop :: IO Loop
createLoop = do
ptr <- c_uv_loop_new
return $ Loop ptr
-- | Run the given loop.
run :: Loop -> IO ()
run (Loop ptr) = c_uv_run ptr >> return ()
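-- A minimal usage sketch (illustrative):
--
-- > main :: IO ()
-- > main = do
-- >     loop <- defaultLoop
-- >     run loop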
| aardvarrk/hlibuv | src/Network/UV.hs | bsd-3-clause | 835 | 0 | 8 | 201 | 219 | 117 | 102 | 26 | 1 |
-- | Allows the abstract syntax tree to be compiled into C++ template
-- metaprogram.
module Compiler.Compile
(
-- * Module compiling
compileModule
, compileTopLevel
-- * Top level entities compiling
, compileType
, compileDataDef
, compileValDef
-- * Expression compiling
, compileExpr
)
where
import Data.Char
import Data.List
import Compiler.AST
import Utility
-- | Left opening brace surrounded by newlines.
lbrace :: String
lbrace = "\n{\n"
-- | Right closing brace surrounded by newlines.
rbrace :: String
rbrace = "\n};\n"
-- | Print a @struct@ given its name and contents.
--
-- >>> putStrLn $ struct "s" "int x;"
-- struct s
-- {
-- int x;
-- };
struct :: String -- ^ Name of the @struct@.
-> String -- ^ Content of the @struct@.
-> String
struct name content = concat ["struct ", name, lbrace, content, rbrace]
-- | Print a @template@ given its name, contents and the (only) template
-- argument.
--
-- >>> putStrLn $ template "x" "s" "int y;"
-- template <typename x>
-- struct s
-- {
-- int y;
-- };
template :: String -- ^ Name of the template argument.
-> String -- ^ Name of the @struct@.
-> String -- ^ Content of the @struct@.
-> String
template arg name content
= "template <typename " ++ arg ++ ">\n" ++ struct name content
-- | Print a @template@ @struct@ forward declaration.
--
-- >>> putStrLn $ fwdTemplate "s"
-- template <typename>
-- struct s;
fwdTemplate :: String -- ^ Name of the @struct@.
-> String
fwdTemplate name = "template <typename>\nstruct " ++ name ++ ";\n"
-- | Print a @typedef@ which identifies type expression @what@ with @type@.
--
-- >>> putStrLn $ typedef "int"
-- typedef int type;
typedef :: String -- ^ Type expression.
-> String
typedef what = "typedef " ++ what ++ " " ++ ty ++ ";"
-- | Print a type expression extracting inner @type@ from another type.
--
-- >>> putStrLn $ innerType "vector"
-- typename vector::type
innerType :: String -- ^ Type expression.
-> String
innerType what = "typename " ++ what ++ "::" ++ ty
-- | Print a nested hierarchy of @struct@ures used for lambda abstraction.
--
-- >>> putStrLn $ innerApply "x" "s" "int x;"
-- struct s
-- {
-- struct type
-- {
-- template <typename x>
-- struct apply
-- {
-- int x;
-- };
-- };
-- };
innerApply :: String -- ^ Name of the template argument.
-> String -- ^ Name of the @struct@.
-> String -- ^ Content of the @struct@.
-> String
innerApply arg name = struct name . struct ty . template arg apply
-- | Print a list of declarations.
--
-- This is just a name-flavored 'concat'.
decls :: [String] -- ^ List of declarations.
-> String
decls = concat
-- | 'String' constant for inner @template@ used for lambda abstractions.
apply :: String
apply = "apply"
-- | 'String' constant for inner @typedef@s.
ty :: String
ty = "type"
-- | 'String' constant for the @dummy@ type.
dummy :: String
dummy = "__dummy"
-- | Compile whole module.
--
-- Compiles all top level entities contained in module.
compileModule :: Module -> String
compileModule (Module m) = intercalate sep $ map compileTopLevel m
where
sep = "\n\n"
-- | Compile a top level declaration.
compileTopLevel :: TopLevel -> String
compileTopLevel tl = case tl of
Data dd -> compileDataDef dd
Value vd -> compileValDef vd
Type _ -> "" -- Types are erased.
Assume _ -> "" -- So are type assumptions.
-- | Compile a type signature.
--
-- Since type signatures have no correspondence in the template C++ code,
-- no C++ code is produced.
compileType :: TypeSig -> String
compileType _ = ""
-- | Compile a data definition.
--
-- Note that since this language doesn't allow pattern matching, this
-- function will automatically define an appropriate eliminator for the
-- data type.
compileDataDef :: DataDef -> String
compileDataDef (DataDef (TyCon tyConName _) variants) = decls
[ intercalate sep $ zipWith defineCtor variants [0 ..]
, defineElim variants
]
where
sep = "\n\n"
localArg n = "__ctor_arg" ++ show n
localStruct n = "__ctor_local" ++ show n
ctorStruct = "__ctor_top_local"
elimStruct = "__elim_top_local"
primDataStruct = "__data"
applyAlt = "apply_alt"
-- Compile a single data constructor.
defineCtor :: Variant -- ^ Data constructor.
               -> Int      -- ^ Index of the constructor (used as its tag).
-> String
defineCtor (DataCon cname ts) n = struct cname . decls $
[ go 0 ctorStruct [] ts
, typedef $ innerType ctorStruct
]
where
go :: Int -> String -> [String] -> [Type] -> String
go _ name args [] = struct name . typedef . concat $
[ primDataStruct
, "<"
, show n
, ", "
, dummy
, concatMap ((", " ++) . innerType) (reverse args)
, ">"
]
go u name args (_:rest) = innerApply localA name . decls $
[ go (u + 1) localS (localA:args) rest
, typedef $ innerType localS
]
where
localA = localArg u
localS = localStruct u
-- Compile an eliminator for the whole data type.
defineElim :: [Variant] -> String
defineElim vs = struct (firstToLower tyConName) . decls $
[ go 0 elimStruct [] vs
, typedef $ innerType elimStruct
]
where
firstToLower [] = []
firstToLower (c:cs) = toLower c:cs
go :: Int -- ^ Numeric suffix for @struct@s.
-> String -- ^ Outer @struct@ name.
-> [String] -- ^ Names of eliminator arguments.
-> [Variant] -- ^ Data constructors.
-> String
go _ name args [] =
struct name . struct ty . decls . intersperse "\n" $
[ fwdTemplate applyAlt
]
++ zipWith3 handleCase vs (reverse args) [0 ..]
++ [ template typeArg apply . typedef . innerType . concat $
[ applyAlt
, "<"
, innerType typeArg
, ">"
]
]
where
typeArg = "__type_arg"
go u name args (_:rest) = innerApply localA name . decls $
[ go (u + 1) localS (localA:args) rest
, typedef $ innerType localS
]
where
localA = localArg u
localS = localStruct u
-- Compile a @template@ specialization which deconstructs @n@-th
-- constructor and applies the corresponding elimination function to
-- all its fields.
handleCase :: Variant -- ^ 'Variant' to be compiled.
-> String -- ^ Argument name.
-> Int -- ^ Argument position.
-> String
handleCase (DataCon _ ts) arg n = concat
[ "template <typename "
, dummy
, concatMap (", typename " ++) args
, ">\nstruct "
, applyAlt
, "<"
, primDataStruct
, "<"
, show n
, ", "
, dummy
, concatMap (", " ++) args
, "> >"
, lbrace
, decls $
wrapFields
++ [ compileExpr localS . foldl1 App . map Var $
arg:map (extra ++) args
, typedef $ innerType localS
]
, rbrace
]
where
-- Names of all constructor fields.
args = zipWith (const $ (fieldA ++) . show) ts [0 :: Int ..]
-- Create a wrapper @struct@ures so the data type can
-- contain the values directly rather than just the
-- expression names.
--
-- This would otherwise lead to all kinds of problems with
-- expressions not being interchangeable even though their
-- values are.
wrapFields = map wrapStruct args
where
wrapStruct name = struct (extra ++ name) $
typedef name
fieldA = "__field_arg"
localS = "__local_case"
-- Prefix for the wrapped structures.
extra = "__extra"
-- | Compile a value definition.
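--
-- For example, a definition whose body is a plain variable compiles to
-- (output derived from 'struct', 'typedef' and 'innerType' above):
--
-- >>> putStrLn $ compileValDef (ValueDef "foo" (Var "bar"))
-- struct foo
-- {
-- struct __def
-- {
-- typedef typename bar::type type;
-- };
-- typedef typename __def::type type;
-- };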
compileValDef :: ValueDef -> String
compileValDef (ValueDef name expr) =
struct name . decls $
[ compileExpr defStruct expr
, typedef . innerType $ defStruct
]
where
defStruct = "__def"
-- | Compile an expression, given the name of the @struct@ it should be declared in.
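--
-- For example, a bare variable simply re-exports its inner @type@:
--
-- >>> putStrLn $ compileExpr "s" (Var "x")
-- struct s
-- {
-- typedef typename x::type type;
-- };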
compileExpr :: String -- ^ Name of the @struct@.
-> Expr
-> String
compileExpr = go (0 :: Int)
where
localStruct n = "__local" ++ show n
leftStruct n = "__left" ++ show n
rightStruct n = "__right" ++ show n
go :: Int -- ^ Numeric suffix for @struct@s.
-> String -- ^ Outer @struct@ name.
-> Expr -- ^ Expression to be compiled.
-> String
go _ name (Var v) =
struct name . typedef . innerType $ v
go u name (Lam x expr) = innerApply x name . decls $
[ go (u + 1) local expr
, typedef $ innerType local
]
where
local = localStruct u
go u name (App e1 e2) =
struct name . decls $
[ go (u + 1) left e1
, go (u + 1) right e2
, typedef . concat $
[ "typename "
, left
, "::type::template apply<"
, right
, ">::type"
]
]
where
left = leftStruct u
right = rightStruct u
go u name (Let dec expr) =
struct name . decls $
map compileValDef dec
++ [ go (u + 1) local expr
, typedef $ innerType local
]
where
local = localStruct u
go u name (SetType expr _) = go u name expr
go _ name (NumLit n) =
struct name . typedef $ "Int<" ++ show n ++ ">"
go _ name (BoolLit b) =
struct name . typedef $ "Bool<" ++ uncap (show b) ++ ">"
-- Fixed point operator is transformed into language primitive
-- "fix" and a lambda abstraction.
go u name (Fix x expr) = go u name (App (Var "fix") (Lam x expr))
| vituscze/norri | src/Compiler/Compile.hs | bsd-3-clause | 10,491 | 0 | 17 | 3,685 | 2,135 | 1,163 | 972 | 196 | 8 |
{-# LANGUAGE
DeriveGeneric
, OverloadedStrings
#-}
module Lib where
import Control.Monad
import Data.Aeson
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Yaml
import GHC.Generics
import Network.Nats
import System.Exit
import System.IO
import Build
import Debug.Trace
data ServerConfig = ServerConfig
{ natsConf :: NatsConf
} deriving( Eq, Generic, Show )
instance FromJSON ServerConfig
instance ToJSON ServerConfig
data NatsConf = NatsConf
{ natsHost :: String
, natsPort :: Int
} deriving( Eq, Generic, Show )
instance FromJSON NatsConf
instance ToJSON NatsConf
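-- An illustrative config file shape, following the generic JSON/YAML encoding
-- of 'ServerConfig' (field names are used verbatim; the host and port values
-- below are only example values):
--
-- > natsConf:
-- >   natsHost: "127.0.0.1"
-- >   natsPort: 4222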
-- | Read config from the yaml file at the given path.
-- @
-- readConfig "examples/config.yaml"
-- @
readConfig :: FilePath -> IO (Either String ServerConfig)
readConfig filePath = do
eitherConf <- decodeFileEither filePath
case eitherConf of
Left ex -> return . Left $
prettyPrintParseException ex
Right conf -> return $ Right conf
-- | Spin up the server.
launch :: FilePath -> IO ()
launch fp = do
eitherConf <- readConfig fp
case eitherConf of
Left err -> putStrLn err
Right conf -> launch' conf
launch' :: ServerConfig -> IO ()
launch' conf = do
let addr = buildAddr conf
nats <- trace addr $ connect addr
sid <- subscribe nats "build" Nothing $ \_ _ msg _ -> buildCb msg
loop nats
where
buildAddr :: ServerConfig -> String
buildAddr conf =
let nconf = natsConf conf
in "nats://" ++ (natsHost nconf) ++ ":" ++ (show $ natsPort nconf)
loop :: Nats -> IO ()
loop nats = do
str <- getLine
if str == "exit" then
exitWith ExitSuccess
else do
-- Allow manual testing for now
publish nats "build" $ BL.pack str
loop nats
buildCb :: BL.ByteString -> IO ()
buildCb msg = case eitherDecode msg of
Left err -> error $ show err -- FIXME: Remove this!
Right buildSpec -> runBuild buildSpec >> return () -- FIXME: What!!?? Why!?
| wayofthepie/cspawn | src/Lib.hs | bsd-3-clause | 2,119 | 0 | 14 | 562 | 609 | 307 | 302 | 62 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
module Retcon.Network.Server where
import Control.Applicative
import Control.Concurrent
import Control.Concurrent.Async
import Control.Error.Util ()
import qualified Control.Exception as E
import Control.Lens hiding (Context, coerce)
import Control.Monad
import Control.Monad.Catch
import Control.Monad.Error.Class
import Control.Monad.Reader
import Control.Monad.Trans.Except
import qualified Data.Aeson.Diff as D
import Data.Binary
import qualified Data.ByteString as BS hiding (unpack)
import qualified Data.ByteString.Char8 as BS (unpack)
import Data.ByteString.Lazy (fromStrict, toStrict)
import qualified Data.ByteString.Lazy as LBS
import Data.Coerce
import Data.Either
import qualified Data.List as L
import Data.List.NonEmpty hiding (filter, length, map)
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid
import Data.String
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Traversable ()
import System.Log.Logger
import qualified System.Metrics as Ekg
import qualified System.Remote.Monitoring as Ekg
import System.ZMQ4
import Retcon.Configuration
import Retcon.DataSource (runDSMonad)
import qualified Retcon.DataSource as DS
import Retcon.Diff
import Retcon.Document
import Retcon.Identifier
import Retcon.Monad
import Retcon.Network.Ekg
import Retcon.Network.Protocol
import Retcon.Store
import Retcon.Store.PostgreSQL
type ErrorMsg = String
--------------------------------------------------------------------------------
-- * Server
data ServerState = ServerState
{ _serverContext :: Context -- ^ ZMQ context
, _serverSocket :: Socket Rep -- ^ ZMQ socket
, _serverConfig :: Configuration -- ^ retcond config
, _serverStore :: PGStore -- ^ Internal data store, shared between all server threads
, _serverEkgServer :: Ekg.Server -- ^ Ekg server
}
makeLenses ''ServerState
-- | Name of server component for logging.
logName :: String
logName = "Retcon.Server"
-- | Spawn a thread serving the retcon API and a number of worker threads
-- to process requests accepted by that server.
--
spawnServer :: Configuration -> Int -> IO ()
spawnServer cfg n = do
_ <- bracket start stop $ \state -> do
-- Spawn a server implementing the protocol and some workers
api <- spawnServerAPI state
peasants <- spawnServerWorkers state
-- Ensures workers die if the API server does.
mapM_ (link2 api) peasants
-- Wait for all of the API server or worker threads to finish.
mapM_ wait (api:peasants)
return ()
where
spawnServerAPI :: ServerState -> IO (Async ())
spawnServerAPI = async . flip runProtocol protocol
spawnServerWorkers :: ServerState -> IO [Async ()]
spawnServerWorkers state
= replicateM n (async $ worker (_serverStore state) cfg)
start :: IO ServerState
start = do
noticeM logName "Starting Server"
-- Setup ekg
ekgStore <- Ekg.newStore
initialiseMeters ekgStore cfg
ekgServer <- Ekg.forkServerWith ekgStore "localhost" 8888
let (zmq_conn, _, pg_conn) = configServer cfg
ctx <- context
sock <- socket ctx Rep
bind sock zmq_conn
db <- initBackend (PGOpts pg_conn)
return $ ServerState ctx sock cfg db ekgServer
stop :: ServerState -> IO ()
stop state = do
closeBackend $ state ^. serverStore
close $ state ^. serverSocket
term $ state ^. serverContext
killThread $ Ekg.serverThreadId $ state ^. serverEkgServer
noticeM logName "Stopped Server"
--------------------------------------------------------------------------------
-- * Protocol Implementation
-- | A monad which wraps up some state, some error handling, and some IO to
-- implement the server side of retcond.
newtype Protocol a = Proto
{ unProtocol :: ExceptT APIError (ReaderT ServerState IO) a }
deriving (Applicative, Functor, Monad, MonadError APIError,
MonadIO, MonadReader ServerState)
instance MonadThrow Protocol where
throwM = liftIO . E.throwIO
instance MonadCatch Protocol where
(Proto (ExceptT m)) `catch` f = Proto . ExceptT $ m `catch` (runExceptT . unProtocol . f)
-- | Execute a 'Protocol' action.
runProtocol :: ServerState -> Protocol a -> IO a
runProtocol s act = do
res <- flip runReaderT s . runExceptT . unProtocol $ act
case res of
Left e -> throwM e
Right a -> return a
-- | Server protocol handler.
protocol :: Protocol ()
protocol = loop
where
loop = do
sock <- _serverSocket <$> ask
-- Read a response from the client.
cmd <- liftIO $ receiveMulti sock
-- Decode and process it.
(status, resp) <- case cmd of
[hdr, req] -> catchAndInject . join $
dispatch <$> (toEnum <$> decodeStrict hdr)
<*> pure (fromStrict req)
_ -> throwError InvalidNumberOfMessageParts
-- Send the response to the client.
liftIO . sendMulti sock . fromList $ [encodeStrict status, resp]
-- Play it again, Sam.
loop
dispatch
:: SomeHeader
-> LBS.ByteString
-> Protocol (Bool, BS.ByteString)
dispatch (SomeHeader hdr) body =
(True,) <$> case hdr of
HeaderConflicted -> encodeStrict <$> listConflicts (decode body)
HeaderResolve -> encodeStrict <$> resolveConflict (decode body)
HeaderChange -> encodeStrict <$> notify (decode body)
InvalidHeader -> return . encodeStrict $ InvalidResponse
-- Catch exceptions and inject them into the monad as errors.
--
-- TODO: Chain together the catching and handling of difference Exception
-- types and return more specific errors, if available.
catchAndInject
:: Protocol (Bool, BS.ByteString)
-> Protocol (Bool, BS.ByteString)
catchAndInject act = catchError (catch act injectSomeException) reportAPIError
where
injectSomeException
:: (MonadIO m, MonadError APIError m)
=> SomeException
-> m a
injectSomeException e = do
liftIO . errorM logName . fromString $
"Intercepted error to forward to client: " <> show e
throwError UnknownServerError
-- Handle an error in executing operations by sending it back to the client.
reportAPIError
:: APIError
-> Protocol (Bool, BS.ByteString)
reportAPIError e = do
liftIO . errorM logName . fromString $
"Could not process message: " <> show e
return (False, toStrict . encode . fromEnum $ e)
-- | Process a request for unresolved conflicts.
listConflicts
:: RequestConflicted
-> Protocol ResponseConflicted
listConflicts RequestConflicted = do
liftIO $ infoM logName "Listing conflicts"
conflicts <- liftIO . lookupConflicts =<< view serverStore
return $ ResponseConflictedSerialised $ fmap mkRespItem conflicts
where mkRespItem ConflictResp{..}
= ResponseConflictedSerialisedItem
_conflictRawDoc
_conflictRawDiff
(coerce _conflictDiffID)
(coerce _conflictRawOps)
-- | Process and resolve a conflict.
resolveConflict
:: RequestResolve
-> Protocol ResponseResolve
resolveConflict (RequestResolve diffID opIDs) = do
store <- view serverStore
liftIO . infoM logName $ "Resolving conflict: " <> show diffID
new <- composeNewDiff store
liftIO $ addWork store (WorkApplyPatch diffID $ new ^. patchDiff)
return ResponseResolve
where
composeNewDiff store = do
things <- liftIO $ lookupDiffConflicts store opIDs
return $ Patch Unamed $ D.Patch $ map _ops things
-- | Notification of a change to be queued for processing.
notify
:: RequestChange
-> Protocol ResponseChange
notify (RequestChange nt) = do
let ent_name = nt ^. notificationEntity
src_name = nt ^. notificationSource
fid = nt ^. notificationForeignID
cfg <- _serverConfig <$> ask
store <- _serverStore <$> ask
liftIO . infoM logName . T.unpack $
"Received change notification for: " <> ename ent_name <> "." <>
sname src_name <> "/" <> fid
let m_ds = do
Entity{..} <- M.lookup ent_name (configEntities cfg)
M.lookup src_name entitySources
case m_ds of
Nothing -> do
liftIO . errorM logName . T.unpack $ "Unknown entity or source: "
<> ename ent_name <> "." <> sname src_name
throwError UnknownKeyError
Just _ ->
liftIO . addWork store . WorkNotify $ ForeignKey ent_name src_name fid
return ResponseChange
--------------------------------------------------------------------------------
-- * Asynchronous Server Workers
-- | Get a work item from the work queue, apply a constant backoff if there is
-- nothing in the queue.
--
-- This function does not return until a work item becomes available.
getWorkBackoff :: Store store => store -> IO (WorkItemID, WorkItem)
getWorkBackoff store = do
work <- getWork store
case work of
Nothing -> threadDelay 50000 >> getWorkBackoff store
Just x -> return x
-- | A worker for the retcond server.
-- These workers cannot die, they simply log any errors and keep going.
--
worker :: Store store => store -> Configuration -> IO ()
worker store cfg = go
where -- Get a work item from the queue, mark it as busy and try to complete it.
-- If all goes well, mark the work as finished when done, otherwise signal
-- it as free.
--
go = do
bracketOnError getIt ungetIt completeIt
go
getIt = liftIO $ getWorkBackoff store
ungetIt = ungetWork store . fst
completeIt (work_id, item) = do
case item of
WorkNotify fk -> do
          liftIO . debugM logName $ "Processing a notification: " <> show fk
          processNotification store cfg fk
WorkApplyPatch did a_diff -> do
liftIO . debugM logName $ "Processing a diff: " <> show did
processDiff store cfg did a_diff
completeWork store work_id
-- notifications
processNotification :: Store store => store -> Configuration -> ForeignKey -> IO ()
processNotification store cfg fk@(ForeignKey{..}) = do
let x = do e <- M.lookup fkEntity (configEntities cfg)
d <- M.lookup fkSource (entitySources e)
return (e, d)
case x of
Nothing -> liftIO . criticalM logName $ "Unknown key in workqueue: " <> show fk
Just (entity, source) -> do
ik <- liftIO $ lookupInternalKey store fk
doc <- runDSMonad $ DS.readDocument source fk
let allSources = M.elems (entitySources entity)
sources = L.delete source allSources
liftIO $ case (ik, doc) of
(Nothing, Left e) -> notifyProblem (RetconUnknown $ show e)
(Nothing, Right d) -> notifyCreate store sources fk d
(Just i, Left _) -> notifyDelete store sources i
(Just i, Right _) -> notifyUpdate store allSources i (entityPolicy entity)
-- | Creates a new internal document to reflect a new foreign change. Update
-- all given data sources of the change.
--
-- Caller is responsible for: ensuring the datasources exclude the one from
-- which the event originates.
--
notifyCreate :: Store store => store -> [DataSource] -> ForeignKey -> Document -> IO ()
notifyCreate store datasources fk@(ForeignKey{..}) doc@(Document{..}) = do
infoM logName $ "CREATE: " <> show fk
-- Create an internal key associated with the new document
ik <- createInternalKey store fkEntity
recordForeignKey store ik fk
-- Create an initial document
recordInitialDocument store ik doc
-- Update other sources in the entity
forM_ datasources (createDoc ik)
-- Update ekg
incCreates fkEntity
where createDoc ik ds = do
x <- runDSMonad
$ DS.createDocument ds
$ graftDoc doc ds
case x of
Left e -> errorM logName ("notifyCreate: " <> show e)
Right f -> recordForeignKey store ik f
graftDoc Document{..} DataSource{..}
= Document sourceEntity sourceName _documentContent
-- | Deletes internal document to reflect the foreign change. Update
-- all given data sources of the change.
--
-- Caller is responsible for: ensuring the datasources exclude the one from
-- which the event originates.
--
notifyDelete
:: Store store => store
-> [DataSource]
-> InternalKey
-> IO ()
notifyDelete store datasources ik = do
infoM logName $ "DELETE: " <> show ik
forM_ datasources deleteDoc
-- Update ekg
incDeletes $ ikEntity ik
where deleteDoc ds = do
f <- lookupForeignKey store (sourceName ds) ik
case f of
Nothing -> do
warningM logName $ "notifyDelete: unable to find foreign key for internal ID " <> show ik <> "."
Just fk -> do
-- Delete the document
hushBoth $ runDSMonad $ DS.deleteDocument ds fk
-- Delete known foreign key
void $ deleteForeignKey store fk
-- Delete internal bookkeeping
void $ deleteInitialDocument store ik
deleteInternalKey store ik
-- | Updates internal document to reflect the foreign change. Update
-- all given data sources of the change.
--
notifyUpdate
:: Store store => store
-> [DataSource]
-> InternalKey
-> MergePolicy
-> IO ()
notifyUpdate store datasources ik policy = do
infoM logName $ "UPDATE: " <> show ik
-- Fetch documents from all data sources.
docs <- mapM (getDocument store ik) datasources
let (_missing, valid) = partitionEithers docs
-- Load (or calculate) the initial document.
initial <- lookupInitialDocument store ik >>=
maybe (calculate valid) return
-- Extract and merge patches.
let (merged, rejects) = mergeAll policy $ map (diff policy initial) valid
if null rejects
then debugM logName $ "No rejected changes processing " <> show ik
else infoM logName $ "Rejected " <> show (length rejects) <> " changes in " <> show ik
if (null rejects) && (mempty == view patchDiff merged)
then debugM logName $ "Empty diff for " <> show ik <> ", skipping."
else do
-- Record changes in history.
did <- recordDiffs store ik (merged, rejects)
infoM logName $ "Recorded diff " <> show did <> " against " <> show ik
-- Update and save the documents.
let docs' = map (patch policy merged . either (const initial) id) docs
failures <- lefts <$> mapM (setDocument store ik) (L.zip datasources docs')
mapM_ (\e -> errorM logName $ "setDocument error: " <> e) failures
-- Update initial document.
let initial' = patch policy merged initial
recordInitialDocument store ik initial'
-- Update ekg
incUpdates $ ikEntity ik
where
calculate :: [Document] -> IO Document
calculate docs = do
infoM logName $ "No initial document for " <> show ik <> "."
return . either (const $ emptyDocument (ikEntity ik) "<initial>") id
$ calculateInitialDocument docs
-- | Logs a problem with the notification.
notifyProblem :: RetconError -> IO ()
notifyProblem = errorM logName . (<>) "notifyProblem: " . show
-- | Apply a 'Patch' to resolve the conflicts on a previous update.
--
-- TODO(thsutton) We need to check the diff hasn't already been resolved.
processDiff
:: (Store store, MonadIO m, Functor m)
=> store
-> Configuration
-> DiffID
-> D.Patch
-> m ()
processDiff store cfg diffID resolveDiff = do
res <- runExceptT act
case res of
Left e -> liftIO . errorM logName $ e
Right () -> return ()
where
act = do
liftIO . infoM logName $ "Resolving errors in diff " <> show diffID
conflict <- getConflict
let en = EntityName . T.decodeUtf8 $ conflict ^. diffEntity
ik = InternalKey en $ conflict ^. diffKey
resolvePatch = Patch Unamed resolveDiff
(policy, srcs) <- getSources en
-- 0. Load and update the initial document.
initial <- liftIO $ fromMaybe (emptyDocument en "<initial>")
<$> lookupInitialDocument store ik
let initial' = patch policy resolvePatch initial
-- 1. Apply the patch to all sources.
forM_ srcs $ \src -> liftIO $ do
doc <- either (const initial') id
<$> getDocument store ik src
res <- setDocument store ik (src, patch policy resolvePatch doc)
case res of
Right _ -> return ()
Left e -> errorM logName $ "setDocument error: " <> e
-- 2. Record the updated initial document.
liftIO $ recordInitialDocument store ik initial'
-- 3. Mark the conflicted patch as resolved.
let resolveKeys = getKeys (D.patchOperations resolveDiff)
conflictKeys = getKeys $ conflict ^. diffConflicts
liftIO $
if resolveKeys == conflictKeys
then do infoM logName $ "Mark as resolved diff " <> show diffID
resolveDiffs store diffID
else do infoM logName $ "Reduce diff " <> show diffID
reduceDiff store diffID resolveKeys
getKeys = L.nub . L.sort . map D.changePath
getConflict = do
conf <- liftIO $ lookupDiff store diffID
case conf of
Nothing -> throwError
$ "Cannot resolve diff "
<> show diffID
<> " because it doesn't exist."
Just v -> return v
getSources en = do
let things = do
e <- M.lookup en (configEntities cfg)
return (entityPolicy e, map snd . M.toList . entitySources $ e)
case things of
Nothing -> throwError
$ "Cannot resolve diff "
<> show diffID
<> " because there are no "
<> "sources for "
<> show en
<> "."
Just x -> return x
--------------------------------------------------------------------------------
-- * Data source functions
-- | Get the 'Document' corresponding to an 'InternalKey' from a 'DataSource'.
getDocument
:: Store store
=> store
-> InternalKey
-> DataSource
-> IO (Either ErrorMsg Document)
getDocument store ik ds = do
f <- lookupForeignKey store (sourceName ds) ik
case f of
Nothing -> return (Left $ "getDocument: No foreign key found for internal ID " <> show ik <> ".")
Just fk -> fmap (over _Left show) . DS.runDSMonad $ DS.readDocument ds fk
-- | Set the 'Document' in the given 'DataSource' corresponding to an 'InternalKey'.
setDocument
:: Store store
=> store
-> InternalKey
-> (DataSource, Document)
-> IO (Either ErrorMsg ForeignKey)
setDocument store ik (ds, doc) = do
f <- lookupForeignKey store (sourceName ds) ik
case f of
Nothing -> return (Left $ "setDocument: No foreign key found for internal ID " <> show ik <> ".")
Just fk -> fmap (over _Left show) . DS.runDSMonad $ DS.updateDocument ds fk doc
-- | Merge a sequence of 'Patch'es by applying a 'MergePolicy'.
--
mergeAll
:: MergePolicy
-> [Patch]
-> (Patch, [RejectedOp])
mergeAll pol =
foldr (\p1 (p2, r) -> (r <>) <$> merge pol p1 p2)
(emptyPatch, mempty)
extractDiff
:: Document
-> Document
-> Patch
extractDiff = diff ignoreConflicts
--------------------------------------------------------------------------------
-- * Utility functions
-- | Decode a serializable value from a strict 'ByteString'.
--
-- If the bytestring cannot be decoded, a 'DecodeError' is thrown.
decodeStrict
:: (MonadIO m, MonadError APIError m, Binary a)
=> BS.ByteString
-> m a
decodeStrict bs =
case decodeOrFail . fromStrict $ bs of
Left _ -> do
liftIO . warningM logName . BS.unpack $
"Decode failure for input: " <> bs
throwError DecodeError
Right (_, _, x) -> return x
-- | Encode a serialisable value into a strict 'ByteString'.
encodeStrict
:: (Binary a)
=> a
-> BS.ByteString
encodeStrict = toStrict . encode
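-- Note: 'encodeStrict' and 'decodeStrict' are intended to round-trip, e.g.
-- decoding @encodeStrict (42 :: Int)@ yields 42 (or a 'DecodeError' on
-- malformed input).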
-- | Silences both errors (via logging) and results.
--
hushBoth :: Show a => IO (Either a b) -> IO ()
hushBoth act = act >>= \x -> case x of
Left e -> errorM logName (show e)
Right _ -> return ()
| anchor/retcon | lib/Retcon/Network/Server.hs | bsd-3-clause | 21,317 | 0 | 19 | 6,072 | 5,110 | 2,549 | 2,561 | 420 | 6 |