code (string, 5..1.03M) | repo_name (string, 5..90) | path (string, 4..158) | license (string, 15 classes) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE MonoLocalBinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PartialTypeSignatures #-}
module MonoLocalBinds where
monoLoc :: forall a. a -> ((a, String), (a, String))
monoLoc x = (g True , g 'v')
where
-- g :: b -> (a, String) -- #1
g :: b -> (a, _) -- #2
g y = (x, "foo")
-- For #2, we should infer the same type as in #1.
| urbanslug/ghc | testsuite/tests/partial-sigs/should_compile/MonoLocalBinds.hs | bsd-3-clause | 363 | 0 | 8 | 85 | 93 | 57 | 36 | 8 | 1 |
module ScratchPad where
import Data.List
import Formulas
import Lambda
import Maps
vartps :: ULT -> TCtx
vartps t =
let (_,t') = unlambdas t in
let (_,_,g,_) = synth t' in
g
termsig :: ULT -> [Int]
termsig t =
map length $ map (\(x,tp) -> linearizeType tp) (vartps t)
-- verified for 1<=n<=8
test n =
let ts = allcNLTex n in
let ms = map snd (genROM_tutte' n)
in
(sort $ map sort $ map termsig ts) ==
(sort $ map ((sort . map length) . verticesOM) ms)
-- verified for 1<=n<=9
test2 n =
let ts = allcNPTnb True n in
let a000139' n = if n == 0 then 1 else
2*fact (3*n) `div` (fact (2*n+1) * fact(n+1)) in
let nsts =
[t | t <- ts, flip all (t:subnormal_subnormals t) $ \t' ->
let (xs,u) = unlambdas t' in
length xs <= 1 + length (snd (unapps u []))] in
toInteger (length nsts) == a000139' (toInteger (n-1))
| noamz/linlam-gos | src/ScratchPad.hs | mit | 888 | 0 | 25 | 246 | 467 | 236 | 231 | 27 | 2 |
{-# LANGUAGE TupleSections #-}
module Examples.Simple where
import Circuit
import Circuit.Builder
import Circuit.Utils
import Control.Monad
export :: Gate g => [(String, [IO (String, Circuit g)])]
export = [ ("simple", [("simple",) <$> return simple])
, ("sub1", [("sub1",) <$> return (subCirc 1)])
, ("sub10", [("sub10",) <$> return (subCirc 10)])
, ("and1", [("and1",) <$> return (andCirc 1)])
, ("and10", [("and10",) <$> return (andCirc 10)])
, ("and100", [("and100",) <$> return (andCirc 100)])
, ("and1000", [("and1000",) <$> return (andCirc 1000)])
, ("xor1", [("xor1",) <$> return (xorCirc 1)])
, ("xor10", [("xor10",) <$> return (xorCirc 10)])
, ("xor100", [("xor100",) <$> return (xorCirc 100)])
, ("xor1000", [("xor1000",) <$> return (xorCirc 1000)])
, ("xorand2", [("xorand2",) <$> return (xorAndCirc 2)])
, ("xorand10", [("xorand10",) <$> return (xorAndCirc 10)])
, ("xorand50", [("xorand50",) <$> return (xorAndCirc 50)])
, ("xorand100", [("xorand100",) <$> return (xorAndCirc 100)])
, ("xorand1000", [("xorand1000",) <$> return (xorAndCirc 1000)])
, ("simple2", [("simple2",) <$> return simple2])
, ("sym3", [("sym3",) <$> return simpleSym3])
, ("sym1", [("sym1",) <$> return (simpleSym 1)])
, ("sym2", [("sym2",) <$> return (simpleSym 2)])
, ("sym10", [("sym10",) <$> return (simpleSym 10)])
, ("sym100", [("sym100",) <$> return (simpleSym 100)])
, ("and-secret", [("and-secret",) <$> return andSecret])
, ("and-const", [("and-const",) <$> return andConst])
, ("xor-secret", [("xor-secret",) <$> return xorSecret])
, ("xor-const", [("xor-const",) <$> return xorConst])
, ("andtree", [ ("andtree8",) <$> return (andTree 8)
, ("andtree16",) <$> return (andTree 16)
, ("andtree32",) <$> return (andTree 32)
, ("andtree64",) <$> return (andTree 64)
, ("andtree128",) <$> return (andTree 128)
, ("andtree256",) <$> return (andTree 256)
, ("andtree512",) <$> return (andTree 512)
])
]
xorCirc :: Gate g => Int -> Circuit g
xorCirc n = buildCircuit (symbol (n+1) >>= foldM1 circXor >>= output)
andCirc :: Gate g => Int -> Circuit g
andCirc n = buildCircuit (symbol (n+1) >>= foldM1 circMul >>= output)
subCirc :: Gate g => Int -> Circuit g
subCirc n = buildCircuit (symbol (n+1) >>= foldM1 circSub >>= output)
andSecret :: Gate g => Circuit g
andSecret = buildCircuit $ do
x <- input
y <- secret 1
output =<< circAnd x y
andConst :: Gate g => Circuit g
andConst = buildCircuit $ do
x <- input
y <- constant 1
output =<< circMul x y
xorSecret :: Gate g => Circuit g
xorSecret = buildCircuit $ do
x <- input
y <- secret 1
output =<< circXor x y
xorConst :: Gate g => Circuit g
xorConst = buildCircuit $ do
x <- input
y <- constant 1
output =<< circXor x y
xorAndCirc :: Gate g => Int -> Circuit g
xorAndCirc n = buildCircuit $ do
cs <- safeChunksOf (div n 2) <$> symbol n
output =<< circProd =<< zipWithM circXor (cs !! 0) (cs !! 1)
simple :: Gate g => Circuit g
simple = buildCircuit $ do
x1 <- symbol 2
x2 <- sigma 3
ys <- secrets [0,1,0,1,0]
one <- constant 1
w <- circProd =<< zipWithM circAdd (x1++x2) ys
w' <- circNot w
z <- circMul w' w
z <- circMul z w
output z
simple2 :: Gate g => Circuit g
simple2 = buildCircuit $ do
cs <- safeChunksOf 4 <$> symbol 8
d <- zipWithM circXor (cs !! 0) (cs !! 1)
output =<< circProd d
simpleSym :: Gate g => Int -> Circuit g
simpleSym n = buildCircuit $ do
xs <- symbol n
ys <- symbol n
zs <- zipWithM circXor xs ys
output =<< circProd zs
simpleSym3 :: Gate g => Circuit g
simpleSym3 = buildCircuit $ do
let n = 10
xs <- symbol n
ys <- symbol n
ws <- symbol n
zs <- zipWithM circXor ws =<< zipWithM circXor xs ys
output =<< circProd zs
andTree :: Gate g => Int -> Circuit g
andTree n = buildCircuit $ do
xs <- symbol n
ys <- symbol n
zs <- symbol n
ws <- zipWithM circXor xs =<< zipWithM circXor ys zs
output =<< circProd ws
| spaceships/circuit-synthesis | src/Examples/Simple.hs | mit | 4,511 | 0 | 12 | 1,379 | 1,877 | 978 | 899 | 107 | 1 |
module Do1 where
x = do x; x
| Lemmih/haskell-tc | tests/Do1.hs | mit | 30 | 0 | 6 | 9 | 16 | 9 | 7 | 2 | 1 |
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto (RefreshQueuesRequestProto(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data RefreshQueuesRequestProto = RefreshQueuesRequestProto{}
deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
instance P'.Mergeable RefreshQueuesRequestProto where
mergeAppend RefreshQueuesRequestProto RefreshQueuesRequestProto = RefreshQueuesRequestProto
instance P'.Default RefreshQueuesRequestProto where
defaultValue = RefreshQueuesRequestProto
instance P'.Wire RefreshQueuesRequestProto where
wireSize ft' self'@(RefreshQueuesRequestProto)
= case ft' of
10 -> calc'Size
11 -> P'.prependMessageSize calc'Size
_ -> P'.wireSizeErr ft' self'
where
calc'Size = 0
wirePut ft' self'@(RefreshQueuesRequestProto)
= case ft' of
10 -> put'Fields
11 -> do
P'.putSize (P'.wireSize 10 self')
put'Fields
_ -> P'.wirePutErr ft' self'
where
put'Fields
= do
Prelude'.return ()
wireGet ft'
= case ft' of
10 -> P'.getBareMessageWith update'Self
11 -> P'.getMessageWith update'Self
_ -> P'.wireGetErr ft'
where
update'Self wire'Tag old'Self
= case wire'Tag of
_ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> RefreshQueuesRequestProto) RefreshQueuesRequestProto where
getVal m' f' = f' m'
instance P'.GPB RefreshQueuesRequestProto
instance P'.ReflectDescriptor RefreshQueuesRequestProto where
getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList [])
reflectDescriptorInfo _
= Prelude'.read
"DescriptorInfo {descName = ProtoName {protobufName = FIName \".hadoop.yarn.RefreshQueuesRequestProto\", haskellPrefix = [MName \"Hadoop\",MName \"Protos\"], parentModule = [MName \"YarnServerResourceManagerServiceProtos\"], baseName = MName \"RefreshQueuesRequestProto\"}, descFilePath = [\"Hadoop\",\"Protos\",\"YarnServerResourceManagerServiceProtos\",\"RefreshQueuesRequestProto.hs\"], isGroup = False, fields = fromList [], descOneofs = fromList [], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False, makeLenses = False}"
instance P'.TextType RefreshQueuesRequestProto where
tellT = P'.tellSubMessage
getT = P'.getSubMessage
instance P'.TextMsg RefreshQueuesRequestProto where
textPut msg = Prelude'.return ()
  textGet = Prelude'.return P'.defaultValue
| alexbiehl/hoop | hadoop-protos/src/Hadoop/Protos/YarnServerResourceManagerServiceProtos/RefreshQueuesRequestProto.hs | mit | 2,983 | 1 | 16 | 535 | 554 | 291 | 263 | 53 | 0 |
module XMonadConfig
(
Colors(..),
colors
) where
data Colors = Colors
{
normal :: String,
active :: String,
inactive :: String,
urgent :: String,
highlight :: String,
background :: String
}
colors = Colors { normal = "#CCCCCC", active = "#45C0F5", inactive = "#CEFFAC", urgent = "#FF0000", highlight = "#FFFFCC", background = "#202020" }
| justinhoward/dotfiles | modules/xmonad/installed-config/lib/XMonadConfig.hs | mit | 355 | 20 | 8 | 71 | 125 | 78 | 47 | 12 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE DeriveGeneric #-}
module Primary where
import Prelude hiding (Word)
import GHC.Generics (Generic)
import Data.Serialize
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
instance Serialize Text where
put = put . encodeUtf8
get = decodeUtf8 <$> get
data Language
= Greek
| Hebrew
| Latin
deriving (Show, Generic)
instance Serialize Language
data Division = Division
{ divisionBook :: Maybe Integer
, divisionChapter :: Maybe Integer
, divisionVerse :: Maybe Integer
, divisionSection :: Maybe Integer
, divisionLine :: Maybe Integer
}
deriving (Show, Generic)
instance Serialize Division
data Milestone
= MilestoneParagraph
| MilestoneDivision Division
| MilestoneCard Integer
deriving (Show, Generic)
instance Serialize Milestone
data Word = Word
{ wordPrefix :: Text
, wordSurface :: Text
, wordSuffix :: Text
}
deriving (Show, Generic)
instance Serialize Word
data Content
= ContentMilestone Milestone
| ContentWord Word
deriving (Show, Generic)
instance Serialize Content
data Source = Source
{ sourceId :: Text
, sourceTitle :: Text
, sourceAuthor :: Maybe Text
, sourceLicense :: [Text]
, sourceContents :: [Content]
}
deriving (Show, Generic)
instance Serialize Source
data Group = Group
{ groupId :: Text
, groupLanguage :: Language
, groupTitle :: Text
, groupDescription :: [Text]
, groupSources :: [Source]
}
deriving (Show, Generic)
instance Serialize Group
| ancientlanguage/haskell-analysis | primary-type/src/Primary.hs | mit | 1,533 | 0 | 9 | 295 | 424 | 244 | 180 | 58 | 0 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.HTMLTrackElement (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.HTMLTrackElement
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.HTMLTrackElement
#else
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/HTMLTrackElement.hs | mit | 361 | 0 | 5 | 33 | 33 | 26 | 7 | 4 | 0 |
infixr 5 :::
data CList a = CNil
| a ::: (CList a)
deriving (Show)
toList CNil = []
toList (x ::: rs) = x : toList rs
fromList [] = CNil
fromList (x:rs) = x ::: fromList rs
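-- Usage sketch (illustrative): the converters round-trip, e.g.
--   toList (1 ::: 2 ::: 3 ::: CNil) == [1,2,3]
--   toList (fromList "abc")         == "abc"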
| Kroisse/haskell_practices | oldies/customlist.hs | mit | 192 | 0 | 8 | 58 | 97 | 51 | 46 | 8 | 1 |
main = do
print $ "abcde"
print $ ['a', 'b', 'c', 'd', 'e']
print $ ['a' .. 'e']
print $ 'a' : "bcde"
print $ 'a' : 'b' : "cde"
print $ "abc" ++ "de"
print $ "abcde" !! 3
| shigemk2/haskell_abc | string.hs | mit | 185 | 0 | 9 | 54 | 96 | 48 | 48 | 8 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.AllVideoCapabilities
(js_getSourceType, getSourceType, js_getSourceId, getSourceId,
js_getWidth, getWidth, js_getHeight, getHeight, js_getFrameRate,
getFrameRate, js_getAspectRatio, getAspectRatio, js_getFacingMode,
getFacingMode, AllVideoCapabilities, castToAllVideoCapabilities,
gTypeAllVideoCapabilities)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"sourceType\"]"
js_getSourceType :: AllVideoCapabilities -> IO JSVal
-- | <https://developer.mozilla.org/en-US/docs/Web/API/AllVideoCapabilities.sourceType Mozilla AllVideoCapabilities.sourceType documentation>
getSourceType ::
(MonadIO m, FromJSString result) =>
AllVideoCapabilities -> m [result]
getSourceType self
= liftIO ((js_getSourceType (self)) >>= fromJSValUnchecked)
foreign import javascript unsafe "$1[\"sourceId\"]" js_getSourceId
:: AllVideoCapabilities -> IO JSVal
-- | <https://developer.mozilla.org/en-US/docs/Web/API/AllVideoCapabilities.sourceId Mozilla AllVideoCapabilities.sourceId documentation>
getSourceId ::
(MonadIO m, FromJSString result) =>
AllVideoCapabilities -> m [result]
getSourceId self
= liftIO ((js_getSourceId (self)) >>= fromJSValUnchecked)
foreign import javascript unsafe "$1[\"width\"]" js_getWidth ::
AllVideoCapabilities -> IO (Nullable CapabilityRange)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/AllVideoCapabilities.width Mozilla AllVideoCapabilities.width documentation>
getWidth ::
(MonadIO m) => AllVideoCapabilities -> m (Maybe CapabilityRange)
getWidth self = liftIO (nullableToMaybe <$> (js_getWidth (self)))
foreign import javascript unsafe "$1[\"height\"]" js_getHeight ::
AllVideoCapabilities -> IO (Nullable CapabilityRange)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/AllVideoCapabilities.height Mozilla AllVideoCapabilities.height documentation>
getHeight ::
(MonadIO m) => AllVideoCapabilities -> m (Maybe CapabilityRange)
getHeight self = liftIO (nullableToMaybe <$> (js_getHeight (self)))
foreign import javascript unsafe "$1[\"frameRate\"]"
js_getFrameRate ::
AllVideoCapabilities -> IO (Nullable CapabilityRange)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/AllVideoCapabilities.frameRate Mozilla AllVideoCapabilities.frameRate documentation>
getFrameRate ::
(MonadIO m) => AllVideoCapabilities -> m (Maybe CapabilityRange)
getFrameRate self
= liftIO (nullableToMaybe <$> (js_getFrameRate (self)))
foreign import javascript unsafe "$1[\"aspectRatio\"]"
js_getAspectRatio ::
AllVideoCapabilities -> IO (Nullable CapabilityRange)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/AllVideoCapabilities.aspectRatio Mozilla AllVideoCapabilities.aspectRatio documentation>
getAspectRatio ::
(MonadIO m) => AllVideoCapabilities -> m (Maybe CapabilityRange)
getAspectRatio self
= liftIO (nullableToMaybe <$> (js_getAspectRatio (self)))
foreign import javascript unsafe "$1[\"facingMode\"]"
js_getFacingMode :: AllVideoCapabilities -> IO JSVal
-- | <https://developer.mozilla.org/en-US/docs/Web/API/AllVideoCapabilities.facingMode Mozilla AllVideoCapabilities.facingMode documentation>
getFacingMode ::
(MonadIO m, FromJSString result) =>
AllVideoCapabilities -> m [result]
getFacingMode self
  = liftIO ((js_getFacingMode (self)) >>= fromJSValUnchecked)
| manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/AllVideoCapabilities.hs | mit | 4,340 | 42 | 10 | 634 | 896 | 515 | 381 | 66 | 1 |
{-|
Module : Jupyter.Install.Internal
Description : Utilities for installing Jupyter kernels (internal implementation).
Copyright : (c) Andrew Gibiansky, 2016
License : MIT
Maintainer : [email protected]
Stability : stable
Portability : POSIX
This module exposes the internal implementation for "Jupyter.Install".
For user-facing documentation, please check out "Jupyter.Install" instead.
-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Jupyter.Install.Internal where
-- Imports from 'base'
import Control.Exception (Exception, IOException, catch, throwIO)
import Control.Monad (void, unless, when, foldM)
import Data.Maybe (isJust)
import System.Environment (getExecutablePath)
import System.IO (withFile, IOMode(..))
import Text.Read (readMaybe)
-- Imports from 'directory'
import System.Directory (findExecutable, getTemporaryDirectory, removeDirectoryRecursive,
createDirectoryIfMissing, copyFile, doesDirectoryExist,
canonicalizePath, doesFileExist)
-- Imports from 'process'
import System.Process (readProcess)
-- Imports from 'unordered-containers'
import qualified Data.HashMap.Lazy as HashMap
--
-- Imports from 'containers'
import Data.Map (Map)
import qualified Data.Map as Map
-- Imports from 'aeson'
import Data.Aeson ((.=), object, encode, eitherDecode, FromJSON(..), Value(..), (.:))
import Data.Aeson.Types (Parser)
-- Imports from 'bytestring'
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Lazy.Char8 as CBS
-- Imports from 'text'
import Data.Text (Text)
import qualified Data.Text as T
-- | A /kernelspec/ is a description of a kernel which tells the Jupyter command-line application
-- how to install the kernel and tells the frontends how to invoke the kernel (command line flags,
-- environment, etc).
--
-- More documentation about kernelspecs is located in the
-- <http://jupyter-client.readthedocs.io/en/latest/kernels.html#kernelspecs official documentation>.
data Kernelspec =
Kernelspec
{ kernelspecDisplayName :: Text -- ^ Name for the kernel to be shown in frontends, e.g.
-- \"Haskell\".
, kernelspecLanguage :: Text -- ^ Language name for the kernel, used to refer to this kernel
-- (in command-line arguments, URLs, etc), e.g. "haskell".
, kernelspecCommand :: FilePath -> FilePath -> [String]
-- ^ How to invoke the kernel. Given the path to the currently running executable
-- and connection file, this function should return the full command to invoke the
-- kernel. For example:
--
-- > \exe connectionFile -> [exe, "kernel", "--debug", "--connection-file", connectionFile]
, kernelspecJsFile :: Maybe FilePath -- ^ (optional) path to a Javascript file (kernel.js)
-- to provide to the Jupyter notebook.
-- This file is loaded upon notebook startup.
, kernelspecLogoFile :: Maybe FilePath -- ^ (optional) path to a 64x64 PNG file to display
-- as the kernel logo in the notebook.
, kernelspecEnv :: Map Text Text -- ^ Additional environment variables to set when invoking
-- the kernel. If no additional environment variables are
-- required, pass @'Data.Map.fromList' []@ or
-- 'Data.Monoid.mempty'.
}
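-- An illustrative kernelspec value (not part of this module) for a
-- hypothetical \"mylang\" kernel might look like the following sketch:
--
-- > exampleKernelspec :: Kernelspec
-- > exampleKernelspec = Kernelspec
-- >   { kernelspecDisplayName = "MyLang"
-- >   , kernelspecLanguage    = "mylang"
-- >   , kernelspecCommand     = \exe connFile ->
-- >       [exe, "kernel", "--connection-file", connFile]
-- >   , kernelspecJsFile      = Nothing
-- >   , kernelspecLogoFile    = Nothing
-- >   , kernelspecEnv         = Map.fromList []
-- >   }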
-- | Whether the installation was successful.
data InstallResult = InstallSuccessful -- ^ Kernelspec installation was successful.
| InstallFailed Text -- ^ Kernelspec installation failed, with the reason for failure provided.
deriving (Eq, Ord, Show)
-- | Whether to install the kernel globally or just for the current user.
--
-- This corresponds to the @--user@ flag for @jupyter kernelspec install@.
data InstallUser = InstallLocal -- ^ Install this kernel just for this user.
| InstallGlobal -- ^ Install this kernel globally.
deriving (Eq, Ord, Show)
-- | An exception type for expected exceptions whenever the @jupyter kernelspec@ command is used.
newtype JupyterKernelspecException = JupyterKernelspecException Text
deriving (Eq, Ord, Show)
-- | 'JupyterKernelspecException's can be thrown when an expected failure occurs during @jupyter kernelspec@
-- command invocation.
instance Exception JupyterKernelspecException
-- | Version of Jupyter currently running, detected by running @jupyter --version@.
--
-- When a version number is not present it is assumed to be zero, so 4.1 is equivalent to 4.1.0.
data JupyterVersion =
JupyterVersion
{ versionMajor :: Int -- ^ Major version number.
, versionMinor :: Int -- ^ Minor version number.
, versionPatch :: Int -- ^ Patch version number.
}
deriving (Eq, Ord, Show)
-- | Convert a 'JupyterVersion' to its original displayed form.
--
-- >>> showVersion (JupyterVersion 4 1 1)
-- "4.1.1"
showVersion :: JupyterVersion -> String
showVersion (JupyterVersion major minor patch) =
concat [show major, ".", show minor, ".", show patch]
-- | Install a 'Kernelspec' using @jupyter kernelspec install@.
--
-- This function expects the @jupyter@ command to be on the user's PATH, and will fail if
-- the @jupyter@ command is either unavailable or is a version incompatible with this library.
--
-- More documentation about kernelspecs is located in the
-- <http://jupyter-client.readthedocs.io/en/latest/kernels.html#kernelspecs Jupyter documentation>
-- and by running @jupyter kernelspec install --help@.
installKernel :: InstallUser -- ^ Whether the kernel should be installed for only the current user (with @--user@) or globally
-> Kernelspec -- ^ The kernelspec to install
-> IO InstallResult -- ^ Installation result, potentially with a friendly error message
installKernel installUser kernelspec = tryInstall `catch` handleInstallFailure
where
tryInstall :: IO InstallResult
tryInstall = do
jupyterPath <- which "jupyter"
verifyJupyterCommand jupyterPath
installKernelspec installUser jupyterPath kernelspec
return InstallSuccessful
handleInstallFailure :: JupyterKernelspecException -> IO InstallResult
handleInstallFailure (JupyterKernelspecException message) = return $ InstallFailed message
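-- A minimal usage sketch (the kernelspec value and messages are illustrative,
-- not part of this module):
--
-- > main :: IO ()
-- > main = do
-- >   result <- installKernel InstallLocal exampleKernelspec
-- >   case result of
-- >     InstallSuccessful -> putStrLn "Kernel installed."
-- >     InstallFailed msg -> putStrLn ("Installation failed: " ++ T.unpack msg)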
-- | Throw a 'JupyterKernelspecException' with a given error message.
installFailed :: String -> IO a
installFailed = throwIO . JupyterKernelspecException . T.pack
-- | Determine the absolute path to an executable on the PATH.
--
-- Throws a 'JupyterKernelspecException' if the executable cannot be found.
which :: FilePath -> IO FilePath
which cmd = do
mPath <- findExecutable cmd
case mPath of
Just path -> canonicalizePath path
Nothing ->
installFailed $ "Could not find '" ++
cmd ++
"' command on system PATH; please install it."
-- | Verify that a proper version of Jupyter is installed.
--
-- Throws a 'JupyterKernelspecException' if @jupyter@ is not present, is an incompatible version, or
-- otherwise cannot be used with this library.
verifyJupyterCommand :: FilePath -> IO ()
verifyJupyterCommand jupyterPath = do
versionInfo <- runJupyterCommand jupyterPath ["--version"]
case parseVersion versionInfo of
Nothing -> installFailed $ "Could not parse output of 'jupyter --version': " ++ versionInfo
Just jupyterVersion ->
unless (jupyterVersionSupported jupyterVersion) $
installFailed $
"Invalid Jupyter version: Jupyter version 3.0 or higher required, found "
++ showVersion jupyterVersion
-- | Run a @jupyter@ subcommand with no standard input.
--
-- Throws a 'JupyterKernelspecException' if the command cannot be run or returns a non-zero exit code.
runJupyterCommand :: FilePath -> [String] -> IO String
runJupyterCommand jupyterPath args = readProcess jupyterPath args "" `catch` handler
where
handler :: IOException -> IO String
handler _ =
installFailed $
concat
[ "Could not run '"
, jupyterPath
, " "
, unwords args
, "'. "
, "Please make sure Jupyter is installed and functional."
]
-- | Is this Jupyter version supported?
jupyterVersionSupported :: JupyterVersion -> Bool
jupyterVersionSupported JupyterVersion{..} = versionMajor >= 3
-- | Given a directory, populate it with all necessary files to run @jupyter kernelspec install@.
--
-- Currently created files include:
-- * @kernel.js@: (optional) Javascript to include in the notebook frontend.
-- * @logo-64x64.png@: (optional) Small logo PNG to include in the notebook frontend UI.
-- * @kernel.json@: (required) JSON file containing kernel invocation command and other metadata.
--
-- The passed in directory is created and should not exist; if it already exists, it will be
-- deleted.
prepareKernelspecDirectory :: Kernelspec -> FilePath -> IO ()
prepareKernelspecDirectory kernelspec dir = do
-- Make sure the directory doesn't already exist. If we didn't delete the directory, then later
-- kernelspec installs may inherit files created by previous kernelspec installs.
exists <- doesDirectoryExist dir
when exists $ removeDirectoryRecursive dir
createDirectoryIfMissing True dir
copyKernelspecFiles kernelspec
generateKernelJSON kernelspec
where
-- Copy files indicated by the Kernelspec data type into the directory.
copyKernelspecFiles :: Kernelspec -> IO ()
copyKernelspecFiles Kernelspec { .. } = do
whenJust kernelspecJsFile $ \file -> copyFile file $ dir ++ "/kernel.js"
whenJust kernelspecLogoFile $ \file -> copyFile file $ dir ++ "/logo-64x64.png"
-- Generate the kernel.json data structure from the Kernelspec datatype.
generateKernelJSON :: Kernelspec -> IO ()
generateKernelJSON Kernelspec { .. } = do
exePath <- getExecutablePath
withFile (dir ++ "/kernel.json") WriteMode $
flip LBS.hPutStr $
encode $
object
[ "argv" .= kernelspecCommand exePath "{connection_file}"
, "display_name" .= kernelspecDisplayName
, "language" .= kernelspecLanguage
, "env" .= kernelspecEnv
]
whenJust :: Maybe a -> (a -> IO ()) -> IO ()
whenJust Nothing _ = return ()
whenJust (Just a) f = f a
-- | Install a kernelspec using @jupyter kernelspec install@.
--
-- Throws a 'JupyterKernelspecException' on failure.
installKernelspec :: InstallUser -- ^ Whether this kernel should be installed with or without @--user@
-> FilePath -- ^ Path to the @jupyter@ executable
-> Kernelspec -- ^ Kernelspec to install
-> IO ()
installKernelspec installUser jupyterPath kernelspec = do
tempDir <- getTemporaryDirectory
let kernelspecDir = tempDir ++ "/" ++ T.unpack (kernelspecLanguage kernelspec)
prepareKernelspecDirectory kernelspec kernelspecDir
let userFlag =
case installUser of
InstallLocal -> ["--user"]
InstallGlobal -> []
cmd = "kernelspec" : "install" : kernelspecDir : "--replace" : userFlag
void $ runJupyterCommand jupyterPath cmd
-- | Parse a Jupyter version string into a list of integers.
--
-- >>> parseVersion "4.1.3\n"
-- Just (JupyterVersion 4 1 3)
--
-- >>> parseVersion "XYZ"
-- Nothing
--
-- If minor or patch versions are unavailable, they are assumed to be zero:
--
-- >>> parseVersion "4.1"
-- Just (JupyterVersion 4 1 0)
--
-- >>> parseVersion "4"
-- Just (JupyterVersion 4 0 0)
parseVersion :: String -> Maybe JupyterVersion
parseVersion versionStr =
let versions = map (readMaybe . T.unpack) $ T.splitOn "." $ T.pack versionStr
parsed = all isJust versions
in if parsed
then case versions of
[x, y, z] -> JupyterVersion <$> x <*> y <*> z
[x, y] -> JupyterVersion <$> x <*> y <*> pure 0
[x] -> JupyterVersion <$> x <*> pure 0 <*> pure 0
_ -> Nothing
else Nothing
-- | Find the kernelspec for a kernel with a given language name.
--
-- If no such kernel exists, then 'Nothing' is returned. If an error occurs
-- while searching for Jupyter kernels, a 'JupyterKernelspecException' is thrown.
findKernel :: Text -> IO (Maybe Kernelspec)
findKernel language = do
Kernelspecs kernelspecs <- findKernelsInternal
maybe (return Nothing)
(fmap Just . checkKernelspecFiles)
(Map.lookup language kernelspecs)
-- | Find all kernelspecs that the Jupyter installation is aware of,
-- using the @jupyter kernelspec list@ command.
--
-- If an error occurs while searching for Jupyter kernels, a 'JupyterKernelspecException' is thrown.
findKernels :: IO [Kernelspec]
findKernels = do
Kernelspecs kernelspecs <- findKernelsInternal
mapM checkKernelspecFiles $ Map.elems kernelspecs
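-- A small usage sketch (illustrative): print the language name of every
-- installed kernel.
--
-- > printKernelLanguages :: IO ()
-- > printKernelLanguages =
-- >   findKernels >>= mapM_ (putStrLn . T.unpack . kernelspecLanguage)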
-- | Get all the installed kernelspecs using @jupyter kernelspec list --json@.
--
-- These kernelspecs must be passed through 'checkKernelspecFiles' before being returned to the
-- user.
findKernelsInternal :: IO Kernelspecs
findKernelsInternal = do
jupyterPath <- which "jupyter"
specsE <- eitherDecode . CBS.pack <$> runJupyterCommand jupyterPath
["kernelspec", "list", "--json"]
case specsE of
Left err -> throwIO $ JupyterKernelspecException $ T.pack err
Right specs -> return specs
-- | Kernelspecs can refer to files such as kernel.js and logo-64x64.png. Check whether the
-- kernelspec refers to that file; if it does, check that the file exists. If the file doesn't
-- exist, then remove it from the kernelspec.
checkKernelspecFiles :: Kernelspec -> IO Kernelspec
checkKernelspecFiles spec = do
let jsFile = kernelspecJsFile spec
logoFile = kernelspecLogoFile spec
kernelspecJsFile' <- checkFile jsFile
kernelspecLogoFile' <- checkFile logoFile
return spec { kernelspecJsFile = kernelspecJsFile', kernelspecLogoFile = kernelspecLogoFile' }
where
checkFile :: Maybe FilePath -> IO (Maybe FilePath)
checkFile Nothing = return Nothing
checkFile (Just file) = do
exists <- doesFileExist file
return $ if exists
then Just file
else Nothing
-- | A list of kernelspecs, obtained by running @jupyter kernelspec list --json@.
--
-- The list contains the name of the kernelspec mapped to the kernelspec itself.
newtype Kernelspecs = Kernelspecs (Map Text Kernelspec)
-- | Parse the output of @jupyter kernelspec list --json@.
instance FromJSON Kernelspecs where
parseJSON (Object outer) = do
inner <- outer .: "kernelspecs"
case inner of
Object innerObj ->
let items = HashMap.toList innerObj
in Kernelspecs <$> foldM accumKernelspecs mempty items
_ -> fail "Expecting object inside 'kernelspecs' key"
parseJSON _ = fail "Expecting object with 'kernelspecs' key"
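-- The JSON shape this instance expects (reconstructed from the parser above;
-- the kernel name and paths are illustrative) is roughly:
--
-- > {"kernelspecs":
-- >   {"mylang": {"resource_dir": "/path/to/kernels/mylang",
-- >               "spec": {"display_name": "MyLang",
-- >                        "language": "mylang",
-- >                        "argv": ["mylang-kernel", "{connection_file}"],
-- >                        "env": {}}}}}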
-- | Collect all kernelspecs from @jupyter kernelspec list --json@ into a single map.
accumKernelspecs :: Map Text Kernelspec -- ^ Previously seen kernelspecs
-> (Text, Value) -- ^ Kernelspec name and JSON value for it
-> Parser (Map Text Kernelspec) -- ^ Map with old kernelspecs and parsed new one
accumKernelspecs prev (name, val) = do
kernelspec <- parseKernelspec val
return $ Map.insert name kernelspec prev
-- | Parse a JSON 'Value' into a 'Kernelspec'.
parseKernelspec :: Value -> Parser Kernelspec
parseKernelspec v =
case v of
Object o -> do
dir <- o .: "resource_dir"
spec <- o .: "spec"
Kernelspec <$> spec .: "display_name"
<*> spec .: "language"
<*> (createCommand <$> spec .: "argv")
<*> pure (Just $ dir ++ "/kernel.js")
<*> pure (Just $ dir ++ "/logo-64x64.png")
<*> spec .: "env"
_ -> fail "Expecting object for kernelspec"
where
createCommand :: [Text] -> FilePath -> FilePath -> [String]
createCommand argv _ connFile =
flip map argv $ \val ->
case val of
"{connection_file}" -> connFile
_ -> T.unpack val
| gibiansky/jupyter-haskell | src/Jupyter/Install/Internal.hs | mit | 16,511 | 0 | 18 | 3,946 | 2,450 | 1,319 | 1,131 | 212 | 5 |
module Site.Types (
Channels,
Streams
) where
-- Channels
import qualified Data.Map as Map
import Control.Concurrent.STM.TChan
import Control.Concurrent.STM.TVar
-- Streams
import qualified System.IO.Streams as S
import qualified Data.ByteString.Char8 as C
-- the websocket server channels
type Channels = TVar (Map.Map String (TChan String, Integer))
-- the pigments server streams
type Streams = (S.OutputStream C.ByteString, S.InputStream C.ByteString)
| BlackBears/alanduncan.me | Pandoc/Types.hs | mit | 464 | 0 | 9 | 64 | 112 | 72 | 40 | 10 | 0 |
module Q22 where
range :: Int -> Int -> [Int]
range x y = case (rangeTest x y) of Left x -> x
Right y -> error y
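-- Example (illustrative): range 4 9 == [4,5,6,7,8,9]; range 9 4 calls 'error'.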
rangeTest :: Int -> Int -> Either [Int] String
rangeTest x y =
let
check :: Int -> Int -> Either [Int] String
check x y
| y < x = Right "y < x is not supported"
| otherwise = Left (rangeTest' x y)
rangeTest' :: Int -> Int -> [Int]
rangeTest' x y = if (x == y) then [x] else x:(rangeTest' (x + 1) y)
in
    check x y
| cshung/MiscLab | Haskell99/q22.hs | mit | 504 | 0 | 14 | 179 | 232 | 117 | 115 | 14 | 2 |
{-# LANGUAGE MagicHash, RecursiveDo #-}
module History(
truncateHistory, History,
{- Single-stop operations on snapshots. -}
runThread, setRegister, allocateMemory, setMemory, setMemoryProtection,
setTsc,
{- Queries on histories -}
threadAtAccess, threadState, replayState, fetchMemory, vgIntermediate,
nextThread, getRegisters, getRipAtAccess,
{- Trace operations -}
traceTo, controlTraceTo, traceToEvent,
{- Other stuff -}
initialHistory, destroyWorkerCache
) where
import Control.Monad.Error
import Data.Word
import Data.IORef
import System.IO.Unsafe
import Network.Socket
import Data.List
import System.Posix.Signals
import IO
import Numeric
import Data.Bits
import GHC.Conc hiding (ThreadId)
import GHC.Base hiding (assert)
import System.Mem.Weak (addFinalizer)
import qualified Debug.Trace
import Types
import Util
import Socket
import Logfile
import Debug
import Config
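{- A single step in a replay history: either run a thread up to a (possibly
   infinite) access number, or perform one of the state-changing operations
   (set a register, allocate or write memory, change memory protection, set
   the TSC, advance the log pointer, or run a syscall). -}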
data HistoryEntry = HistoryRun !ThreadId !(Topped AccessNr)
| HistorySetRegister !ThreadId !RegisterName !Word64
| HistoryAllocMemory !Word64 !Word64 !Word64
| HistorySetMemory !Word64 [Word8]
| HistorySetMemoryProtection !Word64 !Word64 !Word64
| HistorySetTsc !ThreadId !Word64
| HistoryAdvanceLog !LogfilePtr
| HistoryRunSyscall !ThreadId
deriving (Eq, Show, Read)
instance Render HistoryEntry where
renderS (HistorySetMemory ptr bytes) suffix =
("HistorySetMemory 0x" ++ (showHex ptr "") ++ " " ++ (show $ length bytes)) ++ suffix
renderS x s = shows x s
{- It is important that there be no references from HistoryWorker back
to the matching History, so that the finalisers run at the right
time. -}
data HistoryWorker = HistoryWorker { hw_dead :: TVar Bool,
hw_worker :: IORef (Maybe Worker),
hw_prev_lru :: TVar HistoryWorker,
hw_next_lru :: TVar HistoryWorker,
hw_is_root :: Bool }
data History = HistoryRoot HistoryWorker
| History { hs_parent :: History,
hs_entry :: HistoryEntry,
hs_worker :: HistoryWorker,
hs_ident :: Integer }
allocateHistoryIdent :: IO Integer
allocateHistoryIdent = do wc <- workerCache
(res:newIdents) <- readIORef $ wc_idents wc
writeIORef (wc_idents wc) newIdents
return res
heListToHistory :: [HistoryEntry] -> History
heListToHistory [] = HistoryRoot rootHistory
heListToHistory (x:xs) =
unsafePerformIO $ mdo d <- newTVarIO False
ww <- newIORef Nothing
p <- newTVarIO worker
n <- newTVarIO worker
ident <- allocateHistoryIdent
let worker = HistoryWorker { hw_dead = d,
hw_worker = ww,
hw_prev_lru = p,
hw_next_lru = n,
hw_is_root = False }
return $ History { hs_parent = heListToHistory xs,
hs_entry = x,
hs_worker = worker,
hs_ident = ident }
instance Show History where
show = show . historyGetHeList
instance Render History where
render = render . historyGetHeList
instance Read History where
readsPrec _ s = do (hes, trailer) <- reads s
return (heListToHistory $ reverse hes, trailer)
instance Eq History where
(HistoryRoot _) == (HistoryRoot _) = True
(HistoryRoot _) == _ = False
_ == (HistoryRoot _) = False
a == b =
if a `unsafePtrEq` b
then True
else if hs_entry a == hs_entry b
then hs_parent a == hs_parent b
else False
data WorkerCache = WorkerCache { wc_logfile :: Logfile,
wc_root :: HistoryWorker,
wc_nr_workers :: TVar Int,
wc_idents :: IORef [Integer] }
historyFold :: (a -> HistoryEntry -> a) -> a -> History -> a
historyFold _ base (HistoryRoot _) = base
historyFold folder base hist =
folder (historyFold folder base (hs_parent hist)) (hs_entry hist)
history_logfile_ptr :: History -> LogfilePtr
history_logfile_ptr =
historyFold (\acc ent ->
case ent of
HistoryAdvanceLog ptr -> ptr
_ -> acc) (LogfilePtr 0 0)
ioAssertTrue :: IO Bool -> IO ()
ioAssertTrue a = do a' <- a
assert "ioAssertTrue" a' $ return ()
doHistoryEntry :: Worker -> HistoryEntry -> IO ()
doHistoryEntry w (HistoryRun tid cntr) =
do setThreadWorker w tid
r <- runWorker "doHistoryEntry" w cntr
when (not r) $
putStrLn $ "failed to replay history entry run " ++ (show tid) ++ " " ++ (show cntr) ++ " in " ++ (show w)
doHistoryEntry w (HistorySetRegister tid reg val) =
ioAssertTrue $ setRegisterWorker w tid reg val
doHistoryEntry w (HistoryAllocMemory addr size prot) =
ioAssertTrue $ allocateMemoryWorker w addr size prot
doHistoryEntry w (HistorySetMemory addr bytes) =
ioAssertTrue $ setMemoryWorker w addr bytes
doHistoryEntry w (HistorySetMemoryProtection addr size prot) =
setMemoryProtectionWorker w addr size prot
doHistoryEntry w (HistorySetTsc tid tsc) =
ioAssertTrue $ setTscWorker w tid tsc
doHistoryEntry _ (HistoryAdvanceLog _) =
return ()
doHistoryEntry w (HistoryRunSyscall t) =
runSyscallWorker w t
rootHistory :: HistoryWorker
rootHistory = unsafePerformIO $ liftM wc_root workerCache
initialHistory :: Logfile -> Worker -> IO History
initialHistory lf w =
do p <- newTVarIO undefined
n <- newTVarIO undefined
isDead <- newTVarIO False
worker <- newIORef $ Just w
let wrk = HistoryWorker { hw_dead = isDead, hw_worker = worker,
hw_prev_lru = p, hw_next_lru = n, hw_is_root = True }
atomically $ writeTVar p wrk
atomically $ writeTVar n wrk
nr <- newTVarIO 0
idents <- newIORef [1..]
let wc = WorkerCache { wc_logfile = lf, wc_root = wrk, wc_nr_workers = nr, wc_idents = idents}
writeIORef globalWorkerCache wc
return $ HistoryRoot wrk
privatiseWorker :: WorkerCache -> HistoryWorker -> STM Bool
privatiseWorker wc hw =
do {- Remove from the list -}
d <- readTVar $ hw_dead hw
writeTVar (hw_dead hw) True
when (not d) $
do p <- readTVar $ hw_prev_lru hw
n <- readTVar $ hw_next_lru hw
writeTVar (hw_next_lru p) n
writeTVar (hw_prev_lru n) p
writeTVar (hw_prev_lru hw) hw
writeTVar (hw_next_lru hw) hw
nw <- readTVar $ wc_nr_workers wc
writeTVar (wc_nr_workers wc) (nw - 1)
return d
reallyKillHistoryWorker :: HistoryWorker -> IO ()
reallyKillHistoryWorker hw =
do wrk <- readIORef $ hw_worker hw
writeIORef (hw_worker hw) Nothing
case wrk of
Nothing -> error "Dead HW with no worker?"
Just wrk' -> killWorker wrk'
killHistoryWorker :: WorkerCache -> HistoryWorker -> IO ()
killHistoryWorker wc hw =
do alreadyDead <- atomically $ privatiseWorker wc hw
when (not alreadyDead) $ reallyKillHistoryWorker hw
{- Finaliser for history objects -}
historyDead :: WorkerCache -> History -> IO ()
historyDead _ (HistoryRoot _) = error "root worker was garbage collected?"
historyDead wc hist =
killHistoryWorker wc $ hs_worker hist
mkSimpleHistory :: History -> HistoryEntry -> History
mkSimpleHistory parent he =
unsafePerformIO $ mdo w <- newIORef Nothing
dead <- newTVarIO True
p <- newTVarIO newWH
n <- newTVarIO newWH
wc <- workerCache
ident <- allocateHistoryIdent
let newWH = HistoryWorker { hw_dead = dead,
hw_worker = w,
hw_prev_lru = p,
hw_next_lru = n,
hw_is_root = False }
newHist = History { hs_parent = parent,
hs_entry = he,
hs_worker = newWH,
hs_ident = ident }
addFinalizer newHist (historyDead wc newHist)
return newHist
appendHistory :: History -> HistoryEntry -> History
appendHistory hist@(HistoryRoot _) he =
mkSimpleHistory hist he
appendHistory hist he =
case (he, hs_entry hist) of
(HistoryRun hetid _, HistoryRun histtid _)
| hetid == histtid ->
appendHistory (hs_parent hist) he
(HistoryAdvanceLog _, HistoryAdvanceLog _)
| he == hs_entry hist -> hist
_ -> mkSimpleHistory hist he
stripNonRun :: History -> History
stripNonRun h@(HistoryRoot _) = h
stripNonRun hist =
case hs_entry hist of
HistoryRun _ _ -> hist
_ -> stripNonRun $ hs_parent hist
{- Truncate a history so that it only runs to a particular epoch number -}
truncateHistory :: History -> Topped AccessNr -> Either String History
truncateHistory h@(HistoryRoot _) (Finite (AccessNr 0)) = Right h
truncateHistory (HistoryRoot _) _ = Left "truncate empty history"
truncateHistory hist cntr =
case hs_entry hist of
HistoryRun tid c | c == cntr -> Right hist
| c < cntr ->
Left "truncate tried to extend history"
| otherwise ->
case stripNonRun (hs_parent hist) of
(HistoryRoot _) ->
Right $ appendHistory (hs_parent hist) (HistoryRun tid cntr)
runParent ->
case hs_entry runParent of
HistoryRun _ c'
| c' >= cntr ->
truncateHistory runParent cntr
| otherwise ->
Right $ appendHistory (hs_parent hist) (HistoryRun tid cntr)
_ -> error "stripNonRun misbehaving"
_ ->
case stripNonRun (hs_parent hist) of
(HistoryRoot _) ->
if cntr == (Finite $ AccessNr 0)
then Right hist
else Left "truncate history with only non-run entries"
runParent ->
case hs_entry runParent of
HistoryRun _ c'
| c' >= cntr ->
truncateHistory runParent cntr
| otherwise ->
Left "truncate tried to extend history (ends in non-run)"
_ -> error "stripNonRun misbehaving"
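{- For example (illustrative), 'truncateHistory' behaves as follows: truncating
   a history whose last entry is HistoryRun t (Finite 100) to access number 50
   rewrites that final run to HistoryRun t (Finite 50) (assuming earlier runs
   end before 50), whereas truncating it to 200 fails with
   "truncate tried to extend history". -}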
threadAtAccess :: History -> AccessNr -> Either String ThreadId
threadAtAccess hist acc =
historyFold (\rest (HistoryRun tid acc') ->
if (Finite acc) < acc'
then Right tid
else rest) (Left "ran out of history") hist
traceTo' :: Worker -> (Worker -> ThreadId -> Topped AccessNr -> IO [a]) -> [HistoryEntry] -> IO [a]
traceTo' _ _ [] = return []
traceTo' work tracer ((HistoryRun tid cntr):rest) =
do h <- tracer work tid cntr
rest' <- traceTo' work tracer rest
return $ h ++ rest'
traceTo' work tracer ((HistorySetRegister tid reg val):rest) =
do setRegisterWorker work tid reg val
traceTo' work tracer rest
traceTo' work tracer ((HistoryAllocMemory addr size prot):rest) =
do allocateMemoryWorker work addr size prot
traceTo' work tracer rest
traceTo' work tracer ((HistorySetMemory addr bytes):rest) =
do setMemoryWorker work addr bytes
traceTo' work tracer rest
traceTo' work tracer ((HistorySetMemoryProtection addr size prot):rest) =
do setMemoryProtectionWorker work addr size prot
traceTo' work tracer rest
traceTo' work tracer ((HistorySetTsc tid tsc):rest) =
do setTscWorker work tid tsc
traceTo' work tracer rest
traceTo' worker tracer ((HistoryAdvanceLog _):rest) =
traceTo' worker tracer rest
traceTo' worker tracer ((HistoryRunSyscall tid):rest) =
do runSyscallWorker worker tid
traceTo' worker tracer rest
historyGetHeList :: History -> [HistoryEntry]
historyGetHeList = reverse . historyFold (flip (:)) []
unsafePtrEq :: a -> a -> Bool
unsafePtrEq a b = (unsafeCoerce# a) `eqAddr#` (unsafeCoerce# b)
getDeltaScript :: History -> History -> Maybe [HistoryEntry]
getDeltaScript (HistoryRoot _) end = Just $ historyGetHeList end
getDeltaScript _ (HistoryRoot _) = Nothing
getDeltaScript start end =
let quick s e =
if s `unsafePtrEq` e
then Just []
else case quick s (hs_parent e) of
Just b -> Just $ (hs_entry e):b
Nothing -> Nothing
in case quick start end of
Just r -> Just $ reverse r
Nothing ->
{- start isn't on the path from the root to end. Do it
the hard way. -}
Debug.Trace.trace "getDeltaScript on slow path..." $
let start' = historyGetHeList start
end' = historyGetHeList end
worker [] xs = Just xs
worker _ [] = Nothing
worker aas@(a:as) bbs@(b:bs)
| a == b = worker as bs
| otherwise =
case (a, b) of
(HistoryRun atid acntr,
HistoryRun btid bcntr)
| atid == btid ->
if acntr < bcntr
then worker as bbs
else worker aas bs
_ -> Nothing
in worker start' end'
traceTo'' :: (Worker -> ThreadId -> Topped AccessNr -> IO [a]) -> Worker -> History -> History -> IO (Either String [a])
traceTo'' tracer worker start end =
case getDeltaScript start end of
Just todo -> liftM Right $ traceTo' worker tracer todo
Nothing -> return $ Left $ shows start $ " is not a prefix of " ++ (show end)
{- Take a worker and a history representing its current state and run
it forwards to some other history, logging control expressions as
we go. -}
controlTraceToWorker :: Worker -> History -> History -> IO (Either String [Expression])
controlTraceToWorker = traceTo'' controlTraceWorker
traceToWorker :: Worker -> History -> History -> IO (Either String [TraceRecord])
traceToWorker = traceTo'' traceWorker
sendWorkerCommand :: Worker -> ControlPacket -> IO ResponsePacket
sendWorkerCommand worker cp =
do a <- readIORef $ worker_alive worker
if not a
then error $ "send command " ++ (show cp) ++ " to dead worker on fd " ++ (show $ worker_fd worker)
else sendSocketCommand (worker_fd worker) cp
fromAN :: Topped AccessNr -> [Word64]
fromAN Infinity = [-1]
fromAN (Finite (AccessNr acc)) = [fromInteger acc]
snapshotPacket :: ControlPacket
snapshotPacket = ControlPacket 0x1234 []
killPacket :: ControlPacket
killPacket = ControlPacket 0x1235 []
runPacket :: Topped AccessNr -> ControlPacket
runPacket x = ControlPacket 0x1236 $ fromAN x
tracePacket :: Topped AccessNr -> ControlPacket
tracePacket x = ControlPacket 0x1237 $ fromAN x
threadStatePacket :: ControlPacket
threadStatePacket = ControlPacket 0x123b []
replayStatePacket :: ControlPacket
replayStatePacket = ControlPacket 0x123c []
controlTracePacket :: Topped AccessNr -> ControlPacket
controlTracePacket to = ControlPacket 0x123d $ fromAN to
fetchMemoryPacket :: Word64 -> Word64 -> ControlPacket
fetchMemoryPacket addr size = ControlPacket 0x123e [addr, size]
vgIntermediatePacket :: Word64 -> ControlPacket
vgIntermediatePacket addr = ControlPacket 0x123f [addr]
nextThreadPacket :: ControlPacket
nextThreadPacket = ControlPacket 0x1240 []
setThreadPacket :: ThreadId -> ControlPacket
setThreadPacket (ThreadId tid) = ControlPacket 0x1241 [fromInteger tid]
getRegistersPacket :: ControlPacket
getRegistersPacket = ControlPacket 0x1242 []
traceToEventPacket :: Topped AccessNr -> ControlPacket
traceToEventPacket x = ControlPacket 0x1243 $ fromAN x
setRegisterPacket :: ThreadId -> RegisterName -> Word64 -> ControlPacket
setRegisterPacket (ThreadId tid) reg val = ControlPacket 0x1244 [fromInteger tid, unparseRegister reg, val]
allocateMemoryPacket :: Word64 -> Word64 -> Word64 -> ControlPacket
allocateMemoryPacket addr size prot = ControlPacket 0x1245 [addr, size, prot]
setMemoryPacket :: Word64 -> Word64 -> ControlPacket
setMemoryPacket addr size = ControlPacket 0x1246 [addr, size]
setMemoryProtectionPacket :: Word64 -> Word64 -> Word64 -> ControlPacket
setMemoryProtectionPacket addr size prot = ControlPacket 0x1247 [addr, size, prot]
setTscPacket :: ThreadId -> Word64 -> ControlPacket
setTscPacket (ThreadId tid) tsc = ControlPacket 0x1248 [fromInteger tid, tsc]
getHistoryPacket :: ControlPacket
getHistoryPacket = ControlPacket 0x1249 []
runSyscallPacket :: ThreadId -> ControlPacket
runSyscallPacket (ThreadId tid) = ControlPacket 0x124c [fromInteger tid]
runToEventPacket :: Topped AccessNr -> ControlPacket
runToEventPacket x = ControlPacket 0x124e $ fromAN x
trivCommand :: Worker -> ControlPacket -> IO Bool
trivCommand worker cmd =
do (ResponsePacket s _) <- sendWorkerCommand worker cmd
return s
killWorker :: Worker -> IO ()
killWorker worker =
do s <- trivCommand worker killPacket
if s
then do sClose $ worker_fd worker
writeIORef (worker_alive worker) False
modifyIORef nrForkedWorkers $ \x -> x - 1
else error "can't kill worker?"
freezeWorker :: Worker -> IO ()
freezeWorker worker =
writeIORef (worker_frozen worker) True
workerFrozen :: Worker -> IO Bool
workerFrozen = readIORef . worker_frozen
mustBeThawed :: String -> Worker -> IO ()
mustBeThawed m w = do t <- workerFrozen w
assert ("worker frozen unexpectedly: " ++ m) (not t) $ return ()
runWorker :: String -> Worker -> Topped AccessNr -> IO Bool
runWorker msg worker acc = mustBeThawed ("runWorker: " ++ msg) worker >> trivCommand worker (runPacket acc)
ancillaryDataToTrace :: [ResponseData] -> [TraceRecord]
ancillaryDataToTrace [] = []
ancillaryDataToTrace ((ResponseDataString _):rs) = ancillaryDataToTrace rs
ancillaryDataToTrace ((ResponseDataBytes _):rs) = ancillaryDataToTrace rs
ancillaryDataToTrace ((ResponseDataAncillary code (loc':tid':other_args)):rs) =
let loc = AccessNr $ fromIntegral loc'
tid = ThreadId $ fromIntegral tid'
(entry, rest) =
case code of
1 -> (TraceFootstep { trc_foot_rip = fromIntegral $ other_args!!0,
trc_foot_rdx = fromIntegral $ other_args!!1,
trc_foot_rcx = fromIntegral $ other_args!!2,
trc_foot_rax = fromIntegral $ other_args!!3 },
rs)
2 -> (TraceSyscall { trc_sys_nr = fromIntegral $ other_args!!0 },
rs)
3 -> (TraceRdtsc, rs)
4 -> (TraceLoad { trc_load_val = fromIntegral $ other_args!!0,
trc_load_size = fromIntegral $ other_args!!1,
trc_load_ptr = fromIntegral $ other_args!!2,
trc_load_in_monitor = other_args!!3 /= 0,
trc_rip = other_args!!4 }, rs)
5 -> (TraceStore { trc_store_val = fromIntegral $ other_args!!0,
trc_store_size = fromIntegral $ other_args!!1,
trc_store_ptr = fromIntegral $ other_args!!2,
trc_store_in_monitor = other_args!!3 /= 0,
trc_rip = other_args!!4 }, rs)
6 -> (case head rs of
ResponseDataString s -> TraceCalling s
_ -> error $ "mangled trace calling: " ++ (show other_args) ++ ", " ++ (show rs), tail rs)
7 -> (case head rs of
ResponseDataString s -> TraceCalled s
_ -> error $ "mangled trace called: " ++ (show other_args) ++ ", " ++ (show rs), tail rs)
8 -> (TraceEnterMonitor, rs)
9 -> (TraceExitMonitor, rs)
17 -> (TraceSignal { trc_rip = other_args!!0,
trc_signr = fromIntegral $ other_args!!1,
trc_err = other_args!!2,
trc_va = other_args!!3 }, rs)
_ -> error $ "bad trace ancillary code " ++ (show code)
in (TraceRecord { trc_trc = entry, trc_tid = tid, trc_loc = loc }):(ancillaryDataToTrace rest)
ancillaryDataToTrace x = error $ "bad trace ancillary data " ++ (show x)
traceCmd :: Worker -> ControlPacket -> IO [TraceRecord]
traceCmd worker pkt =
do mustBeThawed "traceCmd" worker
(ResponsePacket _ args) <- sendWorkerCommand worker pkt
return $ ancillaryDataToTrace args
traceWorker :: Worker -> ThreadId -> Topped AccessNr -> IO [TraceRecord]
traceWorker worker tid cntr = setThreadWorker worker tid >> traceCmd worker (tracePacket cntr)
traceToEventWorker :: Worker -> ThreadId -> Topped AccessNr -> IO [TraceRecord]
traceToEventWorker worker tid limit = do setThreadWorker worker tid
traceCmd worker $ traceToEventPacket limit
takeSnapshot :: Worker -> IO (Maybe Worker)
takeSnapshot worker =
do (ResponsePacket s _) <- sendWorkerCommand worker snapshotPacket
if s
then do newFd <- recvSocket (worker_fd worker)
newAlive <- newIORef True
newFrozen <- newIORef False
modifyIORef nrForkedWorkers $ (+) 1
return $ Just $ Worker {worker_fd = newFd, worker_alive = newAlive, worker_frozen = newFrozen }
else return Nothing
threadStateWorker :: Worker -> IO [(ThreadId, ThreadState)]
threadStateWorker worker =
let parseItem :: ConsumerMonad ResponseData (ThreadId, ThreadState)
parseItem = do (ResponseDataAncillary 13 [tid, is_dead, is_crashed, last_access, last_rip]) <- consume
return (ThreadId $ fromIntegral tid,
ThreadState {ts_dead = is_dead /= 0,
ts_crashed = is_crashed /= 0,
ts_last_run = AccessNr $ fromIntegral last_access,
ts_last_rip = last_rip})
in
do (ResponsePacket s params) <- sendWorkerCommand worker threadStatePacket
return $ if s
then evalConsumer params (consumeMany parseItem)
else error "error getting thread state"
parseReplayState :: [ResponseData] -> ReplayState
parseReplayState [ResponseDataAncillary 10 [access_nr]] = ReplayStateOkay $ AccessNr $ fromIntegral access_nr
parseReplayState (ResponseDataAncillary 11 [x, tid, access_nr]:(ResponseDataString s):items) =
ReplayStateFailed s (ThreadId $ fromIntegral tid) (AccessNr $ fromIntegral access_nr) $
case x of
0 -> case items of
[] -> FailureReasonControl
_ -> error $ "unexpected extra data in a failure control response " ++ (show items)
1 -> uncurry FailureReasonData $ evalConsumer items $ pairM parseExpression parseExpression
3 -> case items of
[ResponseDataAncillary 18 [wantedTid]] ->
FailureReasonWrongThread (ThreadId $ fromIntegral wantedTid)
_ -> error $ "can't parse data for wrong thread failure " ++ (show items)
_ -> error $ "unexpected failure class " ++ (show x)
parseReplayState [ResponseDataAncillary 14 [access_nr]] = ReplayStateFinished $ AccessNr $ fromIntegral access_nr
parseReplayState x = error $ "bad replay state " ++ (show x)
replayStateWorker :: Worker -> IO ReplayState
replayStateWorker worker =
do (ResponsePacket _ params) <- sendWorkerCommand worker replayStatePacket
return $ parseReplayState params
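{- A tiny list-consuming parser monad (used here over ResponseData items):
   'consume' pops the next item, 'consumeMany' repeats a sub-parser until the
   input runs out, and 'evalConsumer' runs a parser and checks that every item
   was consumed. For example (illustrative):
     evalConsumer [1,2,3] (consumeMany consume) == [1,2,3] -}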
data ConsumerMonad a b = ConsumerMonad { runConsumer :: [a] -> (b, [a]) }
instance Monad (ConsumerMonad a) where
return b = ConsumerMonad $ \r -> (b, r)
f >>= s =
ConsumerMonad $ \items ->
let (f_res, items') = runConsumer f items
in runConsumer (s f_res) items'
consume :: ConsumerMonad a a
consume = ConsumerMonad $ \(i:r) -> (i,r)
hitEnd :: ConsumerMonad a Bool
hitEnd = ConsumerMonad $ \i -> case i of
[] -> (True, i)
_ -> (False, i)
consumeMany :: ConsumerMonad a b -> ConsumerMonad a [b]
consumeMany what =
do e <- hitEnd
if e
then return []
else do i <- what
rest <- consumeMany what
return $ i:rest
evalConsumer :: [a] -> ConsumerMonad a b -> b
evalConsumer items monad =
case runConsumer monad items of
(x, []) -> x
_ -> error "Failed to consume all items"
regNames :: [(RegisterName, Word64)]
regNames = [(REG_RAX, 0), (REG_RCX, 1), (REG_RDX, 2), (REG_RBX, 3), (REG_RSP, 4),
(REG_RBP, 5), (REG_RSI, 6), (REG_RDI, 7), (REG_R8, 8), (REG_R9, 9),
(REG_R10, 10), (REG_R11, 11), (REG_R12, 12), (REG_R13, 13),
(REG_R14, 14), (REG_R15, 15), (REG_CC_OP, 16), (REG_CC_DEP1, 17),
(REG_CC_DEP2, 18), (REG_CC_NDEP, 19), (REG_DFLAG, 20),
(REG_RIP, 21), (REG_IDFLAG, 22), (REG_FS_ZERO, 23),
(REG_SSE_ROUND, 24)]
unparseRegister :: RegisterName -> Word64
unparseRegister rname =
maybe (error $ "bad register name " ++ (show rname) ++ "?") id $ lookup rname regNames
parseRegister :: Word64 -> RegisterName
parseRegister idx =
maybe (error $ "bad register encoding " ++ (show idx)) fst $
find ((==) idx . snd) regNames
consumeRegisterBinding :: ConsumerMonad ResponseData (RegisterName, Word64)
consumeRegisterBinding =
do (ResponseDataAncillary 16 [name, val]) <- consume
return (parseRegister name, val)
isBinop :: Word64 -> Bool
isBinop x = x >= 5 && x <= 20
parseBinop :: Word64 -> Binop
parseBinop 5 = BinopCombine
parseBinop 6 = BinopSub
parseBinop 7 = BinopAdd
parseBinop 8 = BinopMull
parseBinop 9 = BinopMullHi
parseBinop 10 = BinopMullS
parseBinop 11 = BinopShrl
parseBinop 12 = BinopShl
parseBinop 13 = BinopShra
parseBinop 14 = BinopAnd
parseBinop 15 = BinopOr
parseBinop 16 = BinopXor
parseBinop 17 = BinopLe
parseBinop 18 = BinopBe
parseBinop 19 = BinopEq
parseBinop 20 = BinopB
parseBinop x = error $ "unknown binop " ++ (show x)
parseExpression :: ConsumerMonad ResponseData Expression
parseExpression =
do d <- consume
let (ResponseDataAncillary 12 params) = d
case params of
[0, val] -> return $ ExpressionConst val
[1, reg, val] -> return $ ExpressionRegister (parseRegister reg) val
[2, sz, acc, tid] ->
do ptr <- parseExpression
val <- parseExpression
return $ ExpressionLoad (ThreadId $ fromIntegral tid) (fromIntegral sz) (fromIntegral acc) ptr val
[3, acc, tid] -> do val <- parseExpression
return $ ExpressionStore (ThreadId $ fromIntegral tid) (fromIntegral acc) val
[4, val] -> return $ ExpressionImported val
[r] | isBinop r -> do a1 <- parseExpression
a2 <- parseExpression
return $ ExpressionBinop (parseBinop r) a1 a2
[21] -> do e <- parseExpression
return $ ExpressionNot e
_ -> error $ "failed to parse an expression " ++ (show d)
parseExpressions :: [ResponseData] -> [Expression]
parseExpressions items =
evalConsumer items $ consumeMany parseExpression
controlTraceWorker :: Worker -> ThreadId -> Topped AccessNr -> IO [Expression]
controlTraceWorker worker tid cntr =
do setThreadWorker worker tid
mustBeThawed "controlTraceWorker" worker
(ResponsePacket _ params) <- sendWorkerCommand worker $ controlTracePacket cntr
return $ parseExpressions params
fetchMemoryWorker :: Worker -> Word64 -> Word64 -> IO (Maybe [Word8])
fetchMemoryWorker worker addr size =
do r <- sendWorkerCommand worker $ fetchMemoryPacket addr size
return $ case r of
(ResponsePacket True [ResponseDataBytes s]) -> Just s
_ -> Nothing
vgIntermediateWorker :: Worker -> Word64 -> IO (Maybe String)
vgIntermediateWorker worker addr =
do sendWorkerCommand worker $ vgIntermediatePacket addr
return Nothing
nextThreadWorker :: Worker -> IO ThreadId
nextThreadWorker worker =
do (ResponsePacket True [ResponseDataAncillary 15 [tid]]) <- sendWorkerCommand worker nextThreadPacket
return $ ThreadId $ fromIntegral tid
setThreadWorker :: Worker -> ThreadId -> IO ()
setThreadWorker worker tid =
do sendWorkerCommand worker $ setThreadPacket tid
return ()
getRegistersWorker :: Worker -> IO RegisterFile
getRegistersWorker worker =
do (ResponsePacket True params) <- sendWorkerCommand worker getRegistersPacket
return $ RegisterFile $ evalConsumer params $ consumeMany consumeRegisterBinding
setRegisterWorker :: Worker -> ThreadId -> RegisterName -> Word64 -> IO Bool
setRegisterWorker worker tid reg val =
trivCommand worker $ setRegisterPacket tid reg val
allocateMemoryWorker :: Worker -> Word64 -> Word64 -> Word64 -> IO Bool
allocateMemoryWorker worker addr size prot =
trivCommand worker $ allocateMemoryPacket addr size prot
setMemoryWorker :: Worker -> Word64 -> [Word8] -> IO Bool
setMemoryWorker worker addr bytes =
let cp = setMemoryPacket addr $ fromIntegral $ length bytes
in do a <- readIORef $ worker_alive worker
if not a
then error "set memory in dead worker"
else do (ResponsePacket s _) <- sendSocketCommandTrailer (worker_fd worker) cp bytes
return s
setMemoryProtectionWorker :: Worker -> Word64 -> Word64 -> Word64 -> IO ()
setMemoryProtectionWorker worker addr size prot =
do trivCommand worker $ setMemoryProtectionPacket addr size prot
return ()
setTscWorker :: Worker -> ThreadId -> Word64 -> IO Bool
setTscWorker worker tid tsc =
trivCommand worker $ setTscPacket tid tsc
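-- Ask the worker for its own record of the history it has executed and check
-- it, entry by entry, against the history we believe it should have. The
-- un-primed wrapper below only calls this when validateHistories is set;
-- mismatches are traced and reported as False.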
validateHistoryWorker' :: Worker -> [HistoryEntry] -> IO Bool
validateHistoryWorker' worker desired_hist =
let validateHistory [] [] = True
validateHistory [ResponseDataAncillary 19 [0]] _ = True
validateHistory ((ResponseDataAncillary 19 [0x1236, tid, acc]):o) o's@((HistoryRun tid' acc'):o')
| (ThreadId $ toInteger tid) == tid' =
case (Finite $ AccessNr $ toInteger acc) `compare` acc' of
LT -> validateHistory o o's
EQ -> validateHistory o o'
GT -> Debug.Trace.trace ("history validation failed because worker was at " ++ (show acc) ++ " and we only wanted " ++ (show acc') ++ ", rest " ++ (show o')) False
validateHistory ((ResponseDataAncillary 19 [0x1244, tid, reg, val]):o)
((HistorySetRegister tid' reg' val'):o')
| (ThreadId $ toInteger tid) == tid' && (parseRegister reg) == reg' && val == val' = validateHistory o o'
validateHistory ((ResponseDataAncillary 19 [0x1245, addr, size, prot]):o)
((HistoryAllocMemory addr' size' prot'):o')
| addr == addr' && size == size' && prot == prot' = validateHistory o o'
validateHistory ((ResponseDataAncillary 19 [0x1246, addr, size]):o)
((HistorySetMemory addr' bytes):o')
| addr == addr' && size == (fromIntegral $ length bytes) = validateHistory o o'
validateHistory ((ResponseDataAncillary 19 [0x1247, addr, size, prot]):o)
((HistorySetMemoryProtection addr' size' prot'):o')
| addr == addr' && size == size' && prot == prot' = validateHistory o o'
validateHistory ((ResponseDataAncillary 19 [0x1248, tid, tsc]):o)
((HistorySetTsc (ThreadId tid') tsc'):o')
| toInteger tid == tid' && tsc == tsc' = validateHistory o o'
validateHistory o ((HistoryAdvanceLog _):o')
= validateHistory o o'
validateHistory ((ResponseDataAncillary 19 [0x124c, t]):o)
((HistoryRunSyscall (ThreadId t')):o')
| t == (fromIntegral t') = validateHistory o o'
validateHistory o o' = Debug.Trace.trace ("history validation failed: " ++ (show o) ++ " vs " ++ (show o')) False
in do (ResponsePacket _ params) <- sendWorkerCommand worker getHistoryPacket
let r = validateHistory params desired_hist
when (not r) $ putStrLn $ "validation of " ++ (show desired_hist) ++ " against " ++ (show params) ++ " in " ++ (show worker) ++ " failed"
return r
validateHistoryWorker :: Worker -> [HistoryEntry] -> IO Bool
validateHistoryWorker w he = if validateHistories
then validateHistoryWorker' w he
else return True
{- Pull a WCE to the front of the LRU list -}
touchWorkerCacheEntry :: HistoryWorker -> IO ()
touchWorkerCacheEntry hw =
if hw_is_root hw
then return ()
else do wc <- workerCache
atomically $ do {- Remove from old place -}
prev <- readTVar $ hw_prev_lru hw
next <- readTVar $ hw_next_lru hw
writeTVar (hw_next_lru prev) next
writeTVar (hw_prev_lru next) prev
{- Insert in new place -}
let newPrev = wc_root wc
newNext <- readTVar (hw_next_lru newPrev)
writeTVar (hw_next_lru newPrev) hw
writeTVar (hw_prev_lru newNext) hw
writeTVar (hw_next_lru hw) newNext
writeTVar (hw_prev_lru hw) newPrev
assert :: String -> Bool -> a -> a
assert _ True = id
assert msg False = unsafePerformIO $ dumpLog >> (error $ "assertion failure: " ++ msg)
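-- Evict workers from the cold end of the LRU list until the cache is back
-- under cacheSize. The root entry is never evicted.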
fixupWorkerCache :: WorkerCache -> IO ()
fixupWorkerCache wc =
do w <- atomically $ do n <- readTVar $ wc_nr_workers wc
if n <= cacheSize
then return Nothing
else do targ <- readTVar $ hw_prev_lru $ wc_root wc
dead <-
assert ("nr_workers " ++ (show n) ++ " but no workers found") (not $ hw_is_root targ) $
privatiseWorker wc targ
assert "dead worker on list" (not dead) $
return $ Just targ
case w of
Just w' -> reallyKillHistoryWorker w' >> fixupWorkerCache wc
Nothing -> return ()
cacheSize :: Int
cacheSize = 900
globalWorkerCache :: IORef WorkerCache
{-# NOINLINE globalWorkerCache #-}
globalWorkerCache =
unsafePerformIO $ newIORef $ error "use of worker cache before it was ready?"
nrForkedWorkers :: IORef Int
nrForkedWorkers =
unsafePerformIO $ newIORef 1
workerCache :: IO WorkerCache
workerCache =
do wc <- readIORef globalWorkerCache
pending <- getPendingSignals
when (sigUSR1 `inSignalSet` pending) $
do dumpLog
unblockSignals $ addSignal sigUSR1 emptySignalSet
blockSignals $ addSignal sigUSR1 emptySignalSet
return wc
destroyWorkerCache :: IO ()
destroyWorkerCache =
do wc <- workerCache
killAllWorkers wc
where killAllWorkers wc =
do targ <-
atomically $ do t <- readTVar $ hw_next_lru $ wc_root wc
if hw_is_root t
then return Nothing
else do privatiseWorker wc t
return $ Just t
case targ of
Nothing -> return ()
Just t -> do reallyKillHistoryWorker t
killAllWorkers wc
reallySnapshot :: Worker -> IO Worker
reallySnapshot w =
do w' <- takeSnapshot w
case w' of
Nothing -> error "cannot take a snapshot"
Just w'' -> return w''
modifyTVar :: TVar a -> (a -> a) -> STM ()
modifyTVar v f =
do v' <- readTVar v
writeTVar v $ f v'
addWorkerToLRU :: HistoryWorker -> Worker -> IO ()
addWorkerToLRU hw worker =
do wc <- workerCache
writeIORef (hw_worker hw) $ Just worker
atomically $ let newPrev = wc_root wc in
do writeTVar (hw_dead hw) False
newNext <- readTVar (hw_next_lru newPrev)
writeTVar (hw_next_lru newPrev) hw
writeTVar (hw_prev_lru newNext) hw
writeTVar (hw_next_lru hw) newNext
writeTVar (hw_prev_lru hw) newPrev
modifyTVar (wc_nr_workers wc) ((+) 1)
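-- Obtain a worker positioned at the given history. If this history's cache
-- entry has lost its worker, rebuild it by snapshotting the parent's worker
-- and replaying the final history entry. When pure is False the caller gets
-- a private snapshot which it may run and kill.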
getWorker' :: Bool -> History -> IO Worker
getWorker' pure (HistoryRoot w) =
do w' <- readIORef $ hw_worker w
w'' <- case w' of
Nothing -> error "root HW lost its worker"
Just w''' -> return w'''
if pure
then return w''
else reallySnapshot w''
getWorker' pure hist =
do w <- readIORef $ hw_worker $ hs_worker hist
case w of
Just w' -> do touchWorkerCacheEntry (hs_worker hist)
if pure
then return w'
else reallySnapshot w'
Nothing ->
do worker <- getWorker' False $ hs_parent hist
doHistoryEntry worker (hs_entry hist)
freezeWorker worker
addWorkerToLRU (hs_worker hist) worker
wc <- workerCache
fixupWorkerCache wc
if pure
then return worker
else reallySnapshot worker
getWorker :: Bool -> History -> IO Worker
getWorker pure hist =
do r <- getWorker' pure hist
v <- validateHistoryWorker r (historyGetHeList hist)
wc <- assert ("getWorker' returned bad worker for history " ++ (show hist)) v workerCache
fixupWorkerCache wc
return r
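-- impureCmd runs a command against a private snapshot of the history's
-- worker and kills the snapshot afterwards; queryCmd runs a read-only
-- command directly against the shared, frozen worker.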
impureCmd :: (Worker -> IO a) -> History -> a
impureCmd w hist =
unsafePerformIO $ do worker <- getWorker False hist
res <- w worker
killWorker worker
return res
queryCmd :: (Worker -> IO a) -> History -> a
queryCmd w hist =
unsafePerformIO $ getWorker True hist >>= w
threadState :: History -> [(ThreadId, ThreadState)]
threadState = queryCmd threadStateWorker
replayState :: History -> ReplayState
replayState = queryCmd replayStateWorker
fetchMemory :: History -> Word64 -> Word64 -> Maybe [Word8]
fetchMemory hist addr size =
queryCmd (\worker -> fetchMemoryWorker worker addr size) hist
vgIntermediate :: History -> Word64 -> Maybe String
vgIntermediate hist addr =
queryCmd (\worker -> vgIntermediateWorker worker addr) hist
nextThread :: History -> ThreadId
nextThread = queryCmd nextThreadWorker
controlTraceTo :: History -> History -> Either String [Expression]
controlTraceTo start end =
impureCmd (\worker -> controlTraceToWorker worker start end) start
traceTo :: History -> History -> Either String [TraceRecord]
traceTo start end =
impureCmd (\worker -> traceToWorker worker start end) start
getRegisters :: History -> RegisterFile
getRegisters = queryCmd getRegistersWorker
getRipAtAccess :: History -> AccessNr -> Either String Word64
getRipAtAccess hist whn =
do hist' <- truncateHistory hist $ Finite $ whn + 1
getRegister (getRegisters hist') REG_RIP
traceToEvent :: History -> ThreadId -> Topped AccessNr -> Either String ([TraceRecord], History)
traceToEvent start tid limit =
unsafePerformIO $ do worker <- getWorker False start
trc <- traceToEventWorker worker tid limit
rs <- replayStateWorker worker
killWorker worker
return $ let finalHist = appendHistory start $ HistoryRun tid $ Finite $ rs_access_nr rs
in Right (trc, finalHist)
runThreadToEventWorker :: Worker -> History -> ThreadId -> Topped AccessNr -> IO (Maybe TraceRecord, History, ReplayState)
runThreadToEventWorker worker start tid limit =
do setThreadWorker worker tid
trc <- traceCmd worker $ runToEventPacket limit
rs <- replayStateWorker worker
let newHist = appendHistory start $ HistoryRun tid $ Finite $ rs_access_nr rs
return (case trc of
[] -> Nothing
[x] -> Just x
_ -> error $ "runToEvent gave long trace " ++ (show trc), newHist, rs)
runSyscallWorker :: Worker -> ThreadId -> IO ()
runSyscallWorker worker tid =
do trivCommand worker $ runSyscallPacket tid
return ()
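-- Advance a single thread up to the given access number, consuming log
-- records as replay events are produced. Rdtsc results, memory accesses,
-- signals and syscalls are checked against (or reconstructed from) the log;
-- on success the extended history is returned, otherwise an error string.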
runThread :: Logfile -> History -> ThreadId -> Topped AccessNr -> Either String History
runThread logfile hist tid acc =
unsafePerformIO $ do worker <- getWorker False hist
mustBeThawed "runThread" worker
(evt', newHist, rs) <- runThreadToEventWorker worker hist tid acc
case (evt', rs) of
(Nothing, _) ->
{- nothing which requires special help -}
return $ Right $ appendHistory hist $ HistoryRun tid acc
(Just evt, ReplayStateOkay acc') ->
if Finite acc' <= acc
then do let lp = history_logfile_ptr hist
fixedHist =
case nextRecord logfile lp of
Nothing ->
error "unexpected end of log when worker thought there was more?"
Just (logrecord, nextLogPtr) ->
let success = return . Right
justAdvance = success (advanceLog newHist nextLogPtr, True)
failed = return . Left
{- Go and process all the populate memory records which come
after logptr. -}
processPopMemRecords pphist logptr =
case nextRecord logfile logptr of
Just (LogRecord _ (LogMemory ptr contents), nlp) ->
processPopMemRecords (setMemory pphist ptr contents) nlp
_ -> (pphist, logptr)
{- syscalls which we handle by just re-running them -}
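                              {- (For reference: on x86-64 Linux these numbers would be mprotect,
                                 munmap, brk, rt_sigaction, rt_sigprocmask, arch_prctl and
                                 set_robust_list; the exact syscall table is an assumption.) -}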
replaySyscall (LogSyscall sysnr _ _ _ _)
| sysnr `elem` [10, 11, 12, 13, 14, 158, 273] =
(runSyscall newHist $ trc_tid evt, nextLogPtr)
{- syscalls which we handle by just imposing the return value -}
replaySyscall (LogSyscall sysnr sysres _ _ _)
| sysnr `elem` [0, 1, 2, 3, 4, 5, 21, 63, 79, 97, 102, 202] =
(setRegister newHist (trc_tid evt) REG_RAX sysres, nextLogPtr)
{- syscalls which we handle by re-running and then imposing the
recorded return value. -}
replaySyscall (LogSyscall sysnr sysres _ _ _)
| sysnr `elem` [56, 218] =
(setRegister (runSyscall newHist $ trc_tid evt) (trc_tid evt) REG_RAX sysres,
nextLogPtr)
{- exit_group. Ignore it and move to the next record, which should
immediately finish the log. -}
replaySyscall (LogSyscall 231 _ _ _ _) =
(newHist, nextLogPtr)
{- mmap -}
replaySyscall (LogSyscall 9 sysres _ len prot) =
let (doneMmapHist, finalLogPtr) =
if sysres > 0xffffffffffff8000
then {- syscall failed -> just replay the failure -}
(newHist, nextLogPtr)
else {- syscall succeeded -> have to replay it properly -}
let addr = sysres
allocMem = allocateMemory newHist addr len (prot .|. 2) {- Turn on write access -}
(populateMem, ptrAfterPopulateMem) =
processPopMemRecords allocMem nextLogPtr
resetProt =
if prot .&. 2 == 0
then populateMem
else setMemoryProtection populateMem addr len prot
in (resetProt, ptrAfterPopulateMem)
in (setRegister doneMmapHist (trc_tid evt) REG_RAX sysres, finalLogPtr)
replaySyscall (LogSyscall sysnr _ _ _ _) =
error $ "don't know how to replay syscall " ++ (show sysnr)
replaySyscall _ = error "replaySyscall called on non-syscall?"
replaySyscall' x =
let (h, np) = replaySyscall x
(h', np') = processPopMemRecords h np
in advanceLog h' np'
res =
case (trc_trc evt, lr_body logrecord) of
(TraceRdtsc, LogRdtsc tsc) ->
success (advanceLog (setTsc newHist tid tsc) nextLogPtr, True)
(TraceRdtsc, r) ->
failed $ "rdtsc event with record " ++ (show r)
(TraceCalling _, LogClientCall) -> justAdvance
(TraceCalling n, r) ->
failed $ "calling " ++ n ++ " event with record " ++ (show r)
(TraceCalled _, LogClientReturn) -> justAdvance
(TraceCalled n, r) ->
failed $ "called " ++ n ++ " event with record " ++ (show r)
(TraceLoad val sz ptr _ _, LogAccess True val' sz' ptr') |
sz == sz' && ptr == ptr' && val == val' ->
justAdvance
(TraceLoad _ _ _ _ _, _) | useMemoryRecords ->
failed $ "load event " ++ (show evt) ++ " against record " ++ (show logrecord)
(TraceStore val sz ptr _ _, LogAccess False val' sz' ptr') |
sz == sz' && ptr == ptr' && val == val' ->
justAdvance
(TraceStore _ _ _ _ _, _) | useMemoryRecords ->
failed $ "store event " ++ (show evt) ++ " against record " ++ (show logrecord)
(TraceSignal rip signr err va, LogSignal rip' signr' err' va') |
and [rip == rip', signr == (fromIntegral signr'), err == err', va == va'] ->
justAdvance
(TraceSignal _ _ _ _, _) ->
failed $ "signal event " ++ (show evt) ++ " against record " ++ (show logrecord)
(TraceSyscall sysnr, LogSyscall sysnr' _ arg1 arg2 arg3) ->
Debug.Trace.trace ("validate syscall " ++ (show evt)) $
do regs <- getRegistersWorker worker
let rdi = getRegister' regs REG_RDI
rsi = getRegister' regs REG_RSI
rdx = getRegister' regs REG_RDX
if rdi == arg1 && rsi == arg2 && rdx == arg3 && sysnr == (fromIntegral sysnr')
then return $ Right (replaySyscall' $ lr_body logrecord, True)
else failed $ "syscall record " ++ (show logrecord) ++ " against trace event " ++ (show evt) ++ " " ++
(showHex rdi $ " " ++ (showHex rsi $ " " ++ (showHex rdx "")))
(TraceSyscall _, _) ->
failed $ "syscall event " ++ (show evt) ++ " against record " ++ (show logrecord)
(TraceEnterMonitor, _) -> success (newHist, False)
(TraceExitMonitor, _) -> success (newHist, False)
(TraceFootstep _ _ _ _, _) -> success (newHist, False)
(TraceLoad _ _ _ _ _, _) -> success (newHist, False)
(TraceStore _ _ _ _ _, _) -> success (newHist, False)
in do r <- res
case r of
Left e -> failed e
Right (res', checkThread) ->
if checkThread && trc_tid evt /= lr_tid logrecord
then failed $ "wrong thread: wanted " ++ (show logrecord) ++ ", got " ++ (show evt)
else success res'
fixedHist' <- fixedHist
killWorker worker
return $ case fixedHist' of
Left e -> Left e
Right fh' ->
if Finite acc' == acc
then fixedHist'
else runThread logfile fh' tid acc
else do killWorker worker
return $ Right newHist
_ -> do killWorker worker
return $ Right newHist
setRegister :: History -> ThreadId -> RegisterName -> Word64 -> History
setRegister hist tid reg val =
appendHistory hist $ HistorySetRegister tid reg val
allocateMemory :: History -> Word64 -> Word64 -> Word64 -> History
allocateMemory hist addr size prot =
appendHistory hist $ HistoryAllocMemory addr size prot
setMemory :: History -> Word64 -> [Word8] -> History
setMemory hist addr contents =
appendHistory hist $ HistorySetMemory addr contents
setMemoryProtection :: History -> Word64 -> Word64 -> Word64 -> History
setMemoryProtection hist addr size prot =
appendHistory hist $ HistorySetMemoryProtection addr size prot
setTsc :: History -> ThreadId -> Word64 -> History
setTsc hist tid tsc = appendHistory hist $ HistorySetTsc tid tsc
advanceLog :: History -> LogfilePtr -> History
advanceLog hist lp = appendHistory hist $ HistoryAdvanceLog lp
runSyscall :: History -> ThreadId -> History
runSyscall hist tid = appendHistory hist $ HistoryRunSyscall tid
| sos22/ppres | ppres/driver/History.hs | gpl-2.0 | 56,573 | 24 | 38 | 22,636 | 14,091 | 7,066 | 7,025 | 1,010 | 37 |
-- | Make Lamdu Style
{-# LANGUAGE TemplateHaskell, DisambiguateRecordFields #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module Lamdu.Style.Make
( make
, mainLoopConfig
) where
import qualified Control.Lens as Lens
import Data.Property (MkProperty)
import qualified Data.Property as Property
import GUI.Momentu.Animation (AnimId)
import qualified GUI.Momentu.Direction as Dir
import qualified GUI.Momentu.Element as Element
import qualified GUI.Momentu.EventMap as E
import GUI.Momentu.Font (Font)
import qualified GUI.Momentu.Font as Font
import qualified GUI.Momentu.Glue as Glue
import qualified GUI.Momentu.I18N as MomentuTexts
import qualified GUI.Momentu.Main.Animation as Anim
import qualified GUI.Momentu.Main.Config as MainConfig
import GUI.Momentu.ModKey (ModKey)
import GUI.Momentu.Widget (Widget)
import qualified GUI.Momentu.Widget as Widget
import qualified GUI.Momentu.Widgets.Cursor as Cursor
import GUI.Momentu.Widgets.EventMapHelp (IsHelpShown(..))
import qualified GUI.Momentu.Widgets.EventMapHelp as EventMapHelp
import qualified GUI.Momentu.Widgets.TextEdit as TextEdit
import qualified GUI.Momentu.Widgets.TextView as TextView
import GUI.Momentu.Zoom (Zoom)
import Lamdu.Config (Config)
import qualified Lamdu.Config as Config
import Lamdu.Config.Theme (Theme)
import qualified Lamdu.Config.Theme as Theme
import Lamdu.Config.Theme.Fonts (Fonts)
import qualified Lamdu.Config.Theme.Fonts as Fonts
import qualified Lamdu.Config.Theme.TextColors as TextColors
import Lamdu.I18N.Language (Language)
import Lamdu.Style (Style(..))
import Lamdu.Prelude
helpStyle :: Font -> Theme.Help -> EventMapHelp.Style
helpStyle font theme =
EventMapHelp.Style
{ EventMapHelp._styleText =
TextView.Style
{ TextView._styleColor = theme ^. Theme.helpTextColor
, TextView._styleFont = font
, TextView._styleUnderline = Nothing
}
, EventMapHelp._styleInputDocColor = theme ^. Theme.helpInputDocColor
, EventMapHelp._styleBGColor = theme ^. Theme.helpBGColor
, EventMapHelp._styleTint = theme ^. Theme.helpTint
, EventMapHelp._styleSrcLocColor = theme ^. Theme.helpSrcLocColor
}
make :: Fonts Font -> Theme -> Style
make fonts theme =
Style
{ _base = textEdit TextColors.baseColor Fonts.base
, _autoNameOrigin = textEdit TextColors.baseColor Fonts.autoName
, _nameAtBinder = textEdit TextColors.baseColor Fonts.binders
, _bytes = textEdit TextColors.literalColor Fonts.literalBytes
, _text = textEdit TextColors.literalColor Fonts.literalText
, _num = textEdit TextColors.literalColor Fonts.base
}
where
textEdit color font =
TextEdit.Style
{ TextEdit._sCursorColor = theme ^. Theme.textEditCursorColor
, TextEdit._sCursorWidth = theme ^. Theme.textEditCursorWidth
, TextEdit._sEmptyStringsColors =
theme ^. Theme.textColors . TextColors.emptyEditColors
, TextEdit._sTextViewStyle =
TextView.Style
{ TextView._styleColor = theme ^. Theme.textColors . color
, TextView._styleFont = fonts ^. font
, TextView._styleUnderline = Nothing
}
}
data HelpEnv = HelpEnv
{ _heConfig :: !EventMapHelp.Config
, _heStyle :: !EventMapHelp.Style
, _heAnimIdPrefix :: !AnimId
, _heDirLayout :: !Dir.Layout
, _heDirTexts :: !(Dir.Texts Text)
, _heGlueTexts :: !(Glue.Texts Text)
, _heCommonTexts :: !(MomentuTexts.Texts Text)
, _heEventMapTexts :: !(E.Texts Text)
}
Lens.makeLenses ''HelpEnv
instance Element.HasAnimIdPrefix HelpEnv where animIdPrefix = heAnimIdPrefix
instance Has TextView.Style HelpEnv where has = heStyle . has
instance Has Dir.Layout HelpEnv where has = heDirLayout
instance Has (Dir.Texts Text) HelpEnv where has = heDirTexts
instance Has (Glue.Texts Text) HelpEnv where has = heGlueTexts
instance Has (E.Texts Text) HelpEnv where has = heEventMapTexts
instance Has EventMapHelp.Config HelpEnv where has = heConfig
instance Has EventMapHelp.Style HelpEnv where has = heStyle
instance Has (MomentuTexts.Texts Text) HelpEnv where has = heCommonTexts
addHelp ::
Config ModKey -> Theme -> Language -> Font ->
Widget.Size -> Widget f -> Widget f
addHelp config theme language font size widget =
widget
& Widget.wState . Widget._StateFocused . Lens.mapped %~
(EventMapHelp.addHelpView size ?? env)
where
env =
HelpEnv
{ _heConfig =
EventMapHelp.Config
{ EventMapHelp._configOverlayDocKeys = config ^. Config.helpKeys
}
, _heAnimIdPrefix = ["help box"]
, _heDirLayout = language ^. has
, _heDirTexts = language ^. has
, _heEventMapTexts = language ^. has
, _heGlueTexts = language ^. has
, _heStyle = helpStyle font (theme ^. Theme.help)
, _heCommonTexts = language ^. has
}
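-- Build the Momentu main-loop configuration: animation timing and cursor
-- appearance come from the theme, the zoom keys come from the config, and
-- the help overlay is layered on as a post-processing step when help is
-- shown.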
mainLoopConfig ::
MkProperty IO o EventMapHelp.IsHelpShown ->
(Zoom -> IO (Fonts Font)) -> IO (Config ModKey, Theme, Language) -> MainConfig.Config
mainLoopConfig helpProp getFonts getConfig =
MainConfig.Config
{ _cAnim =
getConfig <&> (^. _2)
<&> \theme ->
Anim.Config
{ _acTimePeriod = theme ^. Theme.animationTimePeriodSec & realToFrac
, _acRemainingRatioInPeriod = theme ^. Theme.animationRemainInPeriod
, _acSpiral = Anim.SpiralAnimConf 0 0
}
, _cCursor =
\zoom ->
(,) <$> getFonts zoom <*> (getConfig <&> (^. _2))
<&> \(fonts, theme) ->
Cursor.Config
{ cursorColor = theme ^. Theme.cursorColor
, Cursor.decay = Just Cursor.Decay
{ Cursor.heightUnit = fonts ^. Fonts.base & Font.height
, Cursor.heightExponent = theme ^. Theme.cursorDecayExponent
}
}
, _cZoom = getConfig <&> (^. _1 . Config.zoom)
, _cPostProcess =
\zoom size widget ->
Property.getP helpProp
>>= \case
HelpNotShown -> pure widget
HelpShown ->
do
helpFont <- getFonts zoom <&> (^. Fonts.help)
(config, theme, language) <- getConfig
addHelp config theme language helpFont size widget & pure
, _cInvalidCursorOverlayColor =
getConfig <&> (^. _2)
<&> \theme ->
theme ^. Theme.invalidCursorOverlayColor
}
| Peaker/lamdu | src/Lamdu/Style/Make.hs | gpl-3.0 | 6,687 | 0 | 17 | 1,720 | 1,600 | 928 | 672 | -1 | -1 |
module Wordy2 where
import Text.ParserCombinators.Parsec
-- In reality you'd use an Either here so you could capture the error
answer :: String -> Maybe Integer
answer eq =
case parse parseLine "FAIL" eq of
Right a -> Just a
Left e -> Nothing -- error (show e)
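-- A minimal sketch of that Either-based variant, so the parse error is kept
-- instead of being discarded; 'answerE' is a hypothetical name and not part
-- of the exercise's required API.
answerE :: String -> Either ParseError Integer
answerE = parse parseLine "wordy"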
-- Take a line of input and parse it into a "What is" token, at least one number,
-- and zero or more operation/number pairs to perform. Then perform the ops and
-- return the result. The important trick is that we simply compose the
-- operations together, along with id for the first number.
parseLine :: Parser Integer
parseLine = do
spaces
_ <- pWhatIs
n0 <- pNum
opNs <- many pOpN
return $ compose opNs n0
where
compose = foldr (flip (.)) id
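-- Worked example of the composition trick (operations apply left to right,
-- ignoring arithmetic precedence, because each pOpN result is composed in
-- parse order):
--   "What is 5 plus 13 multiplied by 2?"
--     n0 = 5, opNs = [(+13), (*2)]
--     compose [(+13), (*2)] = (*2) . (+13), so the answer is (5+13)*2 = 36.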
-- An operation followed by a number (an expression may have 0 or more occurences)
-- returns a function for the represented operation which can be composed in parseLine
pOpN :: Parser (Integer -> Integer)
pOpN = do
spaces
op <- try (string "multiplied by")
<|> try (string "times")
<|> try (string "divided by")
<|> try (string "plus")
<|> string "minus"
spaces
n <- pNum
case op of
"multiplied by" -> return (*n)
"times" -> return (*n)
"divided by" -> return (`div`n)
"plus" -> return (+n)
"minus" -> return (\x -> x-n)
-- Just a number -- there should always be at least 1 number
pNum :: Parser Integer
pNum = do
sign <- option ' ' (char '-')
n <- many1 digit
return (read $ sign : n)
-- The string "What is" or "what is" if you don't capitalize.
pWhatIs :: Parser ()
pWhatIs = do
  _ <- try (string "What is") <|> string "what is"
spaces
return ()
| ciderpunx/exercismo | src/Wordy2.hs | gpl-3.0 | 1,714 | 0 | 14 | 459 | 424 | 210 | 214 | 41 | 5 |
{-# LANGUAGE CPP, DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Hie.Language.Haskell.Exts.Syntax
-- Copyright : (c) Niklas Broberg 2004-2009,
-- (c) The GHC Team, 1997-2000
-- License : BSD-style (see the file LICENSE.txt)
--
-- Maintainer : Niklas Broberg, [email protected]
-- Stability : stable
-- Portability : portable
--
-- A suite of datatypes describing the abstract syntax of Haskell 98
-- <http://www.haskell.org/onlinereport/> plus registered extensions, including:
--
-- * multi-parameter type classes with functional dependencies (MultiParamTypeClasses, FunctionalDependencies)
--
-- * parameters of type class assertions are unrestricted (FlexibleContexts)
--
-- * 'forall' types as universal and existential quantification (RankNTypes, ExistentialQuantification, etc)
--
-- * pattern guards (PatternGuards)
--
-- * implicit parameters (ImplicitParameters)
--
-- * generalised algebraic data types (GADTs)
--
-- * template haskell (TemplateHaskell)
--
-- * empty data type declarations (EmptyDataDecls)
--
-- * unboxed tuples (UnboxedTuples)
--
-- * regular patterns (RegularPatterns)
--
-- * HSP-style XML expressions and patterns (XmlSyntax)
--
-----------------------------------------------------------------------------
module Hie.Language.Haskell.Exts.Syntax (
-- * Modules
Module(..), WarningText(..), ExportSpec(..),
ImportDecl(..), ImportSpec(..), Assoc(..),
-- * Declarations
Decl(..), Binds(..), IPBind(..),
-- ** Type classes and instances
ClassDecl(..), InstDecl(..), Deriving,
-- ** Data type declarations
DataOrNew(..), ConDecl(..), QualConDecl(..), GadtDecl(..), BangType(..),
-- ** Function bindings
Match(..), Rhs(..), GuardedRhs(..),
-- * Class Assertions and Contexts
Context, FunDep(..), Asst(..),
-- * Types
Type(..), Boxed(..), Kind(..), TyVarBind(..),
-- * Expressions
Exp(..), Stmt(..), QualStmt(..), FieldUpdate(..),
Alt(..), GuardedAlts(..), GuardedAlt(..), XAttr(..),
-- * Patterns
Pat(..), PatField(..), PXAttr(..), RPat(..), RPatOp(..),
-- * Literals
Literal(..),
-- * Variables, Constructors and Operators
ModuleName(..), QName(..), Name(..), QOp(..), Op(..),
SpecialCon(..), CName(..), IPName(..), XName(..),
-- * Template Haskell
Bracket(..), Splice(..),
-- * FFI
Safety(..), CallConv(..),
-- * Pragmas
ModulePragma(..), Tool(..),
Rule(..), RuleVar(..), Activation(..),
Annotation(..),
-- * Builtin names
-- ** Modules
prelude_mod, main_mod,
-- ** Main function of a program
main_name,
-- ** Constructors
unit_con_name, tuple_con_name, list_cons_name, unboxed_singleton_con_name,
unit_con, tuple_con, unboxed_singleton_con,
-- ** Special identifiers
as_name, qualified_name, hiding_name, minus_name, bang_name, dot_name, star_name,
export_name, safe_name, unsafe_name, threadsafe_name, stdcall_name, ccall_name,
-- ** Type constructors
unit_tycon_name, fun_tycon_name, list_tycon_name, tuple_tycon_name, unboxed_singleton_tycon_name,
unit_tycon, fun_tycon, list_tycon, tuple_tycon, unboxed_singleton_tycon,
-- * Source coordinates
SrcLoc(..),
) where
#ifdef __GLASGOW_HASKELL__
#ifdef BASE4
import Data.Data
#else
import Data.Generics (Data(..),Typeable(..))
#endif
#endif
import Hie.Language.Haskell.Exts.SrcLoc (SrcLoc(..))
import Hie.Language.Haskell.Exts.Annotated.Syntax (Boxed(..), Tool(..))
-- | The name of a Haskell module.
newtype ModuleName = ModuleName String
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Constructors with special syntax.
-- These names are never qualified, and always refer to builtin type or
-- data constructors.
data SpecialCon
= UnitCon -- ^ unit type and data constructor @()@
| ListCon -- ^ list type constructor @[]@
| FunCon -- ^ function type constructor @->@
| TupleCon Boxed Int -- ^ /n/-ary tuple type and data
-- constructors @(,)@ etc, possibly boxed @(\#,\#)@
| Cons -- ^ list data constructor @(:)@
| UnboxedSingleCon -- ^ unboxed singleton tuple constructor @(\# \#)@
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | This type is used to represent qualified variables, and also
-- qualified constructors.
data QName
= Qual ModuleName Name -- ^ name qualified with a module name
| UnQual Name -- ^ unqualified local name
| Special SpecialCon -- ^ built-in constructor with special syntax
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | This type is used to represent variables, and also constructors.
data Name
= Ident String -- ^ /varid/ or /conid/.
| Symbol String -- ^ /varsym/ or /consym/
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An implicit parameter name.
data IPName
= IPDup String -- ^ ?/ident/, non-linear implicit parameter
| IPLin String -- ^ %/ident/, linear implicit parameter
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Possibly qualified infix operators (/qop/), appearing in expressions.
data QOp
= QVarOp QName -- ^ variable operator (/qvarop/)
| QConOp QName -- ^ constructor operator (/qconop/)
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Operators appearing in @infix@ declarations are never qualified.
data Op
= VarOp Name -- ^ variable operator (/varop/)
| ConOp Name -- ^ constructor operator (/conop/)
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A name (/cname/) of a component of a class or data type in an @import@
-- or export specification.
data CName
= VarName Name -- ^ name of a method or field
| ConName Name -- ^ name of a data constructor
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A complete Haskell source module.
data Module = Module SrcLoc ModuleName [ModulePragma] (Maybe WarningText)
(Maybe [ExportSpec]) [ImportDecl] [Decl]
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An item in a module's export specification.
data ExportSpec
= EVar QName -- ^ variable
| EAbs QName -- ^ @T@:
-- a class or datatype exported abstractly,
-- or a type synonym.
| EThingAll QName -- ^ @T(..)@:
-- a class exported with all of its methods, or
-- a datatype exported with all of its constructors.
| EThingWith QName [CName] -- ^ @T(C_1,...,C_n)@:
-- a class exported with some of its methods, or
-- a datatype exported with some of its constructors.
| EModuleContents ModuleName -- ^ @module M@:
-- re-export a module.
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An import declaration.
data ImportDecl = ImportDecl
{ importLoc :: SrcLoc -- ^ position of the @import@ keyword.
, importModule :: ModuleName -- ^ name of the module imported.
, importQualified :: Bool -- ^ imported @qualified@?
, importSrc :: Bool -- ^ imported with @{-\# SOURCE \#-}@?
, importPkg :: Maybe String -- ^ imported with explicit package name
, importAs :: Maybe ModuleName -- ^ optional alias name in an @as@ clause.
, importSpecs :: Maybe (Bool,[ImportSpec])
-- ^ optional list of import specifications.
-- The 'Bool' is 'True' if the names are excluded
-- by @hiding@.
}
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An import specification, representing a single explicit item imported
-- (or hidden) from a module.
data ImportSpec
= IVar Name -- ^ variable
| IAbs Name -- ^ @T@:
-- the name of a class, datatype or type synonym.
| IThingAll Name -- ^ @T(..)@:
-- a class imported with all of its methods, or
-- a datatype imported with all of its constructors.
| IThingWith Name [CName] -- ^ @T(C_1,...,C_n)@:
-- a class imported with some of its methods, or
-- a datatype imported with some of its constructors.
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Associativity of an operator.
data Assoc
= AssocNone -- ^ non-associative operator (declared with @infix@)
| AssocLeft -- ^ left-associative operator (declared with @infixl@).
| AssocRight -- ^ right-associative operator (declared with @infixr@)
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A single derived instance, which may have arguments since it may be a MPTC.
type Deriving = (QName, [Type])
-- | A top-level declaration.
data Decl
= TypeDecl SrcLoc Name [TyVarBind] Type
-- ^ A type declaration
| TypeFamDecl SrcLoc Name [TyVarBind] (Maybe Kind)
-- ^ A type family declaration
| DataDecl SrcLoc DataOrNew Context Name [TyVarBind] [QualConDecl] [Deriving]
-- ^ A data OR newtype declaration
| GDataDecl SrcLoc DataOrNew Context Name [TyVarBind] (Maybe Kind) [GadtDecl] [Deriving]
-- ^ A data OR newtype declaration, GADT style
| DataFamDecl SrcLoc {-data-} Context Name [TyVarBind] (Maybe Kind)
-- ^ A data family declaration
| TypeInsDecl SrcLoc Type Type
-- ^ A type family instance declaration
| DataInsDecl SrcLoc DataOrNew Type [QualConDecl] [Deriving]
-- ^ A data family instance declaration
| GDataInsDecl SrcLoc DataOrNew Type (Maybe Kind) [GadtDecl] [Deriving]
-- ^ A data family instance declaration, GADT style
| ClassDecl SrcLoc Context Name [TyVarBind] [FunDep] [ClassDecl]
-- ^ A declaration of a type class
| InstDecl SrcLoc Context QName [Type] [InstDecl]
     -- ^ A declaration of a type class instance
| DerivDecl SrcLoc Context QName [Type]
-- ^ A standalone deriving declaration
| InfixDecl SrcLoc Assoc Int [Op]
-- ^ A declaration of operator fixity
| DefaultDecl SrcLoc [Type]
-- ^ A declaration of default types
| SpliceDecl SrcLoc Exp
-- ^ A Template Haskell splicing declaration
| TypeSig SrcLoc [Name] Type
-- ^ A type signature declaration
| FunBind [Match]
-- ^ A set of function binding clauses
| PatBind SrcLoc Pat (Maybe Type) Rhs {-where-} Binds
-- ^ A pattern binding
| ForImp SrcLoc CallConv Safety String Name Type
-- ^ A foreign import declaration
| ForExp SrcLoc CallConv String Name Type
-- ^ A foreign export declaration
| RulePragmaDecl SrcLoc [Rule]
-- ^ A RULES pragma
| DeprPragmaDecl SrcLoc [([Name], String)]
-- ^ A DEPRECATED pragma
| WarnPragmaDecl SrcLoc [([Name], String)]
-- ^ A WARNING pragma
| InlineSig SrcLoc Bool Activation QName
-- ^ An INLINE pragma
| InlineConlikeSig SrcLoc Activation QName
-- ^ An INLINE CONLIKE pragma
| SpecSig SrcLoc QName [Type]
-- ^ A SPECIALISE pragma
| SpecInlineSig SrcLoc Bool Activation QName [Type]
-- ^ A SPECIALISE INLINE pragma
| InstSig SrcLoc Context QName [Type]
-- ^ A SPECIALISE instance pragma
| AnnPragma SrcLoc Annotation
-- ^ An ANN pragma
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An annotation through an ANN pragma.
data Annotation
= Ann Name Exp
-- ^ An annotation for a declared name.
| TypeAnn Name Exp
-- ^ An annotation for a declared type.
| ModuleAnn Exp
-- ^ An annotation for the defining module.
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A flag stating whether a declaration is a data or newtype declaration.
data DataOrNew = DataType | NewType
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A binding group inside a @let@ or @where@ clause.
data Binds
= BDecls [Decl] -- ^ An ordinary binding group
| IPBinds [IPBind] -- ^ A binding group for implicit parameters
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A binding of an implicit parameter.
data IPBind = IPBind SrcLoc IPName Exp
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Clauses of a function binding.
data Match
= Match SrcLoc Name [Pat] (Maybe Type) Rhs {-where-} Binds
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A single constructor declaration within a data type declaration,
-- which may have an existential quantification binding.
data QualConDecl
= QualConDecl SrcLoc
{-forall-} [TyVarBind] {- . -} Context
{- => -} ConDecl
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Declaration of an ordinary data constructor.
data ConDecl
= ConDecl Name [BangType]
-- ^ ordinary data constructor
| InfixConDecl BangType Name BangType
-- ^ infix data constructor
| RecDecl Name [([Name],BangType)]
-- ^ record constructor
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A single constructor declaration in a GADT data type declaration.
data GadtDecl
= GadtDecl SrcLoc Name Type
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Declarations inside a class declaration.
data ClassDecl
= ClsDecl Decl
-- ^ ordinary declaration
| ClsDataFam SrcLoc Context Name [TyVarBind] (Maybe Kind)
-- ^ declaration of an associated data type
| ClsTyFam SrcLoc Name [TyVarBind] (Maybe Kind)
-- ^ declaration of an associated type synonym
| ClsTyDef SrcLoc Type Type
-- ^ default choice for an associated type synonym
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Declarations inside an instance declaration.
data InstDecl
= InsDecl Decl
-- ^ ordinary declaration
| InsType SrcLoc Type Type
-- ^ an associated type definition
| InsData SrcLoc DataOrNew Type [QualConDecl] [Deriving]
-- ^ an associated data type implementation
| InsGData SrcLoc DataOrNew Type (Maybe Kind) [GadtDecl] [Deriving]
-- ^ an associated data type implemented using GADT style
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | The type of a constructor argument or field, optionally including
-- a strictness annotation.
data BangType
= BangedTy Type -- ^ strict component, marked with \"@!@\"
| UnBangedTy Type -- ^ non-strict component
| UnpackedTy Type -- ^ unboxed component, marked with an UNPACK pragma
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | The right hand side of a function or pattern binding.
data Rhs
= UnGuardedRhs Exp -- ^ unguarded right hand side (/exp/)
| GuardedRhss [GuardedRhs]
-- ^ guarded right hand side (/gdrhs/)
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A guarded right hand side @|@ /stmts/ @=@ /exp/.
-- The guard is a series of statements when using pattern guards,
-- otherwise it will be a single qualifier expression.
data GuardedRhs
= GuardedRhs SrcLoc [Stmt] Exp
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A type qualified with a context.
-- An unqualified type has an empty context.
data Type
= TyForall
(Maybe [TyVarBind])
Context
Type -- ^ qualified type
| TyFun Type Type -- ^ function type
| TyTuple Boxed [Type] -- ^ tuple type, possibly boxed
| TyList Type -- ^ list syntax, e.g. [a], as opposed to [] a
| TyApp Type Type -- ^ application of a type constructor
| TyVar Name -- ^ type variable
| TyCon QName -- ^ named type or type constructor
| TyParen Type -- ^ type surrounded by parentheses
| TyInfix Type QName Type -- ^ infix type constructor
| TyKind Type Kind -- ^ type with explicit kind signature
-- CHRIS
| TySplice Splice
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A type variable declaration, optionally with an explicit kind annotation.
data TyVarBind
= KindedVar Name Kind -- ^ variable binding with kind annotation
| UnkindedVar Name -- ^ ordinary variable binding
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An explicit kind annotation.
data Kind
= KindStar -- ^ @*@, the kind of types
| KindBang -- ^ @!@, the kind of unboxed types
| KindFn Kind Kind -- ^ @->@, the kind of a type constructor
| KindParen Kind -- ^ a kind surrounded by parentheses
| KindVar Name -- ^ a kind variable (as of yet unsupported by compilers)
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A functional dependency, given on the form
-- l1 l2 ... ln -> r1 r2 ... rn
data FunDep
= FunDep [Name] [Name]
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A context is a set of assertions
type Context = [Asst]
-- | Class assertions.
-- In Haskell 98, the argument would be a /tyvar/, but this definition
-- allows multiple parameters, and allows them to be /type/s.
-- Also extended with support for implicit parameters and equality constraints.
data Asst = ClassA QName [Type] -- ^ ordinary class assertion
| InfixA Type QName Type -- ^ class assertion where the class name is given infix
| IParam IPName Type -- ^ implicit parameter assertion
| EqualP Type Type -- ^ type equality constraint
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | /literal/
-- Values of this type hold the abstract value of the literal, not the
-- precise string representation used. For example, @10@, @0o12@ and @0xa@
-- have the same representation.
data Literal
= Char Char -- ^ character literal
| String String -- ^ string literal
| Int Integer -- ^ integer literal
| Frac Rational -- ^ floating point literal
| PrimInt Integer -- ^ unboxed integer literal
| PrimWord Integer -- ^ unboxed word literal
| PrimFloat Rational -- ^ unboxed float literal
| PrimDouble Rational -- ^ unboxed double literal
| PrimChar Char -- ^ unboxed character literal
| PrimString String -- ^ unboxed string literal
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Haskell expressions.
data Exp
= Var QName -- ^ variable
| IPVar IPName -- ^ implicit parameter variable
| Con QName -- ^ data constructor
| Lit Literal -- ^ literal constant
| InfixApp Exp QOp Exp -- ^ infix application
| App Exp Exp -- ^ ordinary application
| NegApp Exp -- ^ negation expression @-/exp/@ (unary minus)
| Lambda SrcLoc [Pat] Exp -- ^ lambda expression
| Let Binds Exp -- ^ local declarations with @let@ ... @in@ ...
| If Exp Exp Exp -- ^ @if@ /exp/ @then@ /exp/ @else@ /exp/
| Case Exp [Alt] -- ^ @case@ /exp/ @of@ /alts/
| Do [Stmt] -- ^ @do@-expression:
-- the last statement in the list
-- should be an expression.
| MDo [Stmt] -- ^ @mdo@-expression
| Tuple Boxed [Exp] -- ^ tuple expression
| TupleSection Boxed [Maybe Exp] -- ^ tuple section expression, e.g. @(,,3)@
| List [Exp] -- ^ list expression
| Paren Exp -- ^ parenthesised expression
| LeftSection Exp QOp -- ^ left section @(@/exp/ /qop/@)@
| RightSection QOp Exp -- ^ right section @(@/qop/ /exp/@)@
| RecConstr QName [FieldUpdate]
-- ^ record construction expression
| RecUpdate Exp [FieldUpdate]
-- ^ record update expression
| EnumFrom Exp -- ^ unbounded arithmetic sequence,
-- incrementing by 1: @[from ..]@
| EnumFromTo Exp Exp -- ^ bounded arithmetic sequence,
-- incrementing by 1 @[from .. to]@
| EnumFromThen Exp Exp -- ^ unbounded arithmetic sequence,
-- with first two elements given @[from, then ..]@
| EnumFromThenTo Exp Exp Exp
-- ^ bounded arithmetic sequence,
-- with first two elements given @[from, then .. to]@
| ListComp Exp [QualStmt] -- ^ ordinary list comprehension
| ParComp Exp [[QualStmt]] -- ^ parallel list comprehension
| ExpTypeSig SrcLoc Exp Type -- ^ expression with explicit type signature
| VarQuote QName -- ^ @'x@ for template haskell reifying of expressions
| TypQuote QName -- ^ @''T@ for template haskell reifying of types
| BracketExp Bracket -- ^ template haskell bracket expression
| SpliceExp Splice -- ^ template haskell splice expression
    | QuasiQuote String String  -- ^ quasi-quotation: @[$/name/| /string/ |]@
-- Hsx
| XTag SrcLoc XName [XAttr] (Maybe Exp) [Exp]
-- ^ xml element, with attributes and children
| XETag SrcLoc XName [XAttr] (Maybe Exp)
-- ^ empty xml element, with attributes
| XPcdata String -- ^ PCDATA child element
| XExpTag Exp -- ^ escaped haskell expression inside xml
| XChildTag SrcLoc [Exp] -- ^ children of an xml element
-- Pragmas
| CorePragma String Exp -- ^ CORE pragma
| SCCPragma String Exp -- ^ SCC pragma
| GenPragma String (Int, Int) (Int, Int) Exp
-- ^ GENERATED pragma
-- Arrows
| Proc SrcLoc Pat Exp -- ^ arrows proc: @proc@ /pat/ @->@ /exp/
| LeftArrApp Exp Exp -- ^ arrow application (from left): /exp/ @-<@ /exp/
| RightArrApp Exp Exp -- ^ arrow application (from right): /exp/ @>-@ /exp/
| LeftArrHighApp Exp Exp -- ^ higher-order arrow application (from left): /exp/ @-<<@ /exp/
| RightArrHighApp Exp Exp -- ^ higher-order arrow application (from right): /exp/ @>>-@ /exp/
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | The name of an xml element or attribute,
-- possibly qualified with a namespace.
data XName
= XName String -- <name ...
| XDomName String String -- <dom:name ...
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An xml attribute, which is a name-expression pair.
data XAttr = XAttr XName Exp
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A template haskell bracket expression.
data Bracket
= ExpBracket Exp -- ^ expression bracket: @[| ... |]@
| PatBracket Pat -- ^ pattern bracket: @[p| ... |]@
| TypeBracket Type -- ^ type bracket: @[t| ... |]@
| DeclBracket [Decl] -- ^ declaration bracket: @[d| ... |]@
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A template haskell splice expression
data Splice
= IdSplice String -- ^ variable splice: @$var@
| ParenSplice Exp -- ^ parenthesised expression splice: @$(/exp/)@
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | The safety of a foreign function call.
data Safety
= PlayRisky -- ^ unsafe
| PlaySafe Bool -- ^ safe ('False') or threadsafe ('True')
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | The calling convention of a foreign function call.
data CallConv
= StdCall
| CCall
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A top level options pragma, preceding the module header.
data ModulePragma
= LanguagePragma SrcLoc [Name] -- ^ LANGUAGE pragma
| OptionsPragma SrcLoc (Maybe Tool) String
-- ^ OPTIONS pragma, possibly qualified with a tool, e.g. OPTIONS_GHC
| AnnModulePragma SrcLoc Annotation
-- ^ ANN pragma with module scope
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Activation clause of a RULES pragma.
data Activation
= AlwaysActive
| ActiveFrom Int
| ActiveUntil Int
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | The body of a RULES pragma.
data Rule
= Rule String Activation (Maybe [RuleVar]) Exp Exp
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Variables used in a RULES pragma, optionally annotated with types
data RuleVar
= RuleVar Name
| TypedRuleVar Name Type
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | Warning text to optionally use in the module header of e.g.
-- a deprecated module.
data WarningText
= DeprText String
| WarnText String
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A pattern, to be matched against a value.
data Pat
= PVar Name -- ^ variable
| PLit Literal -- ^ literal constant
| PNeg Pat -- ^ negated pattern
| PNPlusK Name Integer -- ^ n+k pattern
| PInfixApp Pat QName Pat -- ^ pattern with an infix data constructor
| PApp QName [Pat] -- ^ data constructor and argument patterns
| PTuple Boxed [Pat] -- ^ tuple pattern
| PList [Pat] -- ^ list pattern
| PParen Pat -- ^ parenthesized pattern
| PRec QName [PatField] -- ^ labelled pattern, record style
| PAsPat Name Pat -- ^ @\@@-pattern
| PWildCard -- ^ wildcard pattern: @_@
| PIrrPat Pat -- ^ irrefutable pattern: @~/pat/@
| PatTypeSig SrcLoc Pat Type -- ^ pattern with type signature
| PViewPat Exp Pat -- ^ view patterns of the form @(/exp/ -> /pat/)@
| PRPat [RPat] -- ^ regular list pattern
| PXTag SrcLoc XName [PXAttr] (Maybe Pat) [Pat]
-- ^ XML element pattern
| PXETag SrcLoc XName [PXAttr] (Maybe Pat)
-- ^ XML singleton element pattern
| PXPcdata String -- ^ XML PCDATA pattern
| PXPatTag Pat -- ^ XML embedded pattern
| PXRPats [RPat] -- ^ XML regular list pattern
| PExplTypeArg QName Type -- ^ Explicit generics style type argument e.g. @f {| Int |} x = ...@
    | PQuasiQuote String String -- ^ quasi quote pattern: @[$/name/| /string/ |]@
| PBangPat Pat -- ^ strict (bang) pattern: @f !x = ...@
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An XML attribute in a pattern.
data PXAttr = PXAttr XName Pat
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A regular pattern operator.
data RPatOp
= RPStar -- ^ @*@ = 0 or more
| RPStarG -- ^ @*!@ = 0 or more, greedy
| RPPlus -- ^ @+@ = 1 or more
| RPPlusG -- ^ @+!@ = 1 or more, greedy
| RPOpt -- ^ @?@ = 0 or 1
| RPOptG -- ^ @?!@ = 0 or 1, greedy
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An entity in a regular pattern.
data RPat
= RPOp RPat RPatOp -- ^ operator pattern, e.g. pat*
| RPEither RPat RPat -- ^ choice pattern, e.g. (1 | 2)
| RPSeq [RPat] -- ^ sequence pattern, e.g. (| 1, 2, 3 |)
| RPGuard Pat [Stmt] -- ^ guarded pattern, e.g. (| p | p < 3 |)
| RPCAs Name RPat -- ^ non-linear variable binding, e.g. (foo\@:(1 | 2))*
| RPAs Name RPat -- ^ linear variable binding, e.g. foo\@(1 | 2)
| RPParen RPat -- ^ parenthesised pattern, e.g. (2*)
| RPPat Pat -- ^ an ordinary pattern
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An /fpat/ in a labeled record pattern.
data PatField
= PFieldPat QName Pat -- ^ ordinary label-pattern pair
| PFieldPun Name -- ^ record field pun
| PFieldWildcard -- ^ record field wildcard
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A statement, representing both a /stmt/ in a @do@-expression,
-- an ordinary /qual/ in a list comprehension, as well as a /stmt/
-- in a pattern guard.
data Stmt
= Generator SrcLoc Pat Exp
-- ^ a generator: /pat/ @<-@ /exp/
| Qualifier Exp -- ^ an /exp/ by itself: in a @do@-expression,
-- an action whose result is discarded;
-- in a list comprehension and pattern guard,
-- a guard expression
| LetStmt Binds -- ^ local bindings
| RecStmt [Stmt] -- ^ a recursive binding group for arrows
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A general /transqual/ in a list comprehension,
-- which could potentially be a transform of the kind
-- enabled by TransformListComp.
data QualStmt
= QualStmt Stmt -- ^ an ordinary statement
| ThenTrans Exp -- ^ @then@ /exp/
| ThenBy Exp Exp -- ^ @then@ /exp/ @by@ /exp/
| GroupBy Exp -- ^ @then@ @group@ @by@ /exp/
| GroupUsing Exp -- ^ @then@ @group@ @using@ /exp/
| GroupByUsing Exp Exp -- ^ @then@ @group@ @by@ /exp/ @using@ /exp/
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An /fbind/ in a labeled construction or update expression.
data FieldUpdate
    = FieldUpdate QName Exp     -- ^ ordinary label-expression pair
| FieldPun Name -- ^ record field pun
| FieldWildcard -- ^ record field wildcard
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | An /alt/ alternative in a @case@ expression.
data Alt
= Alt SrcLoc Pat GuardedAlts Binds
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | The right-hand sides of a @case@ alternative,
-- which may be a single right-hand side or a
-- set of guarded ones.
data GuardedAlts
= UnGuardedAlt Exp -- ^ @->@ /exp/
| GuardedAlts [GuardedAlt] -- ^ /gdpat/
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-- | A guarded case alternative @|@ /stmts/ @->@ /exp/.
data GuardedAlt
= GuardedAlt SrcLoc [Stmt] Exp
#ifdef __GLASGOW_HASKELL__
deriving (Eq,Ord,Show,Typeable,Data)
#else
deriving (Eq,Ord,Show)
#endif
-----------------------------------------------------------------------------
-- Builtin names.
prelude_mod, main_mod :: ModuleName
prelude_mod = ModuleName "Prelude"
main_mod = ModuleName "Main"
main_name :: Name
main_name = Ident "main"
unit_con_name :: QName
unit_con_name = Special UnitCon
tuple_con_name :: Boxed -> Int -> QName
tuple_con_name b i = Special (TupleCon b (i+1))
list_cons_name :: QName
list_cons_name = Special Cons
unboxed_singleton_con_name :: QName
unboxed_singleton_con_name = Special UnboxedSingleCon
unit_con :: Exp
unit_con = Con unit_con_name
tuple_con :: Boxed -> Int -> Exp
tuple_con b i = Con (tuple_con_name b i)
unboxed_singleton_con :: Exp
unboxed_singleton_con = Con (unboxed_singleton_con_name)
as_name, qualified_name, hiding_name, minus_name, bang_name, dot_name, star_name :: Name
as_name = Ident "as"
qualified_name = Ident "qualified"
hiding_name = Ident "hiding"
minus_name = Symbol "-"
bang_name = Symbol "!"
dot_name = Symbol "."
star_name = Symbol "*"
export_name, safe_name, unsafe_name, threadsafe_name, stdcall_name, ccall_name :: Name
export_name = Ident "export"
safe_name = Ident "safe"
unsafe_name = Ident "unsafe"
threadsafe_name = Ident "threadsafe"
stdcall_name = Ident "stdcall"
ccall_name = Ident "ccall"
unit_tycon_name, fun_tycon_name, list_tycon_name, unboxed_singleton_tycon_name :: QName
unit_tycon_name = unit_con_name
fun_tycon_name = Special FunCon
list_tycon_name = Special ListCon
unboxed_singleton_tycon_name = Special UnboxedSingleCon
tuple_tycon_name :: Boxed -> Int -> QName
tuple_tycon_name b i = tuple_con_name b i
unit_tycon, fun_tycon, list_tycon, unboxed_singleton_tycon :: Type
unit_tycon = TyCon unit_tycon_name
fun_tycon = TyCon fun_tycon_name
list_tycon = TyCon list_tycon_name
unboxed_singleton_tycon = TyCon unboxed_singleton_tycon_name
tuple_tycon :: Boxed -> Int -> Type
tuple_tycon b i = TyCon (tuple_tycon_name b i)
| monsanto/hie | Hie/Language/Haskell/Exts/Syntax.hs | gpl-3.0 | 35,625 | 0 | 11 | 9,442 | 5,317 | 3,309 | 2,008 | 450 | 1 |
{-|
Module : Photostrip
Description : Photostrip Module
Copyright : (c) Chris Tetreault, 2014
License : GPL-3
Stability : experimental
The Photostrip module exposes functions relating to transforming images into
a photostrip.
When the camera module begins capturing images, these images will be passed to
the photostrip module. At this point, it is the photostrip module's
responsibility to transform these images into a single photostrip image.
This file represents the functions that must be implemented in order to
provide a complete implementation.
-}
module DMP.Photobooth.Module.Photostrip where
import qualified Data.ByteString.Lazy as BS
import DMP.Photobooth.Module.Types
import DMP.Photobooth.Monads
{-|
Transform a list of multiple foreground images into a completed photostrip.
All images are loaded into binary blobs
-}
process ::
[BS.ByteString]
-> ModuleT s BS.ByteString
process =
undefined
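-- A hypothetical invocation by the core, for illustration only (the frame
-- names are assumed): @stripBlob <- process [frame1, frame2, frame3, frame4]@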
{-|
Initializes the module with its configuration. Returns a Result
object that will contain the module's initial state
If the implementation needs to perform some imperative-style
"initialization", it should be done here. The return value of this function
will be stored by the core and used for this module's functions.
-}
init ::
ModuleT s ()
init =
undefined
{-|
Finalizes the module. If this module has any sort of resources that need
cleaning up, it should be done here.
-}
finalize ::
ModuleT s ()
finalize =
undefined
{-|
Request the default configuration of this module.
-}
defaultConfig ::
Persistable
defaultConfig =
undefined
{-|
The initial state of the photostrip module
-}
initialState ::
Maybe s
initialState =
undefined
| christetreault/dmp-photo-booth-prime | DMP/Photobooth/Module/Photostrip.hs | gpl-3.0 | 1,689 | 0 | 7 | 291 | 119 | 73 | 46 | 25 | 1 |
{---------------------------------------------------------------------}
{- Copyright 2015 Nathan Bloomfield -}
{- -}
{- This file is part of Feivel. -}
{- -}
{- Feivel is free software: you can redistribute it and/or modify -}
{- it under the terms of the GNU General Public License version 3, -}
{- as published by the Free Software Foundation. -}
{- -}
{- Feivel is distributed in the hope that it will be useful, but -}
{- WITHOUT ANY WARRANTY; without even the implied warranty of -}
{- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -}
{- GNU General Public License for more details. -}
{- -}
{- You should have received a copy of the GNU General Public License -}
{- along with Feivel. If not, see <http://www.gnu.org/licenses/>. -}
{---------------------------------------------------------------------}
{-# LANGUAGE FlexibleContexts #-}
module Feivel.Eval.Util (
module Feivel.Eval.EvalM,
module Feivel.Eval.Eval,
module Carl,
module Feivel.Grammar,
module Feivel.Error,
module Feivel.Store,
pInteger, pRat,
-- Utilities
eKey, eIfThenElse, eAtIdx, eMacro, evalWith,
-- Constants
suchThat, hasSameTypeAs,
zeroZZ, zeroQQ, zeroBB, zeroMod
) where
import Feivel.Eval.EvalM
import Feivel.Eval.Eval
import Feivel.Store
import Feivel.Error
import Feivel.Grammar
import Carl
import Feivel.Parse (pInteger, pRat)
{--------------}
{- :Utilities -}
{--------------}
-- eval with a specified store
evalWith :: (Eval t) => t -> Store Expr -> EvalM t
evalWith t st = do
old <- getState
putState st
u <- eval t
putState old
return u
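-- Look up a key in the store and evaluate the expression bound to it.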
eKey :: (Eval a, Get a) => Key -> Locus -> EvalM a
eKey key loc = lookupKey loc key >>= getVal >>= eval
eIfThenElse :: (ToExpr a, Get a, Eval a, Eval b, ToExpr b, HasLocus a, HasLocus b) => b -> a -> a -> EvalM a
eIfThenElse b t f = do
test <- eval b >>= getVal
true <- eval t >>= getVal
false <- eval f >>= getVal
if test then (eval true) else (eval false)
eAtIdx :: (ToExpr a, ToExpr b, ToExpr c, Get (Matrix d), Eval a, Eval b, Eval c, HasLocus a, HasLocus b, HasLocus c)
=> c -> a -> b -> Locus -> EvalM d
eAtIdx m h k loc = do
i <- eval h >>= getVal
j <- eval k >>= getVal
p <- eval m >>= getVal
tryEvalM loc $ mEntryOf (i,j) p
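-- Evaluate a macro call: bind the supplied (type, key, expression) arguments
-- in a fresh context, evaluate the macro to obtain its default bindings and
-- body expression, then evaluate that body under the merged store.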
eMacro :: (Get b, Eval b, Eval Expr) => [(Type, Key, Expr)] -> Expr -> Locus -> EvalM b
eMacro vals mac loc = do
old <- getState
ctx <- tryEvalM loc $ toStateT vals
(defaultVals, e) <- evalWith mac (mergeStores [ctx, old]) >>= getVal :: EvalM (Store Expr, Expr)
let newSt = mergeStores [ctx, defaultVals, old]
evalWith e newSt >>= getVal >>= (`evalWith` newSt)
{--------------}
{- :Constants -}
{--------------}
hasSameTypeAs :: a -> a -> ()
hasSameTypeAs _ _ = ()
suchThat :: (Monad m) => a -> m a
suchThat = return
zeroZZ :: Integer
zeroZZ = 0
zeroQQ :: Rat
zeroQQ = 0 :/: 1
zeroBB :: Bool
zeroBB = False
zeroMod :: Integer -> ZZModulo
zeroMod n = 0 `zzmod` n
| nbloomf/feivel | src/Feivel/Eval/Util.hs | gpl-3.0 | 3,327 | 0 | 12 | 985 | 884 | 476 | 408 | 60 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.MapsEngine.Rasters.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Return metadata for a single raster.
--
-- /See:/ <https://developers.google.com/maps-engine/ Google Maps Engine API Reference> for @mapsengine.rasters.get@.
module Network.Google.Resource.MapsEngine.Rasters.Get
(
-- * REST Resource
RastersGetResource
-- * Creating a Request
, rastersGet
, RastersGet
-- * Request Lenses
, rgId
) where
import Network.Google.MapsEngine.Types
import Network.Google.Prelude
-- | A resource alias for @mapsengine.rasters.get@ method which the
-- 'RastersGet' request conforms to.
type RastersGetResource =
"mapsengine" :>
"v1" :>
"rasters" :>
Capture "id" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Raster
-- | Return metadata for a single raster.
--
-- /See:/ 'rastersGet' smart constructor.
newtype RastersGet = RastersGet'
{ _rgId :: Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RastersGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rgId'
rastersGet
:: Text -- ^ 'rgId'
-> RastersGet
rastersGet pRgId_ =
RastersGet'
{ _rgId = pRgId_
}
-- | The ID of the raster.
rgId :: Lens' RastersGet Text
rgId = lens _rgId (\ s a -> s{_rgId = a})
instance GoogleRequest RastersGet where
type Rs RastersGet = Raster
type Scopes RastersGet =
'["https://www.googleapis.com/auth/mapsengine",
"https://www.googleapis.com/auth/mapsengine.readonly"]
requestClient RastersGet'{..}
= go _rgId (Just AltJSON) mapsEngineService
where go
= buildClient (Proxy :: Proxy RastersGetResource)
mempty
| rueshyna/gogol | gogol-maps-engine/gen/Network/Google/Resource/MapsEngine/Rasters/Get.hs | mpl-2.0 | 2,551 | 0 | 12 | 605 | 302 | 186 | 116 | 47 | 1 |
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- Module : Gen.Types.Pager
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : This Source Code Form is subject to the terms of
--               the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
module Gen.Types.Pager where
import Control.Applicative
import Control.Lens
import Control.Monad
import Data.Aeson
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NE
import Gen.Types.Id
import Gen.Types.Notation
data Token a = Token
{ _tokenInput :: Notation a
, _tokenOutput :: Notation a
} deriving (Eq, Show, Functor, Foldable)
makeLenses ''Token
instance FromJSON (Token Id) where
parseJSON = withObject "token" $ \o -> Token
<$> o .: "input_token"
<*> o .: "output_token"
data Pager a
= Next (NonEmpty (Notation a)) (Token a)
| Many (Notation a) (NonEmpty (Token a))
deriving (Eq, Show, Functor, Foldable)
instance FromJSON (Pager Id) where
parseJSON = withObject "pager" $ \o -> more o <|> next o
where
next o = Next
<$> oneOrMany o "result_key"
<*> parseJSON (Object o)
more o = do
inp <- oneOrMany o "input_token"
out <- oneOrMany o "output_token"
unless (NE.length inp == NE.length out) $
fail "input_token and output_token contain differing number of keys."
Many <$> o .: "more_results"
<*> pure (NE.zipWith Token inp out)
oneOrMany o k = o .: k <|> ((:|[]) <$> o .: k)
| olorin/amazonka | gen/src/Gen/Types/Pager.hs | mpl-2.0 | 2,060 | 0 | 15 | 603 | 454 | 244 | 210 | 41 | 0 |
{-
    Module responsible for working with topic tags and with the names of article authors.
    https://github.com/denisshevchenko/ruhaskell
    All rights belong to the Russian-speaking Haskell developers community, 2015.
-}
{-# LANGUAGE OverloadedStrings #-}
module Tags (
buildPostsTags,
buildPostsAuthors,
createPageWithAllTags,
createPageWithAllAuthors,
convertTagsToLinks,
convertAuthorsToLinks
) where
import Data.Monoid (mconcat)
import Network.HTTP (urlEncode)
import Context (postContext)
import Misc (TagsReader, TagsAndAuthors, getNameOfAuthor)
import Control.Monad.Reader
import Hakyll
-- Extracts the values of the tags field from all posts and collects them together.
-- urlEncode is needed to form non-English tags correctly.
buildPostsTags :: MonadMetadata m => m Tags
buildPostsTags = buildTags "posts/*" $ fromCapture "tags/*.html" . urlEncode
-- Extracts the values of the author field from all posts and collects them together.
-- urlEncode is needed to form non-English author names correctly.
buildPostsAuthors :: MonadMetadata m => m Tags
buildPostsAuthors = buildTagsWith getNameOfAuthor "posts/*" $ fromCapture "authors/*.html" . urlEncode
-- Helper function that builds a page with a cloud of the given tags.
createPageWithTagsCloud :: Tags
-> Identifier
-> Double
-> Double
-> String
-> String
-> Identifier
-> Rules ()
createPageWithTagsCloud specificTags
pageWithSpecificTags
smallestFontSizeInPercent
biggestFontSizeInPercent
pageTitle
cloudName
specificTemplate =
create [pageWithSpecificTags] $ do
route idRoute
compile $ do
let renderedCloud = \_ -> renderTagCloud smallestFontSizeInPercent
biggestFontSizeInPercent
specificTags
tagsContext = mconcat [ constField "title" pageTitle
, field cloudName renderedCloud
, defaultContext
]
makeItem "" >>= loadAndApplyTemplate specificTemplate tagsContext
>>= loadAndApplyTemplate "templates/default.html" tagsContext
>>= relativizeUrls
-- Build the page with the cloud of topic tags.
createPageWithAllTags :: TagsReader
createPageWithAllTags = do
tagsAndAuthors <- ask
lift $ createPageWithTagsCloud (fst tagsAndAuthors)
"tags.html"
110
220
"Темы публикаций"
"tagsCloud"
"templates/tags.html"
return ()
-- Build the page with the cloud of post authors.
createPageWithAllAuthors :: TagsReader
createPageWithAllAuthors = do
tagsAndAuthors <- ask
lift $ createPageWithTagsCloud (snd tagsAndAuthors)
"authors.html"
110
220
"Наши авторы"
"authorsCloud"
"templates/authors.html"
return ()
convertSpecificTagsToLinks :: TagsAndAuthors
-> Tags
-> String
-> Rules ()
convertSpecificTagsToLinks tagsAndAuthors specificTags aTitle =
tagsRules specificTags $ \tag pattern -> do
let title = aTitle ++ " `" ++ tag ++ "`"
route idRoute
compile $ do
posts <- recentFirst =<< loadAll pattern
let taggedPostsContext = mconcat [ listField "posts" (postContext tagsAndAuthors) (return posts)
, constField "title" title
, defaultContext
]
makeItem "" >>= loadAndApplyTemplate "templates/posts.html" taggedPostsContext
>>= loadAndApplyTemplate "templates/default.html" taggedPostsContext
>>= relativizeUrls
-- Turn topic tags into links, so that posts can be filtered by topic.
convertTagsToLinks :: TagsReader
convertTagsToLinks = do
tagsAndAuthors <- ask
lift $ convertSpecificTagsToLinks tagsAndAuthors
(fst tagsAndAuthors)
"Все статьи по теме"
return ()
-- Turn author names into links, so that posts can be filtered by author.
convertAuthorsToLinks :: TagsReader
convertAuthorsToLinks = do
tagsAndAuthors <- ask
lift $ convertSpecificTagsToLinks tagsAndAuthors
(snd tagsAndAuthors)
"Все статьи автора"
return ()
| akamch/ruhaskell-old | src/Tags.hs | unlicense | 6,042 | 0 | 19 | 2,140 | 678 | 338 | 340 | 97 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module Mdb.Serve.Resource.File ( WithFile, fileResource, File(..), Container(..), Stream(..) ) where
import Control.Monad.Catch (MonadMask)
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Reader (ReaderT)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except
import Data.Aeson
import Data.JSON.Schema (JSONSchema (..), gSchema)
import Data.Monoid ((<>))
import qualified Data.Text as T
import Database.SQLite.Simple (FromRow (..), field)
import Generics.Generic.Aeson
import GHC.Generics
import Rest
import qualified Rest.Resource as R
import Mdb.Database
import Mdb.Serve.Auth as AUTH
import Mdb.Types
data File = File
{ fileId :: ! FileId
, fileSize :: ! Integer
, fileMime :: ! T.Text
, fileDuration :: Maybe Double
} deriving ( Generic, Show )
instance FromRow File where
fromRow = File <$> field <*> field <*> field <*> field
instance ToJSON File where
toJSON = gtoJson
instance FromJSON File where
parseJSON = gparseJson
instance JSONSchema File where
schema = gSchema
data FileListSelector
= AllFiles
| FilesInAlbum AlbumId
| PersonNoAlbum PersonId
type WithFile m = ReaderT FileId (Authenticated m)
fileResource :: (MonadMask m, MonadIO m) => Resource (Authenticated m) (WithFile m) FileId FileListSelector Void
fileResource = mkResourceReader
{ R.name = "file"
, R.description = "Access file info"
, R.schema = withListing AllFiles $ named
[ ( "inAlbum" , listingBy (FilesInAlbum . read) )
, ( "personNoAlbum" , listingBy (PersonNoAlbum . read) )
, ( "byId" , singleBy read)
]
, R.list = fileListHandler
, R.get = Nothing
, R.selects =
[ ( "container", getContainerInfo )
]
}
fileListHandler :: (MonadMask m, MonadIO m) => FileListSelector -> ListHandler (Authenticated m)
fileListHandler which = mkListing jsonO handler where
handler :: (MonadMask m, MonadIO m) => Range -> ExceptT Reason_ (Authenticated m) [File]
handler r = case which of
AllFiles -> lift $ AUTH.query
( "SELECT f.file_id, f.file_size, f.file_mime, c.container_duration FROM auth_file f "
<> "LEFT JOIN container AS c ON f.file_id = c.file_id "
<> "LIMIT ? OFFSET ?"
) (count r, offset r)
FilesInAlbum aid -> lift $ AUTH.query
( "SELECT f.file_id, f.file_size, file_mime, c.container_duration FROM auth_file f "
<> "LEFT JOIN container AS c ON f.file_id = c.file_id "
<> "NATURAL JOIN album_file "
<> "WHERE album_file.album_id = ? "
<> "ORDER BY f.file_name ASC "
<> "LIMIT ? OFFSET ?"
) (aid, count r, offset r)
-- files assigned to a person but not part of an album.
PersonNoAlbum pid -> lift $ AUTH.query
( "SELECT DISTINCT f.file_id, f.file_size, f.file_mime, c.container_duration FROM auth_file f "
<> "NATURAL JOIN person_file "
<> "LEFT JOIN container AS c ON f.file_id = c.file_id "
<> "WHERE person_file.person_id = ? AND NOT EXISTS ("
<> "SELECT 1 FROM auth_album a "
<> "NATURAL JOIN person_file "
<> "NATURAL JOIN album_file "
<> "WHERE person_file.person_id = ? AND album_file.file_id = f.file_id ) "
<> "ORDER BY f.file_name ASC "
<> "LIMIT ? OFFSET ?"
) (pid, pid, count r, offset r)
------------------------------------------------------------------------------------------------------------------------
-- Containers
------------------------------------------------------------------------------------------------------------------------
data Stream = Stream
{ streamId :: Int
, streamMediaType :: T.Text
, streamCodec :: T.Text
, streamBitRate :: Int
, streamWidth :: Int
, streamHeight :: Int
} deriving ( Generic, Show )
instance ToJSON Stream where
toJSON = gtoJson
instance JSONSchema Stream where
schema = gSchema
instance FromRow Stream where
fromRow = Stream <$> field <*> field <*> field <*> field <*> field <*> field
data Container = Container
{ duration :: Double -- ^ duration in seconds
, format :: T.Text -- ^ container format ("avi", "mkv", ...)
, streams :: [Stream]
} deriving ( Generic, Show )
instance ToJSON Container where
toJSON = gtoJson
instance JSONSchema Container where
schema = gSchema
getContainerInfo :: (MonadMask m, MonadIO m) => Handler (WithFile m)
getContainerInfo = mkIdHandler jsonO handler where
handler :: (MonadMask m, MonadIO m) => () -> FileId -> ExceptT Reason_ (WithFile m) Container
handler () fid = do
(d, fmt) <- ExceptT $ lift $ AUTH.queryOne
"SELECT container_duration, container_format FROM container WHERE file_id=?" (Only fid)
ss <- lift . lift $ AUTH.query
("SELECT stream_id, stream_media_type, stream_codec, stream_bit_rate, stream_width, stream_height " <>
"FROM stream WHERE file_id=?") (Only fid)
return $ Container d fmt ss
| waldheinz/mdb | src/lib/Mdb/Serve/Resource/File.hs | apache-2.0 | 5,540 | 6 | 21 | 1,625 | 1,170 | 658 | 512 | 119 | 3 |
{- |
This module provides utilities for easily performing current operations.
Specifically, it provides a way to operate on lists of IO actions much
like `sequence` and `mapM`, but in such a way where the actions are all
performed concurrently. All other semantics are the same: exceptions
encountered concurrently will be thrown in the calling thread (but
possibly not at the same time you would expect if you were to use
`sequence`, obviously), functions in this module block the calling
thread until all of the concurrent threads have returned, and the
order of the return values are the same as in their counterparts.
-}
module Control.Concurrent.ConcurrentLists (
concurrently,
concurrently_,
concurrentN,
concurrentN_,
cMapM,
cMapM_,
cMapMN,
cMapMN_
) where
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (MVar, newEmptyMVar, takeMVar, putMVar)
import Control.Exception (try, throw, SomeException)
import Control.Monad (void)
-- Public Types ---------------------------------------------------------------
-- Semi-Public Types ----------------------------------------------------------
-- Public Functions -----------------------------------------------------------
{- |
Like `sequence`, but all io operations are executed concurrently.
-}
concurrently :: [IO a] -> IO [a]
concurrently ios = do
jobs <- mapM async ios
results <- mapM takeMVar jobs
case sequence results of
Right vals -> return vals
Left err -> throw err
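-- A small usage sketch (with a hypothetical 'fetch' action; results come
-- back in the same order as the input actions):
--
-- > pages <- concurrently [fetch urlA, fetch urlB, fetch urlC]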
{- |
Like `sequence_`, but all io operations are executed concurrently.
-}
concurrently_ :: [IO a] -> IO ()
concurrently_ = void . concurrently
{- |
Like `concurrently`, but use at most N threads.
-}
concurrentN :: Int -> [IO a] -> IO [a]
concurrentN n ios = do
chan <- newEmptyMVar
mapM_ (forkIO . const (handler chan)) [1 .. n]
packages <- mapM package ios
mapM_ (putMVar chan . Just) packages
putMVar chan Nothing -- send the kill signal
results <- mapM (takeMVar . snd) packages
case sequence results of
Right vals -> return vals
Left err -> throw (err :: SomeException)
where
package io = do
response <- newEmptyMVar
return (io, response)
handler chan = do
job <- takeMVar chan
case job of
Nothing ->
-- put back the end marker and terminate
putMVar chan Nothing
Just (io, response) -> do
result <- try io
putMVar response result
-- loop until we get `Nothing`
handler chan
{- |
Like `concurrently_`, but use at most N threads.
-}
concurrentN_ :: Int -> [IO a] -> IO ()
concurrentN_ n =
-- we could probably be more memory efficient here.
void . concurrentN n
{- |
Like `mapM`, but all io operations are executed concurrently.
-}
cMapM :: (a -> IO b) -> [a] -> IO [b]
cMapM f = concurrently . map f
{- |
Like `mapM_`, but all io operations are executed concurrently.
-}
cMapM_ :: (a -> IO b) -> [a] -> IO ()
cMapM_ f = concurrently_ . map f
{- |
Like `cMapM`, but use at most N threads.
-}
cMapMN :: Int -> (a -> IO b) -> [a] -> IO [b]
cMapMN n f = concurrentN n . map f
{- |
Like `cMapM_`, but use at most N threads.
-}
cMapMN_ :: Int -> (a -> IO b) -> [a] -> IO ()
cMapMN_ n f = concurrentN_ n . map f
-- Private Types --------------------------------------------------------------
-- Private Functions ----------------------------------------------------------
{- |
Kicks off an IO in a new thread
-}
async :: IO a -> IO (MVar (Either SomeException a))
async io = do
job <- newEmptyMVar
forkIO $ do
result <- try io
putMVar job result
return job
-- Tests ----------------------------------------------------------------------
| SumAll/haskell-concurrent-lists | src/Control/Concurrent/ConcurrentLists.hs | apache-2.0 | 3,720 | 0 | 15 | 804 | 816 | 411 | 405 | 63 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | Note: tests here are the same as the tests of the ordinary
-- `Earley` module.
module NLP.TAG.Vanilla.Earley.Pred.Tests where
import Test.Tasty (TestTree)
import NLP.TAG.Vanilla.Earley.Pred (recognize')
import qualified NLP.TAG.Vanilla.Tests as T
-- | All the tests of the parsing algorithm.
tests :: TestTree
tests = T.testTree "NLP.TAG.Vanilla.Earley.Pred" recognize' Nothing Nothing
| kawu/tag-vanilla | src/NLP/TAG/Vanilla/Earley/Pred/Tests.hs | bsd-2-clause | 488 | 0 | 6 | 87 | 69 | 47 | 22 | 8 | 1 |
-- | Generate Attribute-Relation File Format (ARFF) files.
--
-- ARFF files are used by the WEKA data mining and machine learning framework.
--
-- <http://www.cs.waikato.ac.nz/~ml/weka/>
--
module Text.ARFF (
-- * ARFF relations
Relation,
relation,
encode,
-- * Attribute constructors
Attribute,
a_string,
a_real,
a_int,
a_nominal,
a_nominalFromTo,
a_dateFormat,
a_date,
-- * Value constructors
Value,
missing,
string,
real,
int,
nominal,
date
) where
import Control.Arrow ((<<<))
import Data.Binary.Put (Put, runPut)
import Data.Maybe (mapMaybe)
import Data.ByteString.Lazy (ByteString)
import Data.ByteString.Lex.Double (readDouble)
import Data.Time.Format (FormatTime, formatTime)
import System.Locale (defaultTimeLocale)
import Text.Show.ByteString (putAscii, putAsciiStr, showp, unlinesP, unwordsP)
import qualified Text.Show.ByteString as B
-- | Show a 'Value' in the 'Put' monad.
type Putter = Value -> Put
-- | ARFF attribute type.
data Type =
StringAttribute { put :: Putter }
| RealAttribute { put :: Putter }
| IntegerAttribute { put :: Putter }
| forall a . Show a => NominalAttribute { put :: Putter, enum :: [a] }
| DateAttribute { put :: Putter, format :: String }
-- | Attribute with associated type and name.
data Attribute = Attribute { attrType :: Type, attrName :: String }
-- | ARFF value type.
data Value =
MissingValue
| StringValue { fromStringValue :: String }
| RealValue { fromRealValue :: Double }
| IntegerValue { fromIntegerValue :: Integer }
| forall a . (Show a) => NominalValue { fromNominalValue :: a }
| forall a . (FormatTime a) => DateValue { fromDateValue :: a }
-- | ARFF relation.
data Relation = Relation
{ name :: String,
attributes :: [Attribute],
values :: [[Value]] }
-- | Construct a relation from a name, attributes and values.
relation :: String -> [Attribute] -> [[Value]] -> Relation
relation = Relation
-- | Convert a 'Relation' to its textual representation.
encode :: Relation -> ByteString
encode = runPut . putRelation
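-- A usage sketch (hypothetical data; writing the result out is assumed to go
-- through a lazy-ByteString writer such as Data.ByteString.Lazy.writeFile):
--
-- > let attrs = [a_string "name", a_int "age", a_nominal [True, False] "member"]
-- >     rel   = relation "people" attrs
-- >               [ [string "ann", int 37, nominal True]
-- >               , [string "bob", missing, nominal False] ]
-- > in Data.ByteString.Lazy.writeFile "people.arff" (encode rel)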
intersperseP :: Put -> [Put] -> Put
intersperseP _ [] = return ()
intersperseP _ (x:[]) = x
intersperseP d (x:xs) = x >> d >> intersperseP d xs
uncommaP :: [Put] -> Put
uncommaP = intersperseP (putAscii ',')
-- encloseP :: Put -> Put -> Put
-- encloseP e p = e >> p >> e
defaultDateFormat :: String
defaultDateFormat = "yyyy-MM-dd'T'HH:mm:ss"
fromISODateFormat :: String -> String
fromISODateFormat fmt | fmt == defaultDateFormat = "%Y-%m-%dT%X"
fromISODateFormat _ = error "BUG[fromISODateFormat]: ISO date formats not yet implemented"
putType :: Type -> Put
putType (StringAttribute _) = putAsciiStr "string"
putType (RealAttribute _) = putAsciiStr "real"
putType (IntegerAttribute _) = putAsciiStr "integer"
putType (NominalAttribute _ es) = putAscii '{' >> (uncommaP <<< map (putAsciiStr.show)) es >> putAscii '}'
putType (DateAttribute _ fmt) = unwordsP [putAsciiStr "date", showp fmt]
putAttribute :: Attribute -> Put
putAttribute (Attribute atype name) = unwordsP [putAsciiStr "@attribute", putAsciiStr name, putType atype]
putMissingValue :: Put
putMissingValue = putAscii '?'
instance Show (Value) where
show (StringValue _) = "String"
show (RealValue _) = "Real"
show (IntegerValue _) = "Integer"
show (NominalValue _) = "Nominal"
show (DateValue _) = "Date"
typeError :: String -> String -> Value -> Put
typeError attrName attrType value =
error $ "Text.ARFF.encode: type mismatch -- attribute \"" ++ attrName ++ "\""
++ " has type " ++ attrType ++ ","
++ " but got " ++ (show value)
putString :: String -> Value -> Put
putString _ (StringValue x) = showp x
putString _ MissingValue = putMissingValue
putString a v = typeError a "String" v
putReal :: String -> Value -> Put
putReal _ (RealValue x) = showp x
putReal _ MissingValue = putMissingValue
putReal a v = typeError a "Real" v
putInteger :: String -> Value -> Put
putInteger _ (IntegerValue x) = showp x
putInteger _ MissingValue = putMissingValue
putInteger a v = typeError a "Integer" v
putNominal :: String -> Value -> Put
putNominal _ (NominalValue x) = putAsciiStr (show x)
putNominal _ MissingValue = putMissingValue
putNominal a v = typeError a "Nominal" v
putDate :: String -> String -> Value -> Put
putDate fmt a (DateValue d) = showp (formatTime defaultTimeLocale fmt d)
putDate _ a MissingValue = putMissingValue
putDate _ a v = typeError a "Date" v
putRelation :: Relation -> Put
putRelation (Relation name attrs values) =
unlinesP $
[ unwordsP [putAsciiStr "@relation", putAsciiStr name] ]
++ map putAttribute attrs
++ [ putAsciiStr "@data" ]
++ map (uncommaP . zipWith ($) putters) values
where putters = map (put . attrType) attrs
-- | String attribute constructor.
a_string :: String -> Attribute
a_string a = Attribute (StringAttribute (putString a)) a
-- | Real attribute constructor.
a_real :: String -> Attribute
a_real a = Attribute (RealAttribute (putReal a)) a
-- | Integer attribute constructor.
a_int :: String -> Attribute
a_int a = Attribute (IntegerAttribute (putInteger a)) a
-- | Nominal attribute constructor.
a_nominal :: (Show a) => [a] -> String -> Attribute
a_nominal xs a = Attribute (NominalAttribute (putNominal a) xs) a
-- | Nominal attribute constructor.
a_nominalFromTo :: (Enum a, Show a) => a -> a -> String -> Attribute
a_nominalFromTo lo hi = a_nominal (enumFromTo lo hi)
-- | Date attribute constructor.
-- Currently only supports a default date format, since we're lacking an
-- ISO-8601 format string parser.
a_dateFormat :: String -> String -> Attribute
a_dateFormat fmt a = Attribute (DateAttribute (putDate (fromISODateFormat fmt) a) fmt) a
-- | Construct a date attribute with the default date format
-- @yyyy-MM-dd'T'HH:mm:ss@.
a_date :: String -> Attribute
a_date = a_dateFormat defaultDateFormat
-- | Missing value.
missing :: Value
missing = MissingValue
-- | String value constructor.
string :: String -> Value
string = StringValue
-- | Real value constructor.
real :: Double -> Value
real = RealValue
-- | Integer value constructor.
int :: Integer -> Value
int = IntegerValue
-- | Nominal value constructor.
nominal :: (Show a) => a -> Value
nominal = NominalValue
-- | Date value constructor
date :: (FormatTime a) => a -> Value
date = DateValue
| kaoskorobase/arff | Text/ARFF.hs | bsd-2-clause | 6,699 | 0 | 12 | 1,521 | 1,813 | 991 | 822 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables, DoAndIfThenElse, OverloadedStrings #-}
import System.Environment (getArgs)
import qualified Data.ByteString.Lazy.Char8 as B
import Data.Maybe (fromJust)
import qualified Data.Map as M
import qualified Data.List as L
data Symbol = Procedure Bool B.ByteString
| Variable Bool Int B.ByteString
deriving (Show, Eq)
type SymbolTable = M.Map B.ByteString (Symbol, Int)
type Instruction = [B.ByteString]
--Helper functions
isHeaderLine = B.isPrefixOf "."
isVariable (Variable _ _ _) = True
isVariable _ = False
isExternal (Variable a _ _) = a
isExternal (Procedure a _) = a
isInternalVariable v = (not $ isExternal v) && (isVariable v)
getName (Variable _ _ a) = a
getName (Procedure _ a) = a
numGlobalsExported :: SymbolTable -> Int
numGlobalsExported t = length $ M.elems $ M.filter (isInternalVariable . fst) t
dumpAssembler :: [[Instruction]] -> B.ByteString
dumpAssembler files =
let smushedFiles = map smush files
in B.intercalate "\n" smushedFiles
where
smush file = B.intercalate "\n" $ map (B.intercalate " ") file
parseSymbol :: B.ByteString -> Symbol
parseSymbol s =
let chunks = B.words s
in
if (length chunks > 0) then
case (head chunks) of
".var" -> Variable False
(convert $ B.readInteger $ (chunks !! 1))
(chunks !! 2)
".proc" -> Procedure False (chunks !! 1)
".external" ->
case (chunks !! 1) of
"var" -> Variable True
(convert $ B.readInteger $ (chunks !! 2)) (chunks !! 3)
"proc" -> Procedure True (chunks !! 2)
_ -> error $ "Could not parse " ++ (show s) ++ " as a symbol"
_ -> error $ "Could not parse " ++ (show s) ++ " as a symbol"
else error $ "Could not parse " ++ (show s) ++ " as a symbol"
where
convert = fromIntegral . fst . fromJust
buildTableForFile :: String -> IO SymbolTable
buildTableForFile fname = do
assemblerFile <- B.readFile fname
let assemblerFileLines = filter (not . B.null) $ B.lines assemblerFile
let numberOfSymbols = head assemblerFileLines
let headerLines = filter isHeaderLine (tail assemblerFileLines)
let symbols = map parseSymbol headerLines
let internalVariableSymbols = filter isInternalVariable symbols
let addresses = [3..((length internalVariableSymbols)+3)]
let addressedSymbols = zip (reverse internalVariableSymbols) addresses
let otherSymbols = (symbols L.\\ internalVariableSymbols)
let dummySymbols = zip otherSymbols (replicate (length otherSymbols) (-1))
let totalSymbols = addressedSymbols ++ dummySymbols
let names = map (getName . fst) totalSymbols
return $ M.fromList $ zip names totalSymbols
buildProgramForFile :: String -> IO [Instruction]
buildProgramForFile fname = do
assemblerFile <- B.readFile fname
let assemblerFileLines = filter (not . B.null) $ B.lines assemblerFile
let codeLines = filter (not . isHeaderLine) assemblerFileLines
return $ map B.words codeLines
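-- For each input file, the offset to add to its global variable addresses:
-- the running total of globals exported by all files preceding it.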
buildGlobalIncrements :: [SymbolTable] -> [Int]
buildGlobalIncrements tables =
let numbers = map numGlobalsExported tables
takeAmounts = [0..((length numbers)-1)]
sums = map ($numbers) (map take takeAmounts)
in map sum sums
replaceLoadsAndStores :: [Instruction] -> B.ByteString -> B.ByteString -> [Instruction]
replaceLoadsAndStores program oldValue newValue =
map (update oldValue newValue) program
where
update old new instruction =
case (instruction !! 0) of
"LoadG" ->
if (instruction !! 1) == oldValue then
["LoadG", newValue]
else instruction
"StoG" ->
if (instruction !! 1) == oldValue then
["StoG", newValue]
else instruction
_ -> instruction
{-
Uniqify: increment the address of every global load and store
-}
uniqify :: ([Instruction], SymbolTable) -> Int -> ([Instruction], SymbolTable)
uniqify (code, table) increment =
let exportedGlobals = M.elems $ M.filter (isInternalVariable . fst) table
addresses = map snd exportedGlobals
newAddresses = map (+increment) addresses
fixedProgram = doRecurse code addresses newAddresses
newExportedGlobals = map (updateEntry (M.fromList $ zip addresses newAddresses)) exportedGlobals
names = map (getName . fst) newExportedGlobals
newTableEntries = M.fromList $ zip names newExportedGlobals
fixedTable = M.union newTableEntries table
in (fixedProgram, fixedTable)
where
doRecurse code [] [] = code
doRecurse code old new =
doRecurse (replaceLoadsAndStores code
(B.pack $ show $ head old)
(B.pack $ show $ head new))
(tail old)
(tail new)
updateEntry mapping (a, i) = (a, (mapping M.! i))
{-
The reason we do (tail linkFiles) in here is that we don't want to uniqify
the first linkFile, because we assume that is the point of origin of the
increments, so we can't change any addresses in there. We also discard the
first increment, which is always going to be zero for the same reason.
-}
uniqifyLoadsAndStores :: [([Instruction], SymbolTable)] -> [Int] -> [([Instruction], SymbolTable)]
uniqifyLoadsAndStores linkFiles increments = zipWith uniqify (tail linkFiles) (tail increments)
{-
Steps required:
1. Uniqify all load and store addresses for non-external globals by incrementing everything
1. build for each file the increment for variable locations for that file
2. For each external variable in each file
1. Find the file which declares it (or error)
2. Get the (now unique) address of that global
3. Replace all loads and stores to the external variable name in the current file with that address
3. Uniquify all loop labels by incrementing everything
functions required:
numloops(file)
numGlobalsExported(file)
replaceLoadsStores(file, ext_name/address, newaddress)
-}
main = do
args <- getArgs
if length args > 1 then do
tables <- mapM buildTableForFile args
files <- mapM buildProgramForFile args
let filesAndTables = zip files tables
let globalIncrements = buildGlobalIncrements tables
let uniqified = uniqifyLoadsAndStores filesAndTables globalIncrements
--putStrLn $ concatMap show tables
--putStrLn $ concatMap show $ map numGlobalsExported tables
--mapM_ (putStrLn . show) files
--mapM_ (putStrLn . show) globalIncrements
--mapM_ (putStrLn . show) uniqified
let outputCode = [(head files)] ++ (map fst uniqified)
putStrLn $ B.unpack $ dumpAssembler outputCode
else do
error $ "Usage: tld <1.asm> <2.asm> ... <n.asm> <output.asm>"
| houli/TastierMachine | src/TastierLinker/Main.hs | bsd-3-clause | 6,695 | 4 | 18 | 1,513 | 1,770 | 916 | 854 | 118 | 7 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -O #-}
module Main where
import qualified Data.ByteString.Lazy as BSL
import Data.List ( sortBy )
import Data.Ord ( comparing )
import Data.Char
import Data.Ratio
#ifdef DEBUG
import Debug.Trace
import System.IO
#endif
transUnit :: String -> Integer
transUnit "G" = 1024 * transUnit "M"
transUnit "M" = 1024 * transUnit "K"
transUnit "K" = 1024
transUnit "g" = 1000 * transUnit "m"
transUnit "m" = 1000 * transUnit "k"
transUnit "k" = 1000
transUnit "" = 1
firstColumnSize :: BSL.ByteString -> Integer
firstColumnSize line = let
(pre,_) = BSL.break (isSpace . e2e) line
(num,unit) = BSL.break (isAlpha . e2e) pre
(preComm,postComm) = BSL.break ((`elem` ".,") . e2e) num
toString = map e2e . BSL.unpack
readSafe x = (fst . last) $ (0,"") : reads x
a,b,c :: Ratio Integer
a = readSafe (toString preComm) % 1
  -- postComm still carries the leading ',' or '.', so drop it before reading
  -- the fractional digits, and scale by 10 ^ (number of digits).
  b = let frac = BSL.drop 1 postComm
          l = 10 ^ BSL.length frac in
      if BSL.null frac
        then 0
        else readSafe (toString frac) % l
c = (transUnit (toString unit)) % 1
result = round ((a+b) * c)
in
#ifdef DEBUG
traceShow (map round [a,b,c], map toString [line,pre,num,preComm,postComm,unit]) result
#else
result
#endif
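-- Informal examples (given the fractional-part handling above):
--   firstColumnSize "2,5K backups" == 2560
--   firstColumnSize "3m"           == 3000000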
e2e :: (Enum a, Enum b) => a -> b
e2e = toEnum . fromEnum
main = do
#ifdef DEBUG
hPutStrLn stderr "DEBUG MODE!"
#endif
input <- BSL.getContents
mapM_ BSL.putStrLn (sortBy (comparing firstColumnSize) $ BSL.split (e2e '\n') input)
{- TEST:
1
2
3
1,1k
2,1m
3,1g
1,1K
2,1M
3,1G
-} | Tener/haskell-sorty | sorty.hs | bsd-3-clause | 1,906 | 12 | 18 | 732 | 521 | 297 | 224 | 37 | 2 |
module Tisp.CodeGen (codegen, CodeGen) where
import Data.Map.Strict (Map)
import qualified Data.Text as T
import qualified Data.Map.Strict as M
import Data.Maybe (fromJust)
import Control.Monad.Reader hiding (void)
import LLVM.General.AST
import LLVM.General.AST.DataLayout
import LLVM.General.AST.Type
import LLVM.General.AST.Global
import qualified LLVM.General.AST.Constant as C
import qualified LLVM.General.AST.CallingConvention as CallingConvention
import qualified LLVM.General.AST.Linkage as Linkage
import qualified LLVM.General.AST.Attribute as A
import Tisp.LLVMMonad
import Tisp.AST as AST
import Tisp.Tokenize (Symbol)
import Tisp.LLVMTypes (typeOf)
newtype CGS = CGS [Operand]
newtype CodeGen a = CodeGen (ReaderT CGS FunctionGen a)
deriving (Functor, Applicative, Monad, MonadReader CGS)
instance MonadModuleGen CodeGen where
getMGS = CodeGen (lift getMGS)
putMGS = CodeGen . lift . putMGS
askTarget = CodeGen (lift askTarget)
instance MonadFunctionGen CodeGen where
getFGS = CodeGen (lift getFGS)
putFGS = CodeGen . lift . putFGS
tellFGO = CodeGen . lift . tellFGO
runCodeGen :: CodeGen a -> FunctionGen a
runCodeGen (CodeGen cg) = runReaderT cg (CGS [])
defaultTarget :: Target
defaultTarget = Target "x86_64-unknown-linux-gnu" (defaultDataLayout { endianness = Just LittleEndian })
codegen :: Record -> Module
codegen defs = runModuleGen "" (sequence (map (codegen' . snd) (M.toList defs))) defaultTarget
codegen' :: AST.Definition -> ModuleGen ()
codegen' (AST.Definition name tyExpr value) = do
let ty = cgType tyExpr
runFunctionGen (T.unpack name) ty (tyArgs ty) False functionDefaults (runCodeGen . funcgen value)
pure ()
funcgen :: AST -> [Operand] -> CodeGen ()
funcgen x args = do
startBlock (Name "entry")
local (\(CGS vars) -> CGS (args ++ vars)) $ cgExpr x
pure ()
cgType :: AST -> Type
cgType (AST _ (Ref (AST.Global "unit"))) = void
cgType (AST _ (Ref (AST.Global "i32"))) = i32
cgType (AST _ (App (AST _ (Ref (AST.Global "func"))) (ret:args))) = FunctionType (cgType ret) (map cgType args) False
cgType x = error $ "unrecognized type " ++ show x
tyArgs :: Type -> [(String, Type, [A.ParameterAttribute])]
tyArgs (FunctionType _ args False) = zip3 (repeat "") args (repeat [])
tyArgs _ = []
cgExpr :: AST -> CodeGen Operand
cgExpr (AST _ v) = cg v
where
cg (App (AST _ (Ref (AST.Global f))) xs) = fromJust <$> (call' f =<< mapM cgExpr xs)
cg (Ref v) = fromJust <$> lookupVar v
cg (Num n) = pure . ConstantOperand $ C.Int 32 (round n)
cg (ASTError _ _) = call intrinsicTrap []
cg (Abs args value) = do
func <- runFunctionGen "lambda" i32 (map (\(name, ty) -> (T.unpack name, cgType ty, [])) args) False functionDefaults (runCodeGen . funcgen value)
pure . ConstantOperand . C.GlobalReference (typeOf func) $ (name func)
intrinsicTrap :: Global
intrinsicTrap = functionDefaults { returnType = void, name = Name "llvm.trap", parameters = ([], False) }
call :: Global -> [Operand] -> CodeGen Operand
call fn@(Function {..}) args = inst $ Call { isTailCall = False
, callingConvention = callingConvention
, returnAttributes = []
, function = Right (ConstantOperand (C.GlobalReference (typeOf fn) name))
, arguments = map (\op -> (op, [])) args
, functionAttributes = []
, metadata = []
}
call _ _ = error "call: Tried to call non-function"
call' :: Symbol -> [Operand] -> CodeGen (Maybe Operand)
call' f xs = do
let name = (Name (T.unpack f))
global <- findGlobal name
case global of
Nothing -> pure Nothing
Just g -> Just <$> call g xs
lookupVar :: Var -> CodeGen (Maybe Operand)
lookupVar (AST.Local i) = do
CGS vars <- ask
if (fromIntegral $ length vars) <= i
then pure Nothing
else pure . Just $ vars !! (fromIntegral i)
lookupVar (AST.Global name) = do
let name' = Name $ T.unpack name
result <- findGlobal name'
case result of
Just value -> pure $ Just (ConstantOperand . C.GlobalReference (typeOf value) $ name')
Nothing -> pure Nothing
| Ralith/tisp | src/Tisp/CodeGen.hs | bsd-3-clause | 4,292 | 4 | 20 | 1,023 | 1,636 | 854 | 782 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE QuasiQuotes #-}
-- |
module Network.Libtorrent.Bencode (Bencoded
, entryToBencoded
, bencodedData
) where
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Exception (bracket)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Foreign.Marshal.Alloc (alloca)
import Foreign.Ptr ( Ptr )
import Foreign.Storable (peek)
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Inline.Unsafe as CU
import Network.Libtorrent.Inline
C.context libtorrentCtx
C.include "<libtorrent/bencode.hpp>"
C.verbatim "typedef std::vector<char> VectorChar;"
C.using "namespace libtorrent"
C.using "namespace std"
newtype Bencoded = Bencoded ByteString
entryToBencoded :: MonadIO m => Ptr C'BencodeEntry -> m Bencoded
entryToBencoded ePtr =
liftIO $ bracket
[CU.exp| VectorChar * { new std::vector<char>() } |]
(\bufPtr -> [CU.exp| void { delete $(VectorChar * bufPtr)} |]) $
\bufPtr ->
alloca $ \clenPtr ->
alloca $ \cstrPtr -> do
[C.block| void {
bencode(std::back_inserter(*$(VectorChar * bufPtr)), *$(entry * ePtr));
*$(size_t * clenPtr) = $(VectorChar * bufPtr)->size();
*$(char ** cstrPtr) = $(VectorChar * bufPtr)->data();
}
|]
clen <- peek clenPtr
cstr <- peek cstrPtr
Bencoded <$> BS.packCStringLen (cstr, fromIntegral clen)
bencodedData :: Bencoded -> ByteString
bencodedData (Bencoded v) = v
| eryx67/haskell-libtorrent | src/Network/Libtorrent/Bencode.hs | bsd-3-clause | 1,703 | 0 | 16 | 426 | 338 | 194 | 144 | 37 | 1 |
-- | Color and formatting for terminal output.
-- For Windows systems, any and all formatting are no-ops due to
-- compatibility reasons.
--
-- While formattings can be stacked (i.e. @bold (underline "Hello Joe")@),
-- they do not properly nest: the escape code used to reset formatting at the
-- end of a formatted string removes *all* formatting. Thus,
-- @color Red ("Hello" ++ color Blue "Mike")@ will print \"Hello\" in red and
-- \"Mike\" in blue, but @color Red (color Blue "Hello" ++ "Robert")@ will
-- print \"Robert\" in the terminal's default color, and not in red as one
-- might expect.
module Control.Shell.Color
( Color (..)
, color, background
, highlight, bold, underline
) where
import System.Info (os)
data Color
= Black
| Red
| Green
| Yellow
| Blue
| Purple
| Magenta
| White
deriving (Show, Read, Eq, Ord, Enum)
colorNum :: Color -> Int
colorNum = fromEnum
-- | Apply the given color to the given string.
color :: Color -> String -> String
color c s = concat [esc ("0" ++ show (30+colorNum c)), s, normal]
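-- A usage sketch (any plain String printer will do):
--
-- > putStrLn (color Red "error:" ++ " something went wrong")
-- > putStrLn (bold (color Green "OK")) -- formattings stack; see the nesting caveat above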
-- | Apply the given background color to the given string.
background :: Color -> String -> String
background c s = concat [esc ("0" ++ show (40+colorNum c)), s, normal]
-- | Apply the terminal's default highlighting to the given string.
highlight :: String -> String
highlight s = esc "7" ++ s ++ normal
-- | Output the given string in bold font.
bold :: String -> String
bold s = esc "1" ++ s ++ normal
-- | Underline the given string.
underline :: String -> String
underline s = esc "4" ++ s ++ normal
-- | Escape sequence to reset formatting back to normal.
normal :: String
normal = esc "0"
-- | An escape character.
{-# INLINE esc #-}
esc :: String -> String
esc s
| os == "mingw32" = ""
| otherwise = "\ESC[" ++ s ++ "m"
| valderman/shellmate | shellmate/Control/Shell/Color.hs | bsd-3-clause | 1,821 | 0 | 13 | 394 | 382 | 214 | 168 | 34 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.SampleVar
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (concurrency)
--
-- Sample variables
--
-----------------------------------------------------------------------------
module Control.Concurrent.SampleVar
(
-- * Sample Variables
SampleVar, -- :: type _ =
newEmptySampleVar, -- :: IO (SampleVar a)
newSampleVar, -- :: a -> IO (SampleVar a)
emptySampleVar, -- :: SampleVar a -> IO ()
readSampleVar, -- :: SampleVar a -> IO a
writeSampleVar -- :: SampleVar a -> a -> IO ()
) where
import Prelude
import Control.Concurrent.MVar
-- |
-- Sample variables are slightly different from a normal 'MVar':
--
-- * Reading an empty 'SampleVar' causes the reader to block.
-- (same as 'takeMVar' on empty 'MVar')
--
-- * Reading a filled 'SampleVar' empties it and returns value.
-- (same as 'takeMVar')
--
-- * Writing to an empty 'SampleVar' fills it with a value, and
-- potentially, wakes up a blocked reader (same as for 'putMVar' on
-- empty 'MVar').
--
-- * Writing to a filled 'SampleVar' overwrites the current value.
-- (different from 'putMVar' on full 'MVar'.)
type SampleVar a
= MVar (Int, -- 1 == full
-- 0 == empty
-- <0 no of readers blocked
MVar a)
-- |Build a new, empty, 'SampleVar'
newEmptySampleVar :: IO (SampleVar a)
newEmptySampleVar = do
v <- newEmptyMVar
newMVar (0,v)
-- |Build a 'SampleVar' with an initial value.
newSampleVar :: a -> IO (SampleVar a)
newSampleVar a = do
v <- newEmptyMVar
putMVar v a
newMVar (1,v)
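-- A usage sketch (hypothetical producer thread, using Control.Concurrent.forkIO;
-- later writes overwrite earlier ones, and a read empties the variable):
--
-- > sv <- newEmptySampleVar
-- > forkIO (mapM_ (writeSampleVar sv) [1 .. 1000 :: Int])
-- > x  <- readSampleVar sv  -- blocks until at least one write has happened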
-- |If the SampleVar is full, leave it empty. Otherwise, do nothing.
emptySampleVar :: SampleVar a -> IO ()
emptySampleVar v = do
(readers, var) <- takeMVar v
   if readers > 0 then do
        -- the variable was full: drain the stored value so it is really empty
        takeMVar var
        putMVar v (0,var)
    else
      putMVar v (readers,var)
-- |Wait for a value to become available, then take it and return.
readSampleVar :: SampleVar a -> IO a
readSampleVar svar = do
--
-- filled => make empty and grab sample
-- not filled => try to grab value, empty when read val.
--
(readers,val) <- takeMVar svar
putMVar svar (readers-1,val)
takeMVar val
-- |Write a value into the 'SampleVar', overwriting any previous value that
-- was there.
writeSampleVar :: SampleVar a -> a -> IO ()
writeSampleVar svar v = do
--
-- filled => overwrite
-- not filled => fill, write val
--
(readers,val) <- takeMVar svar
case readers of
1 ->
swapMVar val v >>
putMVar svar (1,val)
_ ->
putMVar val v >>
putMVar svar (min 1 (readers+1), val)
| OS2World/DEV-UTIL-HUGS | libraries/Control/Concurrent/SampleVar.hs | bsd-3-clause | 2,853 | 8 | 15 | 666 | 465 | 263 | 202 | 43 | 2 |
{-# LANGUAGE FlexibleInstances #-}
-- |
-- Module     : Simulation.Aivika.Branch.BR
-- Copyright : Copyright (c) 2016-2017, David Sorokin <[email protected]>
-- License : BSD3
-- Maintainer : David Sorokin <[email protected]>
-- Stability : experimental
-- Tested with: GHC 7.10.3
--
-- This module defines that 'BR' can be an instance of the 'MonadDES' and 'EventIOQueueing' type classes.
--
module Simulation.Aivika.Branch.BR
(BR,
runBR,
branchLevel) where
import Simulation.Aivika.Trans.Comp
import Simulation.Aivika.Trans.DES
import Simulation.Aivika.Trans.Exception
import Simulation.Aivika.Trans.Generator
import Simulation.Aivika.Trans.Event
import Simulation.Aivika.Trans.Ref.Base
import Simulation.Aivika.Trans.QueueStrategy
import Simulation.Aivika.IO
import Simulation.Aivika.Branch.Internal.BR
import Simulation.Aivika.Branch.Event
import Simulation.Aivika.Branch.Generator
import Simulation.Aivika.Branch.Ref.Base.Lazy
import Simulation.Aivika.Branch.Ref.Base.Strict
import Simulation.Aivika.Branch.QueueStrategy
instance MonadDES (BR IO)
instance MonadComp (BR IO)
-- | An implementation of the 'EventIOQueueing' type class.
instance EventIOQueueing (BR IO) where
enqueueEventIO = enqueueEvent
| dsorokin/aivika-branches | Simulation/Aivika/Branch/Br.hs | bsd-3-clause | 1,262 | 0 | 7 | 162 | 179 | 122 | 57 | 23 | 0 |
{-# LANGUAGE DeriveGeneric #-}
module Cataskell.GameData.Resources where
import Cataskell.GameData.Basics
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import GHC.Generics (Generic)
data ResourceCount = ResourceCount
{ lumber :: Int
, wool :: Int
, wheat :: Int
, brick :: Int
, ore :: Int
} deriving (Eq, Ord, Show, Read,Generic)
instance Monoid ResourceCount where
mempty = ResourceCount 0 0 0 0 0
mappend r1 r2 = ResourceCount
{ lumber = lumber r1 + lumber r2
, wool = wool r1 + wool r2
, wheat = wheat r1 + wheat r2
, brick = brick r1 + brick r2
, ore = ore r1 + ore r2
}
data HarborDiscount = HarborDiscount { consumes :: ResourceCount }
deriving (Eq, Ord, Show, Read)
totalResources :: ResourceCount -> Int
totalResources r = lumber r + wool r + wheat r + brick r + ore r
mulResources :: ResourceCount -> Int -> ResourceCount
mulResources r i = ResourceCount
{ lumber = i * lumber r
, wool = i * wool r
, wheat = i * wheat r
, brick = i * brick r
, ore = i * ore r
}
mkNeg :: ResourceCount -> ResourceCount
mkNeg r = mulResources r (-1)
sufficient :: ResourceCount -> ResourceCount -> Bool
sufficient r c = and [lumber', wool', wheat', brick', ore']
where lumber' = lumber r >= lumber c
wool' = wool r >= wool c
wheat' = wheat r >= wheat c
brick' = brick r >= brick c
ore' = ore r >= ore c
cost :: Item -> ResourceCount
cost c = byItemType itemType'
where itemType' = case c of
Building x -> case x of
Edifice (OnPoint _ _ h) -> H h
Roadway (OnEdge _ _) -> Road
Card _ -> DevelopmentCard
Potential x -> x
byItemType x = case x of
H Settlement -> mempty { lumber = 1, brick = 1, wool = 1, wheat = 1 }
H City -> mempty { wheat = 2, ore = 3 }
Road -> mempty { lumber = 1, brick = 1 }
DevelopmentCard -> mempty { wool = 1, wheat = 1, ore = 1 }
nonNegative :: ResourceCount -> Bool
nonNegative = (flip sufficient) mempty
resourceFromTerrain :: Terrain -> ResourceCount
resourceFromTerrain t
= case t of
Forest -> mempty { lumber = 1 }
Pasture -> mempty { wool = 1 }
Field -> mempty { wheat = 1 }
Hill -> mempty { brick = 1 }
Mountain -> mempty { ore = 1 }
Desert -> mempty
filteredResCount :: ResourceType -> ResourceCount -> ResourceCount
filteredResCount resType res
= case resType of
Lumber -> mempty { lumber = lumber res}
Wool -> mempty { wool = wool res }
Wheat -> mempty { wheat = wheat res }
Brick -> mempty { brick = brick res }
Ore -> mempty { ore = ore res }
resCountToList :: ResourceCount -> [ResourceType]
resCountToList res = lumbers ++ wools ++ wheats ++ bricks ++ ores
where lumbers = replicate (lumber res) Lumber
wools = replicate (wool res) Wool
wheats = replicate (wheat res) Wheat
bricks = replicate (brick res) Brick
ores = replicate (ore res) Ore
nResOf :: Int -> ResourceType -> ResourceCount
nResOf i resType
= case resType of
Lumber -> mempty { lumber = i}
Wool -> mempty { wool = i }
Wheat -> mempty { wheat = i }
Brick -> mempty { brick = i }
Ore -> mempty { ore = i }
genericHarborDiscount :: Int -> Set HarborDiscount
genericHarborDiscount i
= Set.fromList $ map (mkDiscount i) [Lumber, Wool, Wheat, Brick, Ore]
mkDiscount :: Int -> ResourceType -> HarborDiscount
mkDiscount i resType = HarborDiscount (nResOf i resType)
applyDiscount :: ResourceCount -> HarborDiscount -> (Int, ResourceCount)
applyDiscount playerRes harbor = go 0 playerRes
where res = consumes harbor
go n remainingRes = if not $ sufficient remainingRes res
then (n, remainingRes)
else go (n+1) (remainingRes <> mkNeg res)
harborDiscount :: Harbor -> Set HarborDiscount
harborDiscount harbor'
= case harbor' of
Harbor Hill -> Set.singleton (mkDiscount 2 Brick)
Harbor Forest -> Set.singleton (mkDiscount 2 Lumber)
Harbor Pasture -> Set.singleton (mkDiscount 2 Wool)
Harbor Field -> Set.singleton (mkDiscount 2 Wheat)
Harbor Mountain -> Set.singleton (mkDiscount 2 Ore)
Harbor Desert -> Set.empty
ThreeToOne -> genericHarborDiscount 3
| corajr/cataskell | src/Cataskell/GameData/Resources.hs | bsd-3-clause | 4,346 | 0 | 15 | 1,198 | 1,545 | 811 | 734 | 110 | 7 |
module Text.RSS.Tests where
import Test.Framework (Test, mutuallyExclusive, testGroup)
import Text.RSS.Export.Tests (rssExportTests)
import Text.RSS.Import.Tests (rssImportTests)
import Test.HUnit (Assertion, assertBool)
import Test.Framework.Providers.HUnit (testCase)
import Text.Feed.Export
import Text.Feed.Import
import Text.XML.Light
import Paths_feed
rssTests :: Test
rssTests = testGroup "Text.RSS"
[ mutuallyExclusive $ testGroup "RSS"
[ rssExportTests
, rssImportTests
, testFullRss20Parse
]
]
testFullRss20Parse :: Test
testFullRss20Parse = testCase "parse a complete rss 2.0 file" testRss20
where
testRss20 :: Assertion
testRss20 = do
putStrLn . ppTopElement . xmlFeed =<< parseFeedFromFile =<< getDataFileName "tests/files/rss20.xml"
assertBool "OK" True
| danfran/feed | tests/Text/RSS/Tests.hs | bsd-3-clause | 838 | 0 | 12 | 152 | 186 | 109 | 77 | 22 | 1 |
module Internal.SystemZ.StorableSpec where
import Foreign
import Foreign.C.Types
import Test.Hspec
import Test.QuickCheck
import Hapstone.Internal.SystemZ
import Internal.SystemZ.Default
-- | main spec
spec :: Spec
spec = describe "Hapstone.Internal.SystemZ" $ do
syszOpMemStructSpec
csSysZOpSpec
csSysZSpec
getSyszOpMemStruct :: IO SysZOpMemStruct
getSyszOpMemStruct = do
ptr <- mallocArray (sizeOf syszOpMemStruct) :: IO (Ptr Word8)
poke (castPtr ptr) (0x1 :: Word8)
poke (plusPtr ptr 1) (0x2 :: Word8)
poke (plusPtr ptr 8) (0x0123456789abcdef :: Word64)
poke (plusPtr ptr 16) (0x1032547698badcfe :: Word64)
peek (castPtr ptr) <* free ptr
syszOpMemStruct :: SysZOpMemStruct
syszOpMemStruct = SysZOpMemStruct 0x1 0x2 0x0123456789abcdef 0x1032547698badcfe
-- | SysZOpMemStruct spec
syszOpMemStructSpec :: Spec
syszOpMemStructSpec = describe "Storable SysZOpMemStruct" $ do
it "has a memory layout we can handle" $
sizeOf (undefined :: SysZOpMemStruct) == 2 + 6 + 8 * 2
it "has matching peek- and poke-implementations" $ property $
\s@SysZOpMemStruct{} ->
alloca (\p -> poke p s >> peek p) `shouldReturn` s
it "parses correctly" $ getSyszOpMemStruct `shouldReturn` syszOpMemStruct
getCsSyszOp :: IO CsSysZOp
getCsSyszOp = do
ptr <- mallocArray (sizeOf csSyszOp) :: IO (Ptr Word8)
poke (castPtr ptr) (fromIntegral $ fromEnum SyszOpImm :: Int32)
poke (plusPtr ptr 8) (19237 :: Int64)
peek (castPtr ptr) <* free ptr
csSyszOp :: CsSysZOp
csSyszOp = Imm 19237
-- | CsSysZipsOp spec
csSysZOpSpec :: Spec
csSysZOpSpec = describe "Storable CsSysZOp" $ do
it "has a memory layout we can handle" $
sizeOf (undefined :: CsSysZOp) == 4 + 4 + 24
it "has matching peek- and poke-implementations" $ property $
\s ->
alloca (\p -> poke p s >> (peek p :: IO CsSysZOp)) `shouldReturn` s
it "parses correctly" $ getCsSyszOp `shouldReturn` csSyszOp
getCsSysz :: IO CsSysZ
getCsSysz = do
ptr <- mallocArray (sizeOf csSysz) :: IO (Ptr Word8)
poke (castPtr ptr) (fromIntegral $ fromEnum SyszCcL :: Int32)
poke (plusPtr ptr 4) (1 :: Word8)
poke (plusPtr ptr 8) csSyszOp
peek (castPtr ptr) <* free ptr
csSysz :: CsSysZ
csSysz = CsSysZ SyszCcL [csSyszOp]
-- | CsSysZ spec
csSysZSpec :: Spec
csSysZSpec = describe "Storable CsSysZ" $ do
it "has a memory-layout we can handle" $
sizeOf (undefined :: CsSysZ) == 4 + 1 + 3 + 32 * 6
it "has matching peek- and poke-implementations" $ property $
\s@CsSysZ{} ->
alloca (\p -> poke p s >> peek p) `shouldReturn` s
it "parses correctly" $ getCsSysz `shouldReturn` csSysz
| ibabushkin/hapstone | test/Internal/SystemZ/StorableSpec.hs | bsd-3-clause | 2,689 | 0 | 16 | 587 | 869 | 437 | 432 | 63 | 1 |
module Task6
( task6
) where
import Data.Bool
import Text.Printf
type Point = (Double, Double)
task6 :: IO ()
task6
| snd result = printf "Минимум найден в точке (%.4f, %.4f)\n" x y
| otherwise = printf "Минимум не найден, последняя найденная точка: (%.4f, %.4f)\n" x y
where
result = stepDivision (103.0, 34.0) 0.000001 10000 1
(x, y) = fst result
f (x, y) = (x - 4) ^ 2 + (y + 7) ^ 2 - 4
f_der_x x y = 2 * x - 8
f_der_y x y = 2 * y + 14
gradient (x, y) = (f_der_x x y, f_der_y x y)
measure (x1, y1) (x2, y2) = sqrt $ (x1 - x2) ^ 2 + (y1 - y2) ^ 2
getNextPointAndStep currentPoint currentStep
| (f nextPoint) <= (f currentPoint) - nextStep * gradientLength ^ 2 = (nextPoint, nextStep)
| otherwise = getNextPointAndStep currentPoint nextStep
where
x = fst currentPoint
y = snd currentPoint
(gradX, gradY) = gradient currentPoint
gradientLength = measure (0, 0) (gradX, gradY)
nextPoint = (x - currentStep * gradX, y - currentStep * gradY)
    nextStep = currentStep / 2
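-- Gradient descent with step halving: keep halving the step until
-- f(next) <= f(current) - step * |grad f|^2 holds, take that step, and stop
-- once two consecutive points are closer than eps or the iteration budget
-- runs out (the Bool in the result reports whether it converged).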
stepDivision :: Point -> Double -> Integer -> Double -> (Point, Bool)
stepDivision startPoint eps iterCount step
| iterCount == 0 = (startPoint, False)
| measure startPoint nextPoint < eps = (nextPoint, True)
| otherwise = stepDivision nextPoint eps (iterCount - 1) nextStep
where
(nextPoint, nextStep) = getNextPointAndStep startPoint step
| vsulabs/OptimizationMethodsHs | src/Task6.hs | bsd-3-clause | 1,670 | 0 | 13 | 542 | 558 | 294 | 264 | 31 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Handlers.Actions.Common(
api
,view
,withParams
,withGeneratedCSRF
,withCSRFVerified
,withAuthorization
,withUser
,preloadUser
,generateAuthURL
) where
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Map as M
import qualified Data.ByteString.Char8 as C8
import Data.Maybe
import Control.Applicative
import Control.Monad.Trans.Maybe
import Control.Monad.Trans(lift)
import Control.Monad.IO.Class(MonadIO,liftIO)
import Control.Monad.Reader(asks)
import Control.Monad.State (get,put,gets,modify)
import Data.Aeson ((.=), object, FromJSON, ToJSON)
import Network.HTTP.Types.Status
import Network.Wai (rawPathInfo,rawQueryString)
import qualified Web.Scotty.Trans as Web
import App.Types
import qualified Utils.Scotty.Auth as Auth
import qualified Utils.Scotty.CSRF as CSRF
import qualified Utils.Scotty.Cookie as Cookie
import Utils.URI.String
import Utils.URI.Params
import Models.Schemas
import qualified Models.DB as DB
class FromParams a where
fromParams :: M.Map T.Text T.Text -> Maybe a
data ParamsMap = ParamsMap (M.Map T.Text T.Text)
instance FromParams ParamsMap where
fromParams m = Just $ ParamsMap m
type Processor request response = request -> Response (Status, response)
type Render response = Response (Status,response) -> Response response
type Authorized auth request response = auth -> Processor request response
pathAndQuery :: Response ()
pathAndQuery = do
r <- Web.request
let path = rawPathInfo r
let query = rawQueryString r
let url = C8.concat [path,query]
lift $ modify $ \s -> s{urlPath = C8.unpack url}
api :: (ToJSON response) => Response (Status, response) -> Response ()
api with = do
pathAndQuery
(stat, resp) <- with
Web.status stat
Web.json resp
view :: Response (Status,LT.Text) -> Response ()
view with = do
pathAndQuery
(stat, resp) <- with
Web.status stat
if stat == status302 || stat == status301
then Web.redirect resp
else Web.html resp
withJSON :: (FromJSON request, ToJSON response) => (Processor request response) -> Response (Status,response)
withJSON with = do
req <- Web.jsonData
with req
withParams :: (FromParams request) => (Processor request response) -> Response (Status,response)
withParams with = do
paramAssoc <- M.fromList <$> Web.params
let ps = M.mapKeys LT.toStrict $ LT.toStrict <$> paramAssoc
case fromParams ps of
Nothing -> Web.raise $ Exception status400 "Expected request in params"
Just req -> with req
withGeneratedCSRF :: Processor request response -> Processor request response
withGeneratedCSRF with req = do
sessionCSRF <- Cookie.getCookie "_csrf_token"
case sessionCSRF of
Just token -> do
setTplValue "_csrf_token" token
lift $ modify $ \s -> s{csrfToken = token}
Nothing -> makeCSRFToken
with req
where
makeCSRFToken = do
s <- lift (asks site)
csrf <- liftIO $ CSRF.generateCSRF $ csrfSecret s
setTplValue "_csrf_token" csrf
Cookie.setCookie $ Cookie.makeRootSimpleCookie "_csrf_token" csrf
lift $ modify $ \s -> s {csrfToken = csrf}
withCSRFVerified :: Processor request response -> Processor request response
withCSRFVerified with req = do
sessionCSRF <- Cookie.getCookie "_csrf_token"
token <- Web.rescue (Web.param "_csrf_token")
(\e -> Web.header "x-csrf-token" >>= \t -> return $ fromMaybe "" t)
if (token == "") && (isNothing sessionCSRF )
then with req
else case sessionCSRF of
Nothing -> Web.raise $ Exception status500 "Expected request in CSRF"
Just csrf ->
if csrf == ( LT.toStrict token)
then with req
else Web.raise $ Exception status500 "Expected request in CSRF"
authWithHeader :: Response (Maybe T.Text)
authWithHeader = do
liftIO $ putStrLn "authWithHeader"
auth <- Web.header "Authorization"
s <- lift (asks site)
liftIO $ Auth.headerSecure (jwtSecret s) auth
authWithCookie :: Response (Maybe T.Text)
authWithCookie = do
cookie <- Cookie.getCookie "Authorization"
s <- lift (asks site)
liftIO $ Auth.cookieSecure (jwtSecret s) cookie
withAuthorization :: Authorized T.Text request response -> Processor request response
withAuthorization with req =
(runMaybeT
$ MaybeT authWithHeader
<|> MaybeT authWithCookie
) >>= authAction
where
authAction info = do
case info of
Nothing -> Web.raise $ Exception status401 "Authorization required"
Just payload -> with payload req
withUser :: Authorized (Maybe User) request response -> Processor request response
withUser with req = do
(runMaybeT $ MaybeT authWithHeader <|> MaybeT authWithCookie) >>= authAction
where
authAction info = do
case info of
Nothing -> with Nothing req
Just u -> preloadUser u >>= (\user -> with user req)
preloadUser :: T.Text -> Response (Maybe User)
preloadUser u = do
let userID = read $ T.unpack u
users <- DB.runDBTry $ DB.retrieveUserByID userID
setUser users
where
setUser [] = return Nothing
      -- the ID lookup should return at most one row; take the first defensively
      setUser (user:_) = do
setTplValue "user" user
return $ Just user
generateAuthURL :: Response ()
generateAuthURL = do
me <- lift $ gets urlPath
let url = show $ updateUrlParam "_r" me (toURI "/auth/login")
setTplValue (T.pack "auth_url") url
| DavidAlphaFox/sblog | src/Handlers/Actions/Common.hs | bsd-3-clause | 5,349 | 0 | 15 | 1,058 | 1,772 | 905 | 867 | 144 | 4 |
{-# LANGUAGE FlexibleContexts #-}
module Diagrams.QRCode (pathList, pathMatrix, pathArray, stroke) where
import Control.Arrow ((***))
import Data.Array (assocs, Array, Ix)
import Data.Colour.Names (black, white)
import Data.Monoid (Any, mempty)
import qualified Diagrams.Attributes as D
import qualified Diagrams.Core as D
import qualified Diagrams.Located as D
import qualified Diagrams.Path as D
import qualified Diagrams.Trail as D
import qualified Diagrams.TwoD as D
-- | Stroke using default QR code colors (black on white) and
-- with the \"quiet\" region.
stroke :: (D.Backend b D.V2 Double, D.Renderable (D.Path D.V2 Double) b)
=> D.Path D.V2 Double
-> D.QDiagram b D.V2 Double Any
stroke = D.bg white . quiet . D.fc black . D.lw D.none . D.stroke
where
zoneX = D.strutX 4
zoneY = D.strutY 4
quiet d =
zoneY
D.===
(zoneX D.||| d D.||| zoneX)
D.===
zoneY
-- | Convert a QR code represented as a list of bounded values
-- into a 'Path'. 'minBound' values are considered to be
-- \"off\", while every other value is considered to be \"on\".
pathList :: (Bounded a, Eq a, Integral ix) => [((ix, ix), a)] -> D.Path D.V2 Double
pathList = D.Path . fmap (uncurry (flip D.at) . (p2int *** toTrail))
where p2int = D.p2 . (fromIntegral *** fromIntegral)
-- | Same as 'pathList', but from a matrix represented as a list
-- of lists.
pathMatrix :: (Bounded a, Eq a) => [[a]] -> D.Path D.V2 Double
pathMatrix matrix =
pathList $ do
(r, row) <- count matrix
(c, val) <- count row
return ((r,c), val)
where count = zip [(0::Int)..]
-- | Same as 'pathList', but from an array.
pathArray :: (Bounded a, Eq a, Integral ix, Ix ix) => Array (ix, ix) a -> D.Path D.V2 Double
pathArray = pathList . assocs
-- | Convert a value into a 'Trail'.
toTrail :: (Bounded a, Eq a) => a -> D.Trail D.V2 Double
toTrail x = if x == minBound then mempty else D.square 1
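-- A minimal usage sketch, assuming a @matrix :: [[Bool]]@ produced by some QR
-- encoder and the diagrams-svg backend ('renderSVG' and 'mkWidth' are
-- assumptions about that backend, not dependencies of this module):
--
-- > renderSVG "qr.svg" (mkWidth 200) (stroke (pathMatrix matrix))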
| prowdsponsor/diagrams-qrcode | src/Diagrams/QRCode.hs | bsd-3-clause | 1,986 | 0 | 12 | 460 | 656 | 366 | 290 | 38 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
module Example.Monad.DataTypes where
import Z3.Monad
import Control.Monad.Trans (liftIO)
run :: IO ()
run = evalZ3 datatypeScript
mkCellDatatype :: Z3 Sort
mkCellDatatype = do
-- Create a cell data type of the form:
-- data Cell = Nil | Cons {car :: Cell, cdr :: Cell}
-- Nil constructor
nil <- mkStringSymbol "Nil"
isNil <- mkStringSymbol "is_Nil"
nilConst <- mkConstructor nil isNil []
-- Cons constructor
car <- mkStringSymbol "car"
cdr <- mkStringSymbol "cdr"
cons <- mkStringSymbol "Cons"
isCons <- mkStringSymbol "is_Cons"
-- In the following, car and cdr are the field names. The second argument,
-- their sort, is Nothing, since this is a recursive sort. The third argument is
-- 0, since the type is not mutually recursive.
consConst <- mkConstructor cons isCons [(car,Nothing,0),(cdr,Nothing,0)]
-- Cell datatype
cell <- mkStringSymbol "Cell"
mkDatatype cell [nilConst, consConst]
datatypeScript :: Z3 ()
datatypeScript = do
cell <- mkCellDatatype
liftIO $ putStrLn "Cell constructors are:"
[nilConst, consConst] <- getDatatypeSortConstructors cell
mapM_ (\c -> getDeclName c >>= getSymbolString >>= liftIO . putStrLn) [nilConst, consConst]
nil <- mkApp nilConst []
-- t1 = Cons (Nil,Nil)
t1 <- mkApp consConst [nil, nil]
liftIO $ putStrLn "prove (nil != cons (nil,nil)) //Expect Unsat"
p <- (mkEq nil t1 >>= mkNot)
push
mkNot p >>= assert
check >>= liftIO . print
pop 1
liftIO $ putStrLn "prove (cons (x,u) = cons(y,v) => x = y && u = v) //Expect Unsat"
[u,v,x,y] <- mapM (flip mkFreshConst cell) ["u","v","x","y"]
t1 <- mkApp consConst [x,u]
t2 <- mkApp consConst [y,v]
p1 <- mkEq t1 t2
p2 <- mkEq x y
p3 <- mkEq u v
p4 <- mkAnd [p2, p3]
p5 <- mkImplies p1 p4
push
mkNot p5 >>= assert
check >>= liftIO . print
pop 1
| sukwon0709/z3-haskell | examples/Example/Monad/DataTypes.hs | bsd-3-clause | 1,864 | 0 | 13 | 390 | 562 | 274 | 288 | 45 | 1 |
{-# LANGUAGE BangPatterns #-}
--
-- (c) The University of Glasgow 2003-2006
--
-- Functions for constructing bitmaps, which are used in various
-- places in generated code (stack frame liveness masks, function
-- argument liveness masks, SRT bitmaps).
module GHC.Data.Bitmap (
Bitmap, mkBitmap,
intsToBitmap, intsToReverseBitmap,
mAX_SMALL_BITMAP_SIZE,
seqBitmap,
) where
import GhcPrelude
import GHC.Runtime.Layout
import DynFlags
import Util
import Data.Bits
{-|
A bitmap represented by a sequence of 'StgWord's on the /target/
architecture. These are used for bitmaps in info tables and other
generated code which need to be emitted as sequences of StgWords.
-}
type Bitmap = [StgWord]
-- | Make a bitmap from a sequence of bits
mkBitmap :: DynFlags -> [Bool] -> Bitmap
mkBitmap _ [] = []
mkBitmap dflags stuff = chunkToBitmap dflags chunk : mkBitmap dflags rest
where (chunk, rest) = splitAt (wORD_SIZE_IN_BITS dflags) stuff
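-- For example (indicative only; the concrete 'StgWord' values depend on the
-- target word size taken from the 'DynFlags'):
--
-- > mkBitmap dflags [True, False, True, True]  -- one word with bits 0, 2, 3 set (0xd)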
chunkToBitmap :: DynFlags -> [Bool] -> StgWord
chunkToBitmap dflags chunk =
foldl' (.|.) (toStgWord dflags 0) [ oneAt n | (True,n) <- zip chunk [0..] ]
where
oneAt :: Int -> StgWord
oneAt i = toStgWord dflags 1 `shiftL` i
-- | Make a bitmap where the slots specified are the /ones/ in the bitmap.
-- eg. @[0,1,3], size 4 ==> 0xb@.
--
-- The list of @Int@s /must/ be already sorted.
intsToBitmap :: DynFlags
-> Int -- ^ size in bits
-> [Int] -- ^ sorted indices of ones
-> Bitmap
intsToBitmap dflags size = go 0
where
word_sz = wORD_SIZE_IN_BITS dflags
oneAt :: Int -> StgWord
oneAt i = toStgWord dflags 1 `shiftL` i
-- It is important that we maintain strictness here.
-- See Note [Strictness when building Bitmaps].
go :: Int -> [Int] -> Bitmap
go !pos slots
| size <= pos = []
| otherwise =
(foldl' (.|.) (toStgWord dflags 0) (map (\i->oneAt (i - pos)) these)) :
go (pos + word_sz) rest
where
(these,rest) = span (< (pos + word_sz)) slots
-- | Make a bitmap where the slots specified are the /zeros/ in the bitmap.
-- eg. @[0,1,3], size 4 ==> 0x4@ (we leave any bits outside the size as zero,
-- just to make the bitmap easier to read).
--
-- The list of @Int@s /must/ be already sorted and duplicate-free.
intsToReverseBitmap :: DynFlags
-> Int -- ^ size in bits
-> [Int] -- ^ sorted indices of zeros free of duplicates
-> Bitmap
intsToReverseBitmap dflags size = go 0
where
word_sz = wORD_SIZE_IN_BITS dflags
oneAt :: Int -> StgWord
oneAt i = toStgWord dflags 1 `shiftL` i
-- It is important that we maintain strictness here.
-- See Note [Strictness when building Bitmaps].
go :: Int -> [Int] -> Bitmap
go !pos slots
| size <= pos = []
| otherwise =
(foldl' xor (toStgWord dflags init) (map (\i->oneAt (i - pos)) these)) :
go (pos + word_sz) rest
where
(these,rest) = span (< (pos + word_sz)) slots
remain = size - pos
init
| remain >= word_sz = -1
| otherwise = (1 `shiftL` remain) - 1
{-
Note [Strictness when building Bitmaps]
========================================
One of the places where @Bitmap@ is used is in building Static Reference
Tables (SRTs) (in @GHC.Cmm.Info.Build.procpointSRT@). In #7450 it was noticed
that some test cases (particularly those whose C-- has large numbers of CAFs)
produced large quantities of allocations from this function.
The source traced back to 'intsToBitmap', which was lazily subtracting the word
size from the elements of the tail of the @slots@ list and recursively invoking
itself with the result. This resulted in large numbers of subtraction thunks
being built up. Here we take care to avoid passing new thunks to the recursive
call. Instead we pass the unmodified tail along with an explicit position
accumulator, which gets subtracted in the fold when we compute the Word.
-}
{- |
Magic number, must agree with @BITMAP_BITS_SHIFT@ in InfoTables.h.
Some kinds of bitmap pack a size\/bitmap into a single word if
possible, or fall back to an external pointer when the bitmap is too
large. This value represents the largest size of bitmap that can be
packed into a single word.
-}
mAX_SMALL_BITMAP_SIZE :: DynFlags -> Int
mAX_SMALL_BITMAP_SIZE dflags
| wORD_SIZE dflags == 4 = 27
| otherwise = 58
seqBitmap :: Bitmap -> a -> a
seqBitmap = seqList
| sdiehl/ghc | compiler/GHC/Data/Bitmap.hs | bsd-3-clause | 4,528 | 0 | 17 | 1,104 | 790 | 431 | 359 | 61 | 1 |
module Bustle.QL.API.Bustle
( BustleAPI(..)
) where
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString as B
import Data.Foldable
import Bustle.Env
data BustleAPI = BustleAPI deriving (Eq, Show)
instance GraphQLValue Haxl BustleAPI
instance GraphQLType OBJECT Haxl BustleAPI where
def = defineObject "Bustle" $ do
describe "Core Bustle API"
field "hello" $ return ("world" :: B.ByteString)
field "monadic" $ do
test <- arg "test"
@> "monadic code is really neat because instead of memorizing functions"
|.. "and their parameters, you everything is a generalization of function composition."
|-- "you can think of it as the builder pattern on steroids"
|-- "this input is only valid if it's a positive prime"
validate test $ greaterThan 0
validate test $ lessThan 1000
return [1..test]
greaterThan :: Int -> Int -> Validation
greaterThan m n | n > m = OK
| otherwise = ERR "number too small!"
lessThan :: Int -> Int -> Validation
lessThan m n | n < m = OK
| otherwise = ERR "number too big!"
| jqyu/bustle-chi | src/Bustle/QL/API/Bustle.hs | bsd-3-clause | 1,161 | 0 | 17 | 314 | 288 | 143 | 145 | -1 | -1 |
module Quickpull.Render where
import Quickpull.Types
import Quickpull.Formatting
import Data.List
import Data.Ord
metaQual
:: Char
-- ^ Leader character
-> (Meta, Qual)
-> String
metaQual ldr (m, q) = indent 1 $ [ldr] <+> "Decree (" <+>
show m <+> ")" <+> "(" <+> i <+> ")"
where
kind = case q of
QTree -> "Multi"
QProp -> "Single"
i = kind <+> qualName
qualName = (concat . intersperse "." . modName . modDesc $ m)
++ "." ++ qName m
metaQuals :: [(Meta, Qual)] -> String
metaQuals ls = case ls of
[] -> indent 1 "[]"
x:xs -> metaQual '[' x ++ concatMap (metaQual ',') xs
++ indent 1 "]"
imports :: [ModDesc] -> String
imports = concatMap mkImport . nub . sortBy (comparing modName)
where
mkImport m = "import qualified " ++
(concat . intersperse "." . modName $ m) ++ "\n"
-- | Summarizes a Meta in a single line.
metaLine :: Meta -> String
metaLine m = qName m <+> "from file" <+> (modPath . modDesc $ m)
<+> "at line" <+> show (linenum m) ++ "\n"
topComments :: String
topComments = unlines
[ "-- | This module generated by the Quickpull package."
, "-- Quickpull is available at:"
, "-- <http://www.github.com/massysett/quickpull>"
, ""
]
testModule
:: String
-- ^ Name to use for module
-> [(Meta, Qual)]
-> String
testModule name ls = topComments ++ rest
where
rest = concat . intersperse "\n" $
[ "module" <+> name <+> "where\n"
, "import Quickpull"
, imports . map (modDesc . fst) $ ls
, unlines
[ "decrees :: [Decree]"
, "decrees ="
]
, metaQuals ls
]
summary :: Summary -> String
summary s = unlines
[ "success: " ++ show (success s)
, "gave up: " ++ show (gaveUp s)
, "failure: " ++ show (failure s)
, "no expected failure: " ++ show (noExpectedFailure s)
, "total: " ++ show
(success s + gaveUp s + failure s +
noExpectedFailure s)
]
| massysett/quickpull | lib/Quickpull/Render.hs | bsd-3-clause | 1,926 | 0 | 14 | 508 | 639 | 333 | 306 | 57 | 2 |
module Entity.Velocity where
import qualified GameData.Entity as E
import qualified GameData.Animation as A
import qualified Gamgine.Math.Vect as V
velocity :: E.Entity -> V.Vect
velocity E.Player {E.playerVelocity = v } = v
velocity E.Enemy {E.enemyPosition = Left _} = V.v3 0 0 0
velocity E.Enemy {E.enemyPosition = Right ani} = A.currentVelocity ani
velocity E.Star { } = V.v3 0 0 0
velocity E.Platform {E.platformPosition = Left _} = V.v3 0 0 0
velocity E.Platform {E.platformPosition = Right ani} = A.currentVelocity ani
| dan-t/layers | src/Entity/Velocity.hs | bsd-3-clause | 591 | 0 | 9 | 145 | 212 | 112 | 100 | 11 | 1 |
module Kata.WonderlandNumberSpec (main, spec) where
import Test.Hspec
import Kata.WonderlandNumber (wonderlandNumber, toDigitSet)
import Data.Foldable (for_)
main :: IO ()
main = hspec spec
spec :: Spec
spec =
describe "WonderlandNumber" $
[2..6] `for_` \n ->
it ("has the same digits when multiplied by " ++ show n) $
(wonderlandNumber * n) `hasAllTheSameDigitsAs` wonderlandNumber
hasAllTheSameDigitsAs :: Int -> Int -> Expectation
hasAllTheSameDigitsAs n m = toDigitSet n `shouldBe` toDigitSet m
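-- For intuition (illustrative arithmetic only; this does not assert the
-- library's value of 'wonderlandNumber'): 142857 * 2 == 285714, and both
-- numbers use exactly the digit set {1,2,4,5,7,8}.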
| FranklinChen/wonderland-katas-haskell | test/Kata/WonderlandNumberSpec.hs | bsd-3-clause | 525 | 0 | 12 | 94 | 158 | 88 | 70 | 14 | 1 |
module Math.Probably.GlobalRandoms where
import Data.IORef
import System.IO.Unsafe
import Math.Probably.Sampler
{-# NOINLINE globalSeed #-}
globalSeed :: IORef Seed
globalSeed = unsafePerformIO $ getSeedIO >>= newIORef
{-# NOINLINE sampleN #-}
sampleN :: Int -> Prob a -> [a]
sampleN n = unsafePerformIO . fmap (take n) . runSamplerIO
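-- Usage sketch (the sampler value is an assumption; any @Prob Double@ works):
--
-- > -- xs :: [Double]
-- > -- xs = sampleN 3 someUniformSampler
--
-- The result is obtained impurely via 'unsafePerformIO', so treat this as a
-- convenience for quick interactive sampling rather than pure code.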
{- withGlobalRnds (\rs -> sam n rs sf [])
where sam 0 rs _ xs = (xs, rs)
sam n rs s@(Sam sf) xs = let (x, rs') = sf rs
in sam (n-1) rs' s (x:xs) -}
{-sampleNV :: Int -> Sampler a -> Vector a
sampleNV n sf =
withGlobalRnds (\rs -> sam n rs sf [])
where sam 0 rs _ xs = (xs, rs)
sam n rs s@(Sam sf) xs = let (x, rs') = sf rs
in sam (n-1) rs' s (x:xs) -}
| glutamate/probably-base | Math/Probably/GlobalRandoms.hs | bsd-3-clause | 836 | 0 | 9 | 283 | 93 | 53 | 40 | 10 | 1 |
module AST.Module where
import Data.Binary
import qualified Data.List as List
import qualified Data.Map as Map
import Control.Applicative ((<$>),(<*>))
import qualified AST.Expression.Canonical as Canonical
import qualified AST.Declaration as Decl
import qualified AST.Type as Type
import qualified AST.Variable as Var
import AST.PrettyPrint
--import qualified Elm.Compiler.Version as Compiler
import Text.PrettyPrint as P
-- HELPFUL TYPE ALIASES
type Interfaces = Map.Map Name Interface
type Types = Map.Map String Type.CanonicalType
type Aliases = Map.Map String ([String], Type.CanonicalType)
type ADTs = Map.Map String (AdtInfo String)
type AdtInfo v = ( [String], [(v, [Type.CanonicalType])] )
type CanonicalAdt = (Var.Canonical, AdtInfo Var.Canonical)
-- MODULES
type SourceModule =
Module (Var.Listing Var.Value) [Decl.SourceDecl]
type ValidModule =
Module (Var.Listing Var.Value) [Decl.ValidDecl]
type CanonicalModule =
Module [Var.Value] CanonicalBody
data Module exports body = Module
{ names :: Name
, path :: FilePath
, exports :: exports
, imports :: [(Name, ImportMethod)]
, body :: body
}
data CanonicalBody = CanonicalBody
{ program :: Canonical.Expr
, types :: Types
, fixities :: [(Decl.Assoc, Int, String)]
, aliases :: Aliases
, datatypes :: ADTs
, ports :: [String]
}
-- HEADERS
{-| Basic info needed to identify modules and determine dependencies. -}
data HeaderAndImports = HeaderAndImports
{ _names :: Name
, _exports :: Var.Listing Var.Value
, _imports :: [(Name, ImportMethod)]
}
type Name = [String] -- must be non-empty
nameToString :: Name -> String
nameToString = List.intercalate "."
nameIsNative :: Name -> Bool
nameIsNative name =
case name of
"Native" : _ -> True
_ -> False
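-- For example: @nameIsNative ["Native","Utils"]@ is 'True', while
-- @nameIsNative ["List"]@ is 'False'.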
-- INTERFACES
{-| Key facts about a module, used when reading info from .elmi files. -}
data Interface = Interface
{ iVersion :: String
, iExports :: [Var.Value]
, iTypes :: Types
, iImports :: [(Name, ImportMethod)]
, iAdts :: ADTs
, iAliases :: Aliases
, iFixities :: [(Decl.Assoc, Int, String)]
, iPorts :: [String]
}
toInterface :: CanonicalModule -> Interface
toInterface modul =
let body' = body modul in
Interface
{ iVersion = "Haskelm"--Compiler.version
, iExports = exports modul
, iTypes = types body'
, iImports = imports modul
, iAdts = datatypes body'
, iAliases = aliases body'
, iFixities = fixities body'
, iPorts = ports body'
}
instance Binary Interface where
get = Interface <$> get <*> get <*> get <*> get <*> get <*> get <*> get <*> get
put modul = do
put (iVersion modul)
put (iExports modul)
put (iTypes modul)
put (iImports modul)
put (iAdts modul)
put (iAliases modul)
put (iFixities modul)
put (iPorts modul)
-- IMPORT METHOD
data ImportMethod
= As !String
| Open !(Var.Listing Var.Value)
open :: ImportMethod
open = Open (Var.openListing)
importing :: [Var.Value] -> ImportMethod
importing xs = Open (Var.Listing xs False)
instance Binary ImportMethod where
put method =
case method of
As alias -> putWord8 0 >> put alias
Open listing -> putWord8 1 >> put listing
get = do tag <- getWord8
case tag of
0 -> As <$> get
1 -> Open <$> get
_ -> error "Error reading valid ImportMethod type from serialized string"
-- PRETTY PRINTING
instance (Pretty exs, Pretty body) => Pretty (Module exs body) where
pretty (Module names _ exs ims body) =
P.vcat [modul, P.text "", prettyImports, P.text "", pretty body]
where
modul = P.text "module" <+> name <+> pretty exs <+> P.text "where"
name = P.text (List.intercalate "." names)
prettyImports =
P.vcat $ map prettyMethod ims
prettyMethod :: (Name, ImportMethod) -> Doc
prettyMethod import' =
case import' of
([name], As alias)
| name == alias -> P.empty
(_, As alias) -> P.text "as" <+> P.text alias
(_, Open listing) -> pretty listing
| JoeyEremondi/haskelm | src/AST/Module.hs | bsd-3-clause | 4,209 | 0 | 13 | 1,084 | 1,292 | 716 | 576 | 115 | 3 |
{-# LANGUAGE OverloadedStrings, Safe #-}
module Evalso.Cruncher.Language.Haskell (haskell) where
import Evalso.Cruncher.Language (Language (..))
haskell :: Language
haskell = Language {
_codeFilename = "program.hs"
, _compileCommand = Just ["ghc", "program.hs"]
, _compileTimeout = Just 3
, _runCommand = ["./program"]
, _runTimeout = 5
, _codemirror = "haskell"
, _rpm = "ghc-compiler"
, _displayName = "Haskell"
}
| eval-so/cruncher | src/Evalso/Cruncher/Language/Haskell.hs | bsd-3-clause | 437 | 0 | 8 | 76 | 104 | 67 | 37 | 13 | 1 |
-- | Combinators for executing IO actions in parallel on a thread pool.
--
-- This module just reexports "Control.Concurrent.ParallelIO.Global": this contains versions of
-- the combinators that make use of a single global thread pool with as many threads as there are
-- capabilities.
--
-- For finer-grained control, you can use "Control.Concurrent.ParallelIO.Local" instead, which
-- gives you control over the creation of the pool.
module Control.Concurrent.ParallelIO (
module Control.Concurrent.ParallelIO.Global
) where
-- By default, just export the user-friendly Global interface.
-- Those who want more power can import Local explicitly.
import Control.Concurrent.ParallelIO.Global
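-- A minimal usage sketch ('fetch' and 'urls' are placeholders, not part of
-- this package):
--
-- > main = do
-- >   _ <- parallel (map fetch urls)
-- >   stopGlobalPool
--
-- 'parallel', 'parallel_' and 'stopGlobalPool' all come from the re-exported
-- "Control.Concurrent.ParallelIO.Global" module.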
| batterseapower/parallel-io | Control/Concurrent/ParallelIO.hs | bsd-3-clause | 705 | 0 | 5 | 107 | 34 | 27 | 7 | 3 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Text.WildCardMatcher where
import Control.Applicative ((<$>), (<|>), (*>))
import qualified Data.List as L
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Lazy.Char8 as BSLC
import qualified Data.Attoparsec.ByteString.Char8 as ABC
class AppendParseResult a where
appendPR :: a -> a -> a
concatPR :: [a] -> a
emptyPR :: a
instance AppendParseResult BSL.ByteString where
appendPR = BSL.append
concatPR = BSL.concat
emptyPR = BSL.empty
instance AppendParseResult [a] where
appendPR = (++)
concatPR = L.concat
emptyPR = []
instance AppendParseResult a => AppendParseResult (ABC.Parser a) where
appendPR pa qa = do
a1 <- pa
a2 <- qa
return $ appendPR a1 a2
concatPR [] = emptyPR
concatPR (pa:pas) = do
a <- pa
appendPR a <$> concatPR pas
emptyPR = return emptyPR
instance AppendParseResult a => AppendParseResult (ABC.Parser a -> ABC.Parser a) where
appendPR npa nqa = npa . nqa
concatPR [] = emptyPR
concatPR (npa:npas) = do
appendPR npa $ concatPR npas
emptyPR = id
(+>) :: AppendParseResult a => ABC.Parser a -> ABC.Parser a -> ABC.Parser a
p +> q = p >>= \ a -> appendPR a <$> (p **> q)
(**>) :: AppendParseResult a => ABC.Parser a -> ABC.Parser a -> ABC.Parser a
p **> q = (p +> q) <|> q
{-
(+>>) :: AppendParseResult a => ABC.Parser b -> ABC.Parser a -> ABC.Parser a
p +>> q = p *> (p **>> q)
(**>>) :: AppendParseResult a => ABC.Parser b -> ABC.Parser a -> ABC.Parser a
p **>> q = (p +>> q) <|> q
(+<<) :: AppendParseResult a => ABC.Parser a -> ABC.Parser b -> ABC.Parser a
p +<< q = p >>= \ a -> appendPR a <$> (p **<< q)
(**<<) :: AppendParseResult a => ABC.Parser a -> ABC.Parser b -> ABC.Parser a
p **<< q = (p +<< q) <|> (q *> return emptyPR)
---}
-------------------------------
pLbs :: BS.ByteString -> ABC.Parser BSL.ByteString
pLbs lbs = BSL.fromStrict <$> ABC.string lbs
pAnyChar :: ABC.Parser BSL.ByteString
pAnyChar = BSLC.singleton <$> ABC.anyChar
asteriskParserN :: ABC.Parser (ABC.Parser BSL.ByteString -> ABC.Parser BSL.ByteString)
asteriskParserN = ABC.takeWhile1 ('*' ==) *> (return $ \ r -> (pAnyChar **> r))
asteriskParser :: ABC.Parser (ABC.Parser BSL.ByteString)
asteriskParser = ($ emptyPR) <$> asteriskParserN
stringParserN :: ABC.Parser (ABC.Parser BSL.ByteString -> ABC.Parser BSL.ByteString)
stringParserN = appendPR <$> stringParser
stringParser :: ABC.Parser (ABC.Parser BSL.ByteString)
stringParser = pLbs <$> ABC.takeWhile1 ('*' /=)
wildCardParserN :: ABC.Parser (ABC.Parser BSL.ByteString -> ABC.Parser BSL.ByteString)
wildCardParserN = concatPR <$> (ABC.many' $ asteriskParserN <|> stringParserN)
wildCardParserGenerater :: ABC.Parser (ABC.Parser BSL.ByteString)
wildCardParserGenerater = ($ emptyPR) <$> wildCardParserN
generateParser :: ABC.Parser (ABC.Parser BSL.ByteString) -> BS.ByteString -> ABC.Parser BSL.ByteString
generateParser pg bs = case ABC.parse pg bs of
ABC.Done _ r -> r
ABC.Partial f -> case f "" of
ABC.Done _ r -> r
_ -> emptyPR
_ -> emptyPR
type Matcher = BS.ByteString -> Bool
matchWildCardString :: BS.ByteString -> Matcher
matchWildCardString "" "" = True
matchWildCardString _ "" = False
matchWildCardString wildCardString targetString = case ABC.parse parser targetString of
ABC.Done "" _ -> True
ABC.Partial f -> case f "" of
ABC.Done "" _ -> True
_ -> False
_ -> False
where
parser = generateParser wildCardParserGenerater wildCardString
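-- Example behaviour (a sketch, not taken from a test suite):
--
-- > matchWildCardString "foo*bar" "foo123bar"  -- True
-- > matchWildCardString "foo*bar" "foobaz"     -- False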
| asakamirai/hs-wildcard | src/Text/WildCardMatcher.hs | bsd-3-clause | 3,700 | 0 | 12 | 741 | 1,084 | 565 | 519 | 76 | 4 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StandaloneDeriving #-}
module RequestSpec (main, spec) where
import Network.Wai.Handler.Warp.Request
import Network.Wai.Handler.Warp.RequestHeader (parseByteRanges)
import Network.Wai.Handler.Warp.Types
import Test.Hspec
import Test.Hspec.QuickCheck
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as L
import qualified Network.HTTP.Types.Header as HH
import Data.IORef
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "headerLines" $ do
it "takes until blank" $
blankSafe >>= (`shouldBe` ("", ["foo", "bar", "baz"]))
it "ignored leading whitespace in bodies" $
whiteSafe >>= (`shouldBe` (" hi there", ["foo", "bar", "baz"]))
it "throws OverLargeHeader when too many" $
tooMany `shouldThrow` overLargeHeader
it "throws OverLargeHeader when too large" $
tooLarge `shouldThrow` overLargeHeader
it "known bad chunking behavior #239" $ do
let chunks =
[ "GET / HTTP/1.1\r\nConnection: Close\r"
, "\n\r\n"
]
(actual, src) <- headerLinesList' chunks
leftover <- readLeftoverSource src
leftover `shouldBe` S.empty
actual `shouldBe` ["GET / HTTP/1.1", "Connection: Close"]
prop "random chunking" $ \breaks extraS -> do
let bsFull = "GET / HTTP/1.1\r\nConnection: Close\r\n\r\n" `S8.append` extra
extra = S8.pack extraS
chunks = loop breaks bsFull
loop [] bs = [bs, undefined]
loop (x:xs) bs =
bs1 : loop xs bs2
where
(bs1, bs2) = S8.splitAt ((x `mod` 10) + 1) bs
(actual, src) <- headerLinesList' chunks
leftover <- consumeLen (length extraS) src
actual `shouldBe` ["GET / HTTP/1.1", "Connection: Close"]
leftover `shouldBe` extra
describe "parseByteRanges" $ do
let test x y = it x $ parseByteRanges (S8.pack x) `shouldBe` y
test "bytes=0-499" $ Just [HH.ByteRangeFromTo 0 499]
test "bytes=500-999" $ Just [HH.ByteRangeFromTo 500 999]
test "bytes=-500" $ Just [HH.ByteRangeSuffix 500]
test "bytes=9500-" $ Just [HH.ByteRangeFrom 9500]
test "foobytes=9500-" Nothing
test "bytes=0-0,-1" $ Just [HH.ByteRangeFromTo 0 0, HH.ByteRangeSuffix 1]
where
blankSafe = headerLinesList ["f", "oo\n", "bar\nbaz\n\r\n"]
whiteSafe = headerLinesList ["foo\r\nbar\r\nbaz\r\n\r\n hi there"]
tooMany = headerLinesList $ repeat "f\n"
tooLarge = headerLinesList $ repeat "f"
headerLinesList orig = do
(res, src) <- headerLinesList' orig
leftover <- readLeftoverSource src
return (leftover, res)
headerLinesList' orig = do
ref <- newIORef orig
let src = do
x <- readIORef ref
case x of
[] -> return S.empty
y:z -> do
writeIORef ref z
return y
src' <- mkSource src
res <- headerLines src'
return (res, src')
consumeLen len0 src =
loop id len0
where
loop front len
| len <= 0 = return $ S.concat $ front []
| otherwise = do
bs <- readSource src
if S.null bs
then loop front 0
else do
let (x, y) = S.splitAt len bs
loop (front . (x:)) (len - S.length x)
overLargeHeader :: Selector InvalidRequest
overLargeHeader e = e == OverLargeHeader
| sol/wai | warp/test/RequestSpec.hs | mit | 3,566 | 0 | 23 | 1,016 | 1,060 | 543 | 517 | 88 | 2 |
-- | Module containing explorer-specific datatypes
module Pos.Explorer.Core.Types
( TxExtra (..)
, AddrHistory
) where
import Universum
import Pos.Binary.Class (Cons (..), Field (..), deriveSimpleBi)
import Pos.Chain.Block (HeaderHash)
import Pos.Chain.Txp (TxId, TxUndo)
import Pos.Core (Timestamp)
import Pos.Core.Chrono (NewestFirst)
type AddrHistory = NewestFirst [] TxId
data TxExtra = TxExtra
{ teBlockchainPlace :: !(Maybe (HeaderHash, Word32))
, teReceivedTime :: !(Maybe Timestamp)
-- non-strict on purpose, see comment in `processTxDo` in Pos.Explorer.Txp.Local
, teInputOutputs :: TxUndo
} deriving (Show, Generic, Eq)
deriveSimpleBi ''TxExtra [
Cons 'TxExtra [
Field [| teBlockchainPlace :: Maybe (HeaderHash, Word32) |],
Field [| teReceivedTime :: Maybe Timestamp |],
Field [| teInputOutputs :: TxUndo |]
]]
| input-output-hk/pos-haskell-prototype | explorer/src/Pos/Explorer/Core/Types.hs | mit | 1,003 | 0 | 12 | 287 | 217 | 133 | 84 | -1 | -1 |
{- |
Module : ./Taxonomy/MMiSSOntologyGraph.hs
Copyright : (c) Uni Bremen 2004-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable(uni)
displays an ontology graph
-}
module Taxonomy.MMiSSOntologyGraph (displayClassGraph) where
import Data.List
import Control.Monad
import Data.IORef
import GUI.UDGUtils
import qualified GUI.HTkUtils as S
import Taxonomy.MMiSSOntology
import qualified Data.Map as Map
import Common.Lib.Graph
import Data.Graph.Inductive.Graph
import Data.Graph.Inductive.Basic
import Data.Graph.Inductive.Query.DFS
import qualified Data.Foldable
import qualified Taxonomy.AbstractGraphView as A
displayClassGraph :: MMiSSOntology -> Maybe String -> IO A.OurGraph
displayClassGraph onto startClass = do
S.initHTk []
ginfo <- A.initgraphs
classGraph <- case startClass of
Nothing -> return $ getPureClassGraph $ getClassGraph onto
Just className -> case gselName className $ getClassGraph onto of
[] -> return $ getPureClassGraph $ getClassGraph onto
(_, v, l, _) : _ -> return $ ([], v, l, []) & empty
A.Result gid _ <-
A.makegraph (getOntologyName onto) Nothing Nothing Nothing
[GlobalMenu (Button "Button2" (putStrLn "Button2 was pressed"))]
(map ( \ ( nam, col) -> (getTypeLabel nam, Box $$$ Color col $$$
createLocalMenu onto ginfo
$$$ ValueTitle ( \ (name, _, _) -> return name) $$$
emptyNodeTypeParms :: DaVinciNodeTypeParms (String, Int, Int)
)) [ (OntoClass, "#e0eeee")
, (OntoPredicate, "#ffd300")
, (OntoObject, "#ffffA0") ])
(createEdgeTypes $ getClassGraph onto)
[]
ginfo
updateDaVinciGraph classGraph gid ginfo
setEmptyRelationSpecs gid ginfo onto
A.Result gid' _ <- A.redisplay gid ginfo
A.getGraphid gid' ginfo
setEmptyRelationSpecs :: A.Descr -> A.GraphInfo -> MMiSSOntology -> IO ()
setEmptyRelationSpecs gid gv onto = do
(gs, _) <- readIORef gv
case lookup gid gs of
Nothing -> return ()
_ -> do
A.writeRelViewSpecs gid
(map ( \ relname -> A.RelViewSpec relname False False)
$ getRelationNames onto) gv
return ()
updateDaVinciGraph :: Gr (String, String, OntoObjectType) String -> A.Descr
-> A.GraphInfo -> IO ()
updateDaVinciGraph nGraph gid gv = do
(gs, _) <- readIORef gv
case lookup gid gs of
Nothing -> return ()
Just g -> do
let oldGraph = A.ontoGraph g
nMap = A.nodeMap g
nodeMap1 <- foldM (createNode gid gv oldGraph) nMap
$ labNodes nGraph
nodeMap2 <- foldM (createLink gid gv) nodeMap1 $ labEdges nGraph
A.Result gid' err <- A.writeOntoGraph gid nGraph gv
A.writeNodeMap gid' nodeMap2 gv
Data.Foldable.forM_ err putStr
getTypeLabel :: OntoObjectType -> String
getTypeLabel t = case t of
OntoClass -> "class"
OntoObject -> "object"
OntoPredicate -> "predicate"
createNode :: Int -> A.GraphInfo -> ClassGraph -> A.NodeMapping
-> LNode (String, String, OntoObjectType) -> IO A.NodeMapping
createNode gid ginfo _ nMap (nodeID, (name, _, objectType)) =
case Map.lookup nodeID nMap of
Just _ -> return nMap
Nothing ->
do A.Result nid err <-
A.addnode gid (getTypeLabel objectType) name ginfo
case err of
Nothing -> return (Map.insert nodeID nid nMap)
Just str -> do
putStr str
return $ Map.insert nodeID nid nMap
createLink :: A.Descr -> A.GraphInfo -> A.NodeMapping -> LEdge String
-> IO A.NodeMapping
createLink gid ginfo nMap (node1, node2, edgeLabel) = do
dNodeID_1 <- case Map.lookup node1 nMap of
Nothing -> return (-1)
Just n -> return n
dNodeID_2 <- case Map.lookup node2 nMap of
Nothing -> return (-1)
Just n -> return n
if dNodeID_1 == -1 || dNodeID_2 == -1 then return nMap else do
A.Result _ err <-
if elem edgeLabel ["isa", "instanceOf", "livesIn", "proves"]
then A.addlink gid edgeLabel edgeLabel Nothing
dNodeID_2 dNodeID_1 ginfo
else A.addlink gid edgeLabel edgeLabel Nothing
dNodeID_1 dNodeID_2 ginfo
Data.Foldable.forM_ err putStr
return nMap
showRelationsForVisible :: MMiSSOntology -> A.GraphInfo -> (String, Int, Int)
-> IO ()
showRelationsForVisible onto gv (_, _, gid) =
do (gs, _) <- readIORef gv
case lookup gid gs of
Nothing -> return ()
Just g ->
do let oldGraph = A.ontoGraph g
nodesInOldGraph = map fst $ labNodes oldGraph
newGr = restrict (`elem` nodesInOldGraph) $ getClassGraph onto
purgeGraph gid gv
updateDaVinciGraph newGr gid gv
A.redisplay gid gv
return ()
showObjectsForVisible :: MMiSSOntology -> A.GraphInfo -> (String, Int, Int)
-> IO ()
showObjectsForVisible onto gv (_, _, gid) =
do (gs, _) <- readIORef gv
case lookup gid gs of
Nothing -> return ()
Just g ->
do let oldGraph = A.ontoGraph g
classesInOldGraph =
map ( \ (_, _, (className, _, _), _) -> className)
$ filter ( \ (_, _, (_, _, objectType), _)
-> objectType == OntoClass)
$ map (context oldGraph) $ nodes oldGraph
findObjectsOfClass classList (_, (_, className, _)) =
elem className classList
objectList =
map fst $ filter (findObjectsOfClass classesInOldGraph)
$ getTypedNodes [OntoObject] $ getClassGraph onto
objectGr = restrict (`elem` objectList) (getClassGraph onto)
updateDaVinciGraph (makeObjectGraph oldGraph
(getPureClassGraph (getClassGraph onto))
objectGr) gid gv
A.redisplay gid gv
return ()
showWholeObjectGraph :: MMiSSOntology -> A.GraphInfo -> (String, Int, Int)
-> IO ()
showWholeObjectGraph onto gv (_, _, gid) =
do oldGv <- readIORef gv
A.Result _ err <- purgeGraph gid gv
let objectList = map fst $ getTypedNodes [OntoObject] $ getClassGraph onto
objectGraph = restrict (`elem` objectList) $ getClassGraph onto
updateDaVinciGraph (makeObjectGraph empty
(getClassGraph onto) objectGraph) gid gv
case err of
Just _ -> writeIORef gv oldGv
Nothing -> do
A.redisplay gid gv
return ()
makeObjectGraph :: ClassGraph -> ClassGraph -> ClassGraph -> ClassGraph
makeObjectGraph oldGr classGr objectGr =
let newGr = insNodes (labNodes objectGr) oldGr
newGr2 = foldl insEdgeSecurely newGr (labEdges objectGr)
newGr3 = foldl insInstanceOfEdge newGr2 (labNodes objectGr)
in newGr3
where
insEdgeSecurely gr (node1, node2, label) = case match node1 gr of
(Nothing, _) -> gr
_ -> case match node2 gr of
(Nothing, _) -> gr
_ -> insEdge (node1, node2, label) gr
insInstanceOfEdge gr (_, (objectName, className, _)) =
case findLNode gr className of
Nothing -> case findLNode classGr className of
Nothing -> gr
Just classNodeID -> insInstanceOfEdge1
(insNode (classNodeID, (className, "", OntoClass)) gr)
classNodeID objectName
Just classNodeID -> insInstanceOfEdge1 gr classNodeID objectName
insInstanceOfEdge1 gr classNodeID objectName =
case findLNode gr objectName of
Nothing -> gr
Just objectNodeID -> insEdge
(objectNodeID, classNodeID, "instanceOf") gr
showWholeClassGraph :: MMiSSOntology -> A.GraphInfo -> (String, Int, Int)
-> IO ()
showWholeClassGraph onto gv (_, _, gid) =
do oldGv <- readIORef gv
A.Result _ err <- purgeGraph gid gv
updateDaVinciGraph (getPureClassGraph (getClassGraph onto)) gid gv
case err of
Just _ -> writeIORef gv oldGv
Nothing -> do
A.redisplay gid gv
return ()
showRelationsToNeighbors :: MMiSSOntology -> A.GraphInfo -> [String]
-> (String, Int, Int) -> IO ()
showRelationsToNeighbors onto gv rels (name, _, gid) =
do oldGv <- readIORef gv
updateDaVinciGraph (reduceToNeighbors (getClassGraph onto)
name rels) gid gv
writeIORef gv oldGv
reduceToNeighbors :: ClassGraph -> String -> [String] -> ClassGraph
reduceToNeighbors g name forbiddenRels =
case findLNode g name of
Nothing -> g
Just node ->
let (p, v, l, s) = context g node
noForbidden = not . (`elem` forbiddenRels) . fst
p' = filter noForbidden p
s' = filter noForbidden s
ns = map snd p' ++ map snd s'
myInsNode gr newGr nodeID = case match nodeID newGr of
(Nothing, _) ->
([], nodeID, lab' (context gr nodeID), []) & newGr
_ -> newGr
in (p', v, l, s') & foldl (myInsNode g) empty ns
showAllRelations :: MMiSSOntology -> A.GraphInfo -> Bool -> [String]
-> (String, Int, Int) -> IO ()
showAllRelations onto gv withIncoming rels (name, _, gid) =
do oldGv <- readIORef gv
let newGr = reduceToRelations (getClassGraph onto)
empty withIncoming rels name
updateDaVinciGraph newGr gid gv
writeIORef gv oldGv
reduceToRelations :: ClassGraph -> ClassGraph -> Bool -> [String] -> String
-> ClassGraph
reduceToRelations wholeGraph g withIncoming forbiddenRels name =
let g1 = elfilter (not . (`elem` forbiddenRels)) wholeGraph
in case findLNode g1 name of
Nothing -> g
Just selectedNode ->
let nodeList = if withIncoming
then udfs [selectedNode] g1
else dfs [selectedNode] g1
toDelete = nodes g1 \\ nodeList
g1' = delNodes toDelete g1
g2 = mergeGraphs g1' g
newNodesList = nodeList \\ nodes g
in if null newNodesList
then g2
else foldl (followRelationOverSubClasses wholeGraph
withIncoming forbiddenRels) g2 newNodesList
followRelationOverSubClasses :: ClassGraph -> Bool -> [String] -> ClassGraph
-> Node -> ClassGraph
followRelationOverSubClasses wholeGraph withIncoming forbiddenRels g node =
let g1 = elfilter (== "isa") wholeGraph
in case match node g1 of
(Nothing, _) -> g
_ ->
let subclasses = rdfs [node] g1
newNs = subclasses \\ nodes g
in if null newNs
then g
else
let
toDelete = nodes g1 \\ subclasses
g2 = mergeGraphs (delNodes toDelete g1) g
in foldl transClosureForNode g2 newNs
where
transClosureForNode g' n =
let (name, _, _) = lab' $ context wholeGraph n
in reduceToRelations wholeGraph g' withIncoming forbiddenRels name
mergeGraphs :: ClassGraph -> ClassGraph -> ClassGraph
mergeGraphs g1 g2 =
insEdges (labEdges g2) $ insNodes (labNodes g2 \\ labNodes g1) g1
showSuperSubClassesForVisible :: MMiSSOntology -> A.GraphInfo -> Bool -> Bool
-> (String, Int, Int) -> IO ()
showSuperSubClassesForVisible onto gv showSuper transitive (_, _, gid) =
do nodeList <- myGetNodes gid gv
if transitive
then updateDaVinciGraph
(foldl (getSubSuperClosure (getClassGraph onto) showSuper)
empty nodeList) gid gv
else updateDaVinciGraph
(foldl (getSubSuperSingle (getClassGraph onto) showSuper)
empty nodeList) gid gv
A.redisplay gid gv
return ()
reduceToThisNode :: MMiSSOntology -> A.GraphInfo -> (String, Int, Int) -> IO ()
reduceToThisNode onto gv (name, _, gid) = do
purgeGraph gid gv
case gselName name $ getClassGraph onto of
[] -> return ()
(_, v, l, _) : _ -> do
updateDaVinciGraph (([], v, l, []) & empty) gid gv
A.redisplay gid gv
return ()
purgeThisNode :: A.GraphInfo -> (String, Int, Int) -> IO ()
purgeThisNode gv (name, _, gid) =
do (gs, _) <- readIORef gv
case lookup gid gs of
Nothing -> return ()
Just g ->
do let oldGraph = A.ontoGraph g
nMap = A.nodeMap g
(_, mayNodeID) <-
case findLNode oldGraph name of
Nothing -> return (oldGraph, Nothing)
Just nodeID -> return (delNode nodeID oldGraph, Just nodeID)
case mayNodeID of
Nothing -> return ()
Just nodeID ->
case Map.lookup nodeID nMap of
Nothing -> return ()
Just node -> do
A.delnode gid node gv
A.redisplay gid gv
return ()
showSuperSubClasses :: MMiSSOntology -> A.GraphInfo -> Bool -> Bool
-> (String, Int, Int) -> IO ()
showSuperSubClasses onto gv showSuper transitive (name, _, gid) =
do if transitive
then updateDaVinciGraph
(getSubSuperClosure (getClassGraph onto) showSuper empty name)
gid gv
else updateDaVinciGraph (getSubSuperSingle (getClassGraph onto)
showSuper empty name) gid gv
A.redisplay gid gv
return ()
getSubSuperSingle :: ClassGraph -> Bool -> ClassGraph -> String -> ClassGraph
getSubSuperSingle g showSuper newGr name =
case findLNode g name of
Nothing -> g
Just nodeID ->
let isaPred (_, _, a) = a == "isa"
subClassEdges = filter isaPred $ inn g nodeID
ng = foldl (insPredecessorAndEdge g)
(insertInitialNode nodeID newGr) subClassEdges
in if showSuper
then let superClassEdges = filter isaPred $ out g nodeID
in foldl (insSuccessorAndEdge g) ng superClassEdges
else ng
where
insertInitialNode :: Node -> ClassGraph -> ClassGraph
insertInitialNode nodeID gr =
case match nodeID gr of
(Nothing, _) -> ([], nodeID, (name, "", OntoClass), []) & gr
_ -> gr
insPredecessorAndEdge :: ClassGraph -> ClassGraph -> LEdge String
-> ClassGraph
insPredecessorAndEdge oldGr newGr' (fromNode, toNode, edgeLabel) =
case fst $ match fromNode oldGr of
Nothing -> newGr'
Just (_, _, nodeLabel1, _) ->
case match fromNode newGr' of
(Nothing, _) ->
([], fromNode, nodeLabel1, [(edgeLabel, toNode)]) & newGr'
(Just (p, fromNodeID, nodeLabel2, s), newGr2) ->
(p, fromNodeID, nodeLabel2, (edgeLabel, toNode) : s) & newGr2
insSuccessorAndEdge :: ClassGraph -> ClassGraph -> LEdge String
-> ClassGraph
insSuccessorAndEdge oldGr newGr' (fromNode, toNode, edgeLabel) =
case fst $ match toNode oldGr of
Nothing -> newGr'
Just (_, _, (nodeLabel1, _, _), _) ->
case match toNode newGr' of
(Nothing, _) ->
([(edgeLabel, fromNode)], toNode, (nodeLabel1, "", OntoClass), [])
& newGr'
(Just (p, toNodeID, nodeLabel2, s), newGr2) ->
((edgeLabel, fromNode) : p, toNodeID, nodeLabel2, s) & newGr2
getSubSuperClosure :: ClassGraph -> Bool -> ClassGraph -> String -> ClassGraph
getSubSuperClosure g showSuper newGr name =
case findLNode g name of
Nothing -> g
Just nodeID ->
let ng = foldl subClassClosure newGr [nodeID]
in if showSuper
then foldl (superClassClosure nodeID) ng [nodeID]
else ng
where
superClassClosure :: Node -> ClassGraph -> Node -> ClassGraph
superClassClosure specialNodeID ng nodeID =
case fst $ match nodeID g of
Nothing -> ng
Just (_, _, (label, _, _), outAdj) ->
let isaAdj = filter ((== "isa") . fst) outAdj
ng1 = foldl (superClassClosure specialNodeID) ng
$ map snd isaAdj
in if nodeID == specialNodeID
then case match specialNodeID ng1 of
-- This should never be the case, but we somehow have to deal with it
(Nothing, _) -> (isaAdj, nodeID, (label, "", OntoClass), [])
& ng1
(Just (inAdj, _, _, _), ng2) ->
(inAdj, nodeID, (label, "", OntoClass), isaAdj) & ng2
else case match nodeID ng1 of
(Nothing, _) -> ([], nodeID, (label, "", OntoClass), isaAdj)
& ng1
(Just (inAdj, _, _, outAdj2), ng2) ->
(inAdj ++ isaAdj, nodeID, (label, "", OntoClass), outAdj2)
& ng2
    {- - subClassClosure hunts transitively all isa-adjacencies that go
       into the given node (nodeID). For all nodes collected, their
       outgoing adjacencies are ignored because we only want to show the
       isa-relation to the superclass. The given specialNodeID is the ID
       of the node from which the search for subclasses started. Because
       this node is already in the graph, we have to delete and reinsert
       it with its outgoing adjacencies (which consist of
       the isa-relations to its superclasses, built by superClassClosure
       beforehand). - -}
subClassClosure :: ClassGraph -> Node -> ClassGraph
subClassClosure ng nodeID =
case fst $ match nodeID g of
Nothing -> ng
Just (inAdj, _, (label, _, _), _) ->
let isaAdj = filter ((== "isa") . fst) inAdj
ng1 = foldl subClassClosure ng $ map snd isaAdj
in case fst $ match nodeID ng1 of
Nothing -> (isaAdj, nodeID, (label, "", OntoClass), []) & ng1
_ -> ng1
hideObjectsForVisible :: A.GraphInfo -> (String, Int, Int) -> IO ()
hideObjectsForVisible gv (_, _, gid) =
do (gs, _) <- readIORef gv
case lookup gid gs of
Nothing -> return ()
Just g ->
do let oldGraph = A.ontoGraph g
objectNodeIDs = map ( \ (_, v, _, _) -> v) $
gselType (== OntoObject) oldGraph
purgeGraph gid gv
updateDaVinciGraph (restrict (`notElem` objectNodeIDs) oldGraph)
gid gv
A.redisplay gid gv
return ()
createEdgeTypes :: ClassGraph -> [(String, DaVinciArcTypeParms A.EdgeValue)]
createEdgeTypes g = map createEdgeType
$ nub (map ( \ (_, _, l) -> l) $ labEdges g) ++ ["instanceOf"]
where
createEdgeType str =
case str of
"isa" ->
("isa",
Thick
$$$ Head "oarrow"
$$$ Dir "first"
$$$ emptyArcTypeParms :: DaVinciArcTypeParms A.EdgeValue)
"instanceOf" ->
("instanceOf",
Dotted
$$$ Dir "first"
$$$ emptyArcTypeParms :: DaVinciArcTypeParms A.EdgeValue)
_ -> -- "contains"
(str,
Solid
$$$ Head "arrow"
$$$ ValueTitle (\ (name, _, _) -> return name)
$$$ emptyArcTypeParms :: DaVinciArcTypeParms A.EdgeValue)
createLocalMenu :: MMiSSOntology -> A.GraphInfo -> LocalMenu (String, Int, Int)
createLocalMenu onto ginfo =
let relMenus b =
createRelationMenuButtons b (getRelationNames onto) onto ginfo
allRels f b = [ Button "All relations" $ f onto ginfo b ["isa"]
, Blank ] ++ relMenus b
superSub' f b1 = Button
(if b1 then "Sub/Superclasses" else "Subclasses")
. f onto ginfo b1
superSub = superSub' showSuperSubClasses
superSubForVis = superSub' showSuperSubClassesForVisible
relMen f = Menu (Just "Show relations")
[ Menu (Just "Outgoing") $ f False
, Menu (Just "Out + In") $ f True ]
nodeMen f b = Menu (Just $ "Show "
++ if b then "transitively" else "adjacent")
. ([ f False b, f True b ] ++)
in LocalMenu $ Menu Nothing
[ Menu (Just "For this node")
[ nodeMen superSub True [relMen $ allRels showAllRelations]
, nodeMen superSub False
[relMen $ allRels ( \ o g _ -> showRelationsToNeighbors o g)]
]
, Menu (Just "For visible nodes")
[ nodeMen superSubForVis True []
, nodeMen superSubForVis False []
, Blank
, Button "Show relations" $ showRelationsForVisible onto ginfo
, Blank
, Button "Show objects" $ showObjectsForVisible onto ginfo
, Button "Hide objects" $ hideObjectsForVisible ginfo
]
, Button "Show whole class graph" $ showWholeClassGraph onto ginfo
, Button "Show whole object graph" $ showWholeObjectGraph onto ginfo
, Button "Show relations" $ showRelationDialog ginfo
, Button "Reduce to this node" $ reduceToThisNode onto ginfo
, Button "Delete this node" $ purgeThisNode ginfo
]
createRelationMenuButtons :: Bool -> [String] -> MMiSSOntology -> A.GraphInfo
-> [MenuPrim a ((String, Int, Int) -> IO ())]
createRelationMenuButtons withIncomingRels relNames onto ginfo =
map createButton relNames
where
createButton name = Button name
$ showAllRelations onto ginfo withIncomingRels
$ delete name $ relNames ++ ["isa"]
myDeleteNode :: A.Descr -> A.GraphInfo -> A.Result
-> (Int, (String, DaVinciNode (String, Int, Int)))
-> IO A.Result
myDeleteNode gid gv _ node = A.delnode gid (fst node) gv
purgeGraph :: Int -> A.GraphInfo -> IO A.Result
purgeGraph gid gv =
do (gs, _) <- readIORef gv
case lookup gid gs of
Just g -> do
A.writeOntoGraph gid empty gv
A.writeNodeMap gid Map.empty gv
foldM (myDeleteNode gid gv) (A.Result 0 Nothing)
$ Map.toList $ A.nodes g
Nothing -> return $ A.Result 0 $ Just $
"Graph id " ++ show gid ++ " not found"
myGetNodes :: Int -> A.GraphInfo -> IO [String]
myGetNodes gid gv =
do (gs, _) <- readIORef gv
case lookup gid gs of
Just g -> return $ map ( \ (_, (name, _, _)) -> name)
$ labNodes $ A.ontoGraph g
Nothing -> return []
getPureClassGraph :: ClassGraph -> ClassGraph
getPureClassGraph g =
let classNodeList = map fst
$ getTypedNodes [OntoClass, OntoPredicate] g
in restrict (`elem` classNodeList) g
restrict :: DynGraph gr => (Node -> Bool) -> gr a b -> gr a b
restrict f = ufold cfilter empty
where cfilter (p, v, l, s) g =
if f v then (p', v, l, s') & g else g
where p' = filter (f . snd) p
s' = filter (f . snd) s
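-- For instance (sketch): @restrict (`elem` [1,2,3]) g@ keeps only nodes 1, 2
-- and 3 together with the edges running between them (an induced subgraph).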
getTypedNodes :: [OntoObjectType] -> ClassGraph
-> [LNode (String, String, OntoObjectType)]
getTypedNodes ts = map labNode' . gselType (`elem` ts)
showRelationDialog :: A.GraphInfo -> (String, Int, Int) -> IO ()
showRelationDialog gv (_ , _, gid) =
do (gs, _) <- readIORef gv
case lookup gid gs of
Nothing -> return ()
Just g ->
do let rvs = A.relViewSpecs g
specEntries = S.row $ map relSpecToFormEntry rvs
form = firstRow S.// specEntries
S.doForm "Show relations" form
return ()
where
firstRow = S.newFormEntry "" () S.\\ S.newFormEntry "Show" ()
S.\\ S.newFormEntry "Transitive" ()
relSpecToFormEntry (A.RelViewSpec relname b1 b2) =
S.newFormEntry relname () S.\\ S.newFormEntry "" b1
S.\\ S.newFormEntry "" b2
| spechub/Hets | Taxonomy/MMiSSOntologyGraph.hs | gpl-2.0 | 24,216 | 0 | 22 | 7,970 | 7,492 | 3,821 | 3,671 | 521 | 9 |
{- |
Module : ./Common/ATerm/ATermDiffMain.hs
Copyright : (c) C. Maeder, DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
-}
module Main (main) where
import System.Environment
import ATerm.ReadWrite
import ATerm.Unshared
import ATerm.Diff
import Control.Monad
main :: IO ()
main = do
args <- getArgs
case args of
[x, y] -> atDiffFP x y
_ -> putStrLn "expected two argument filenames"
atDiffFP :: FilePath -> FilePath -> IO ()
atDiffFP fp1 fp2 =
do at1 <- atermFromFile fp1
at2 <- atermFromFile fp2
let (at, diffs) = atDiff at1 at2
unless (null diffs) $ do
putStrLn (writeATerm (toATermTable at))
putStrLn (writeATerm (toATermTable (AList diffs [])))
atermFromFile :: FilePath -> IO ATerm
atermFromFile fp =
do str <- readFile fp
return (getATermFull (readATerm str))
| spechub/Hets | Common/ATerm/ATermDiffMain.hs | gpl-2.0 | 959 | 0 | 17 | 228 | 269 | 131 | 138 | 24 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.IAM.ResyncMFADevice
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Synchronizes the specified MFA device with AWS servers.
--
-- For more information about creating and working with virtual MFA
-- devices, go to
-- <http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_VirtualMFA.html Using a Virtual MFA Device>
-- in the /Using IAM/ guide.
--
-- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_ResyncMFADevice.html AWS API Reference> for ResyncMFADevice.
module Network.AWS.IAM.ResyncMFADevice
(
-- * Creating a Request
resyncMFADevice
, ResyncMFADevice
-- * Request Lenses
, rmdUserName
, rmdSerialNumber
, rmdAuthenticationCode1
, rmdAuthenticationCode2
-- * Destructuring the Response
, resyncMFADeviceResponse
, ResyncMFADeviceResponse
) where
import Network.AWS.IAM.Types
import Network.AWS.IAM.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'resyncMFADevice' smart constructor.
data ResyncMFADevice = ResyncMFADevice'
{ _rmdUserName :: !Text
, _rmdSerialNumber :: !Text
, _rmdAuthenticationCode1 :: !Text
, _rmdAuthenticationCode2 :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ResyncMFADevice' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rmdUserName'
--
-- * 'rmdSerialNumber'
--
-- * 'rmdAuthenticationCode1'
--
-- * 'rmdAuthenticationCode2'
resyncMFADevice
:: Text -- ^ 'rmdUserName'
-> Text -- ^ 'rmdSerialNumber'
-> Text -- ^ 'rmdAuthenticationCode1'
-> Text -- ^ 'rmdAuthenticationCode2'
-> ResyncMFADevice
resyncMFADevice pUserName_ pSerialNumber_ pAuthenticationCode1_ pAuthenticationCode2_ =
ResyncMFADevice'
{ _rmdUserName = pUserName_
, _rmdSerialNumber = pSerialNumber_
, _rmdAuthenticationCode1 = pAuthenticationCode1_
, _rmdAuthenticationCode2 = pAuthenticationCode2_
}
-- | The name of the user whose MFA device you want to resynchronize.
rmdUserName :: Lens' ResyncMFADevice Text
rmdUserName = lens _rmdUserName (\ s a -> s{_rmdUserName = a});
-- | Serial number that uniquely identifies the MFA device.
rmdSerialNumber :: Lens' ResyncMFADevice Text
rmdSerialNumber = lens _rmdSerialNumber (\ s a -> s{_rmdSerialNumber = a});
-- | An authentication code emitted by the device.
rmdAuthenticationCode1 :: Lens' ResyncMFADevice Text
rmdAuthenticationCode1 = lens _rmdAuthenticationCode1 (\ s a -> s{_rmdAuthenticationCode1 = a});
-- | A subsequent authentication code emitted by the device.
rmdAuthenticationCode2 :: Lens' ResyncMFADevice Text
rmdAuthenticationCode2 = lens _rmdAuthenticationCode2 (\ s a -> s{_rmdAuthenticationCode2 = a});
instance AWSRequest ResyncMFADevice where
type Rs ResyncMFADevice = ResyncMFADeviceResponse
request = postQuery iAM
response = receiveNull ResyncMFADeviceResponse'
instance ToHeaders ResyncMFADevice where
toHeaders = const mempty
instance ToPath ResyncMFADevice where
toPath = const "/"
instance ToQuery ResyncMFADevice where
toQuery ResyncMFADevice'{..}
= mconcat
["Action" =: ("ResyncMFADevice" :: ByteString),
"Version" =: ("2010-05-08" :: ByteString),
"UserName" =: _rmdUserName,
"SerialNumber" =: _rmdSerialNumber,
"AuthenticationCode1" =: _rmdAuthenticationCode1,
"AuthenticationCode2" =: _rmdAuthenticationCode2]
-- | /See:/ 'resyncMFADeviceResponse' smart constructor.
data ResyncMFADeviceResponse =
ResyncMFADeviceResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ResyncMFADeviceResponse' with the minimum fields required to make a request.
--
resyncMFADeviceResponse
:: ResyncMFADeviceResponse
resyncMFADeviceResponse = ResyncMFADeviceResponse'
| fmapfmapfmap/amazonka | amazonka-iam/gen/Network/AWS/IAM/ResyncMFADevice.hs | mpl-2.0 | 4,615 | 0 | 9 | 891 | 587 | 356 | 231 | 80 | 1 |
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, write to the Free Software
-- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
--
{-# LANGUAGE TypeSynonymInstances,ScopedTypeVariables,FlexibleInstances,UndecidableInstances,OverlappingInstances,PatternGuards,FlexibleContexts #-}
module Rpc.Variables
( DBusTypeable (..)
, Variable (..)
) where
import Control.Applicative
import Data.Maybe
import Data.Int
import Data.Word
import Data.String
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.UTF8 as BUTF8
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import qualified Data.Map as M
import Data.Map (Map)
import Rpc.Types
import qualified Network.DBus as D
--
-- instances for easier mapping of complex Haskell types, e.g. maps and lists, to dbus counterparts
--
-- we need a class to infer dbus type from haskell one
class DBusTypeable a where
dbusType :: a -> D.SignatureElem
  -- required for specialised instance for [Char]
dbusListType :: [a] -> D.SignatureElem
dbusListType _
= let elemT = dbusType (undefined :: a)
in D.SigArray elemT
-- | convert between haskell & dbus types
class Variable a where
fromVariant :: DBusValue -> Maybe a
toVariant :: a -> DBusValue
-- required to give specialised instance for [Word8], [Char]
listFromVariant :: DBusTypeable a => DBusValue -> Maybe [a]
listFromVariant = listFromVariant_
listToVariant :: DBusTypeable a => [a] -> DBusValue
listToVariant = listToVariant_
listToVariant_ :: forall a. (DBusTypeable a, Variable a) => [a] -> DBusValue
listToVariant_ xs
= let elemT = dbusType (undefined :: a) in
D.DBusArray elemT $ map toVariant xs
listFromVariant_ :: forall a. (DBusTypeable a, Variable a) => DBusValue -> Maybe [a]
listFromVariant_ = f where
f (D.DBusArray t xs) | t == dbusType (undefined :: a)
= mapM fromVariant xs
f _ = Nothing
-- map standard types to dbus types
instance DBusTypeable Bool where
dbusType _ = D.SigBool
instance DBusTypeable Word8 where
dbusType _ = D.SigByte
instance DBusTypeable Int16 where
dbusType _ = D.SigInt16
instance DBusTypeable Int32 where
dbusType _ = D.SigInt32
instance DBusTypeable Int64 where
dbusType _ = D.SigInt64
instance DBusTypeable Word16 where
dbusType _ = D.SigUInt16
instance DBusTypeable Word32 where
dbusType _ = D.SigUInt32
instance DBusTypeable Word64 where
dbusType _ = D.SigUInt64
instance DBusTypeable Char where
dbusType _ = D.SigString
dbusListType _ = D.SigString
instance DBusTypeable TL.Text where
dbusType _ = D.SigString
instance DBusTypeable Double where
dbusType _ = D.SigDouble
instance DBusTypeable D.DBusValue where
dbusType _ = D.SigVariant
instance DBusTypeable ObjectPath where
dbusType _ = D.SigObjectPath
instance DBusTypeable D.SignatureElem where
dbusType _ = D.SigSignature
dbusListType _ = D.SigSignature
instance (DBusTypeable a) => DBusTypeable [a] where
dbusType = dbusListType
instance (Ord k, DBusTypeable k, DBusTypeable v) => DBusTypeable (Map k v) where
dbusType _ = let keyT = dbusType (undefined :: k)
elemT = dbusType (undefined :: v)
in
D.SigArray (D.SigDict keyT elemT)
-- | some mind numbing Variable instances
instance Variable TL.Text where
toVariant = D.DBusString . D.PackedString . B.concat . BL.toChunks . TLE.encodeUtf8
fromVariant (D.DBusString (D.PackedString x)) = Just (TLE.decodeUtf8 . BL.fromChunks . (:[]) $ x)
fromVariant _ = Nothing
instance Variable B.ByteString where
toVariant = D.DBusByteArray
fromVariant (D.DBusByteArray b) = Just b
fromVariant (D.DBusArray D.SigByte xs) = B.pack <$> mapM fromVariant xs
fromVariant _ = Nothing
instance Variable Bool where
toVariant = D.DBusBoolean
fromVariant (D.DBusBoolean x) = Just x
fromVariant _ = Nothing
-- marshall char as dbus string
instance Variable Char where
toVariant c = listToVariant [c]
fromVariant v = case listFromVariant v of
Just (x:_) -> Just x
_ -> Nothing
-- handle [Char] aka String
listToVariant = D.DBusString . D.PackedString . BUTF8.fromString
listFromVariant (D.DBusString (D.PackedString x)) = Just $ BUTF8.toString x
listFromVariant _ = Nothing
instance Variable Word8 where
toVariant = D.DBusByte
fromVariant (D.DBusByte x) = Just x
fromVariant _ = Nothing
listToVariant = D.DBusByteArray . B.pack
listFromVariant (D.DBusArray D.SigByte xs) = mapM fromVariant xs
listFromVariant (D.DBusByteArray xs) = Just $ B.unpack xs
listFromVariant _ = Nothing
instance Variable Word16 where
toVariant = D.DBusUInt16
fromVariant (D.DBusUInt16 x) = Just x
fromVariant _ = Nothing
instance Variable Word32 where
toVariant = D.DBusUInt32
fromVariant (D.DBusUInt32 x) = Just x
fromVariant _ = Nothing
instance Variable Word64 where
toVariant = D.DBusUInt64
fromVariant (D.DBusUInt64 x) = Just x
fromVariant _ = Nothing
instance Variable Int16 where
toVariant = D.DBusInt16
fromVariant (D.DBusInt16 x) = Just x
fromVariant _ = Nothing
instance Variable Int32 where
toVariant = D.DBusInt32
fromVariant (D.DBusInt32 x) = Just x
fromVariant _ = Nothing
instance Variable Int64 where
toVariant = D.DBusInt64
fromVariant (D.DBusInt64 x) = Just x
fromVariant _ = Nothing
instance Variable Double where
toVariant = D.DBusDouble
fromVariant (D.DBusDouble x) = Just x
fromVariant _ = Nothing
instance Variable D.DBusValue where
toVariant = D.DBusVariant
fromVariant (D.DBusVariant x) = Just x
fromVariant _ = Nothing
instance Variable ObjectPath where
fromVariant (D.DBusObjectPath p) = Just . mkObjectPath_ . TL.pack $ show p
fromVariant _ = Nothing
toVariant p = D.DBusObjectPath . fromString . TL.unpack . strObjectPath $ p
instance Variable D.SignatureElem where
toVariant c = listToVariant [c]
fromVariant v = case listFromVariant v of
Just (x:_) -> Just x
_ -> Nothing
listToVariant = D.DBusSignature
listFromVariant (D.DBusSignature x) = Just x
listFromVariant _ = Nothing
-- these are equivalent to strings actually, instances for convenience
instance Variable BusName where
fromVariant x = mkBusName_ <$> fromVariant x
toVariant = toVariant . strBusName
instance Variable InterfaceName where
fromVariant x = mkInterfaceName_ <$> fromVariant x
toVariant = toVariant . strInterfaceName
instance Variable MemberName where
fromVariant x = mkMemberName_ <$> fromVariant x
toVariant = toVariant . strMemberName
-- a list of values whose dbus type we can infer is itself a variable
instance (DBusTypeable a, Variable a) => Variable [a] where
toVariant = listToVariant
fromVariant = listFromVariant
-- map is a variable if we can infer types of keys and elements, and they're variables
instance (Ord k, DBusTypeable k, Variable k, DBusTypeable v, Variable v) => Variable (Map k v) where
toVariant m =
let keyT = dbusType (undefined :: k)
elemT = dbusType (undefined :: v)
elems = map (\(k,v) -> D.DBusDict (toVariant k) (toVariant v)) $ M.toList m
in D.DBusArray (D.SigDict keyT elemT) elems
fromVariant (D.DBusArray (D.SigDict keyT elemT) xs)
| keyT == dbusType (undefined :: k)
, elemT == dbusType (undefined :: v)
= M.fromList <$> sequence (map mk xs)
where mk (D.DBusDict k v) = (,) <$> fromVariant k <*> fromVariant v
mk _ = Nothing
fromVariant _ = Nothing
| OpenXT/xclibs | xch-rpc/Rpc/Variables.hs | lgpl-2.1 | 8,325 | 0 | 16 | 1,709 | 2,265 | 1,183 | 1,082 | 175 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Routines for formatting dbus values
--
-- In particular, types and variants
module DBusBrowser.Formatting where
import DBus
import Data.Text (Text)
import qualified Data.Text as T
import Data.Maybe
import Unparse
formatType :: Type -> Text
formatType = T.pack . unparse
formatVariant :: Variant -> Text
formatVariant v = fromJust $ T.stripPrefix "Variant " $ T.pack $ show v
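-- Illustrative examples (the exact parenthesisation comes from the 'Unparse'
-- instance below, so treat these as a sketch rather than a guarantee):
--
-- > formatType TypeWord32 == "UInt32"
-- > formatType (TypeArray TypeString) == "Array String"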
instance Unparse Type where
precedence (TypeArray _) = 0
precedence (TypeDictionary _ _) = 0
precedence (TypeStructure _) = 0
precedence _ = 1
associativity _ = NoneAssoc
printRec f t = case t of
TypeBoolean -> "Bool"
TypeWord8 -> "UInt8"
TypeWord16 -> "UInt16"
TypeWord32 -> "UInt32"
TypeWord64 -> "UInt64"
TypeInt16 -> "Int16"
TypeInt32 -> "Int32"
TypeInt64 -> "Int64"
TypeDouble -> "Double"
TypeString -> "String"
TypeSignature -> "Signature"
TypeObjectPath -> "ObjectPath"
TypeVariant -> "Variant"
TypeArray t' -> "Array " +> f t'
TypeDictionary t1 t2 -> "Dict " +> f t1 ++ " " +> f t2
TypeStructure ts -> foldr (\t' s -> ((s ++ " ") +> f t')) "Struct" ts
TypeUnixFd -> "Filedescriptor"
| hpdeifel/dbus-browser | src/DBusBrowser/Formatting.hs | bsd-3-clause | 1,231 | 0 | 15 | 270 | 353 | 183 | 170 | 36 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.MC
-- Copyright : Copyright (c) 2010, Patrick Perry <[email protected]>
-- License : BSD3
-- Maintainer : Patrick Perry <[email protected]>
-- Stability : experimental
--
-- A monad and monad transformer for Monte Carlo computations.
--
module Control.Monad.MC (
module Control.Monad.MC.GSL
) where
import Control.Monad.MC.GSL
| beni55/hs-monte-carlo | lib/Control/Monad/MC.hs | bsd-3-clause | 457 | 0 | 5 | 70 | 34 | 27 | 7 | 3 | 0 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ar-SA">
<title>JSON View</title>
<maps>
<homeID>jsonview</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/jsonview/src/main/javahelp/help_ar_SA/helpset_ar_SA.hs | apache-2.0 | 959 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
module A2 where
f x y = x + y * 42
| kmate/HaRe | old/testing/comSubexpElim/A2.hs | bsd-3-clause | 38 | 0 | 6 | 15 | 22 | 12 | 10 | 2 | 1 |
module Distribution.Simple.Test.Log
( PackageLog(..)
, TestLogs(..)
, TestSuiteLog(..)
, countTestResults
, localPackageLog
, summarizePackage
, summarizeSuiteFinish, summarizeSuiteStart
, summarizeTest
, suiteError, suiteFailed, suitePassed
, testSuiteLogPath
) where
import Distribution.Package ( PackageId )
import qualified Distribution.PackageDescription as PD
import Distribution.Simple.Compiler ( Compiler(..), compilerInfo, CompilerId )
import Distribution.Simple.InstallDirs
( fromPathTemplate, initialPathTemplateEnv, PathTemplateVariable(..)
, substPathTemplate , toPathTemplate, PathTemplate )
import qualified Distribution.Simple.LocalBuildInfo as LBI
import Distribution.Simple.Setup ( TestShowDetails(..) )
import Distribution.Simple.Utils ( notice )
import Distribution.System ( Platform )
import Distribution.TestSuite ( Options, Result(..) )
import Distribution.Verbosity ( Verbosity )
import Control.Monad ( when )
import Data.Char ( toUpper )
-- | Logs all test results for a package, broken down first by test suite and
-- then by test case.
data PackageLog = PackageLog
{ package :: PackageId
, compiler :: CompilerId
, platform :: Platform
, testSuites :: [TestSuiteLog]
}
deriving (Read, Show, Eq)
-- | A 'PackageLog' with package and platform information specified.
localPackageLog :: PD.PackageDescription -> LBI.LocalBuildInfo -> PackageLog
localPackageLog pkg_descr lbi = PackageLog
{ package = PD.package pkg_descr
, compiler = compilerId $ LBI.compiler lbi
, platform = LBI.hostPlatform lbi
, testSuites = []
}
-- | Logs test suite results, itemized by test case.
data TestSuiteLog = TestSuiteLog
{ testSuiteName :: String
, testLogs :: TestLogs
, logFile :: FilePath -- path to human-readable log file
}
deriving (Read, Show, Eq)
data TestLogs
= TestLog
{ testName :: String
, testOptionsReturned :: Options
, testResult :: Result
}
| GroupLogs String [TestLogs]
deriving (Read, Show, Eq)
-- | Count the number of pass, fail, and error test results in a 'TestLogs'
-- tree.
countTestResults :: TestLogs
-> (Int, Int, Int) -- ^ Passes, fails, and errors,
-- respectively.
countTestResults = go (0, 0, 0)
where
go (p, f, e) (TestLog { testResult = r }) =
case r of
Pass -> (p + 1, f, e)
Fail _ -> (p, f + 1, e)
Error _ -> (p, f, e + 1)
go (p, f, e) (GroupLogs _ ts) = foldl go (p, f, e) ts
-- | From a 'TestSuiteLog', determine if the test suite passed.
suitePassed :: TestLogs -> Bool
suitePassed l =
case countTestResults l of
(_, 0, 0) -> True
_ -> False
-- | From a 'TestSuiteLog', determine if the test suite failed.
suiteFailed :: TestLogs -> Bool
suiteFailed l =
case countTestResults l of
(_, 0, _) -> False
_ -> True
-- | From a 'TestSuiteLog', determine if the test suite encountered errors.
suiteError :: TestLogs -> Bool
suiteError l =
case countTestResults l of
(_, _, 0) -> False
_ -> True
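-- For instance (illustrative): if 'countTestResults' yields (3,0,0) the suite
-- passed, (2,1,0) means it failed, and (2,0,1) means it errored; the three
-- predicates above are just views on those counts.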
resultString :: TestLogs -> String
resultString l | suiteError l = "error"
| suiteFailed l = "fail"
| otherwise = "pass"
testSuiteLogPath :: PathTemplate
-> PD.PackageDescription
-> LBI.LocalBuildInfo
-> String -- ^ test suite name
-> TestLogs -- ^ test suite results
-> FilePath
testSuiteLogPath template pkg_descr lbi name result =
fromPathTemplate $ substPathTemplate env template
where
env = initialPathTemplateEnv
(PD.package pkg_descr) (LBI.localLibraryName lbi)
(compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi)
++ [ (TestSuiteNameVar, toPathTemplate name)
, (TestSuiteResultVar, toPathTemplate $ resultString result)
]
-- | Print a summary to the console after all test suites have been run
-- indicating the number of successful test suites and cases. Returns 'True' if
-- all test suites passed and 'False' otherwise.
summarizePackage :: Verbosity -> PackageLog -> IO Bool
summarizePackage verbosity packageLog = do
let counts = map (countTestResults . testLogs) $ testSuites packageLog
(passed, failed, errors) = foldl1 addTriple counts
totalCases = passed + failed + errors
passedSuites = length
$ filter (suitePassed . testLogs)
$ testSuites packageLog
totalSuites = length $ testSuites packageLog
notice verbosity $ show passedSuites ++ " of " ++ show totalSuites
++ " test suites (" ++ show passed ++ " of "
++ show totalCases ++ " test cases) passed."
return $! passedSuites == totalSuites
where
addTriple (p1, f1, e1) (p2, f2, e2) = (p1 + p2, f1 + f2, e1 + e2)
-- | Print a summary of a single test case's result to the console, suppressing
-- output for certain verbosity or test filter levels.
summarizeTest :: Verbosity -> TestShowDetails -> TestLogs -> IO ()
summarizeTest _ _ (GroupLogs {}) = return ()
summarizeTest verbosity details t =
when shouldPrint $ notice verbosity $ "Test case " ++ testName t
++ ": " ++ show (testResult t)
where shouldPrint = (details > Never) && (notPassed || details == Always)
notPassed = testResult t /= Pass
-- | Print a summary of the test suite's results on the console, suppressing
-- output for certain verbosity or test filter levels.
summarizeSuiteFinish :: TestSuiteLog -> String
summarizeSuiteFinish testLog = unlines
[ "Test suite " ++ testSuiteName testLog ++ ": " ++ resStr
, "Test suite logged to: " ++ logFile testLog
]
where resStr = map toUpper (resultString $ testLogs testLog)
summarizeSuiteStart :: String -> String
summarizeSuiteStart n = "Test suite " ++ n ++ ": RUNNING...\n"
| rimmington/cabal | Cabal/Distribution/Simple/Test/Log.hs | bsd-3-clause | 6,072 | 0 | 15 | 1,603 | 1,427 | 795 | 632 | 118 | 4 |
{-|
Handles sending packets to galaxies. We need to get their IP addresses
from DNS, which is more complicated.
-- Asynchronous thread per galaxy which handles domain resolution, and can
-- block its own queue of ByteStrings to send.
--
-- Maybe perform the resolution asynchronously, injecting into the resolver
-- queue as a message.
--
-- TODO: Figure out how the real haskell time library works.
-- We've failed to look up the IP. Drop the outbound packet
-- because we have no IP for our galaxy, including possible
-- previous IPs.
{-
- Sending Packets to Galaxies.
- Each galaxy has its own DNS resolution thread.
- Initially, no threads are started.
- To send a message to a galaxy,
- Check to see if it already has a resolution thread.
- If it does, pass the packet to that thread.
- If it doesn't, start a new thread and give it the packet.
- Galaxy resolution threads work as follows:
- First, they are given:
- They know which galaxy they are responsible for.
- They have access to the turfs TVar (shared state with Ames driver).
- They can be given packets (to be send to their galaxy).
- They must be given a way to send UDP packets.
- Next, we loop forever
- In the loop we track:
- the last-known IP address.
- the time when we last looked up the IP address.
- We wait to be given a packet.
- We get the IP address.
- If we looked up the IP address in the last 5 minutes, use the
cached IP address.
- Just use the one from last time.
- Otherwise,
- Do a DNS lookup.
- Go through the turf list one item at a time.
- Try each one.
- If it resolves to one-or-more IP addresses,
- Use the first one.
- If it resolves to zero IP addresses, move on to the next turf.
- If none of the turfs can be used to resolve the IP address,
then we don't know where the galaxy is.
- Drop the packet.
-}
-}
module Urbit.Vere.Ames.DNS
( NetworkMode(..)
, ResolvServ(..)
, resolvServ
, galaxyPort
, renderGalaxy
)
where
import Urbit.Prelude
import Network.Socket
import Urbit.Arvo hiding (Fake)
import qualified Data.Map.Strict as M
import qualified Urbit.Noun.Time as Time
import qualified Urbit.Ob as Ob
-- Types -----------------------------------------------------------------------
data NetworkMode = Fake | Localhost | Real | NoNetwork
deriving (Eq, Ord, Show)
data ResolvServ = ResolvServ
{ rsSend :: Galaxy -> ByteString -> IO ()
, rsKill :: IO ()
}
-- Utils -----------------------------------------------------------------------
galaxyPort :: NetworkMode -> Galaxy -> PortNumber
galaxyPort Fake (Patp g) = fromIntegral g + 31337
galaxyPort Localhost (Patp g) = fromIntegral g + 13337
galaxyPort Real (Patp g) = fromIntegral g + 13337
galaxyPort NoNetwork _ = fromIntegral 0
turfText :: Turf -> Text
turfText = intercalate "." . reverse . fmap unCord . unTurf
renderGalaxy :: Galaxy -> Text
renderGalaxy = Ob.renderPatp . Ob.patp . fromIntegral . unPatp
galaxyHostname :: Galaxy -> Turf -> Text
galaxyHostname g t = galaName g ++ "." ++ turfText t
where
stripSig :: Text -> Text
stripSig inp = fromMaybe inp (stripPrefix "~" inp)
galaName :: Galaxy -> Text
galaName = stripSig . renderGalaxy
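-- Worked example (illustrative): galaxy 0 renders as ~zod, so for a 'Turf'
-- spelling "urbit.org" (the concrete turf value is assumed here) we get
--
-- > galaxyPort Fake (Patp 0) == 31337
-- > galaxyHostname (Patp 0) turf == "zod.urbit.org"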
resolv :: Galaxy -> [Turf] -> IO (Maybe (Turf, Text, PortNumber, SockAddr))
resolv gal = go
where
go = \case
[] -> pure Nothing
turf : turfs -> do
let host = galaxyHostname gal turf
port = galaxyPort Real gal
getAddrInfo Nothing (Just (unpack host)) (Just (show port)) >>= \case
[] -> go turfs
ip : _ -> pure $ Just (turf, host, port, addrAddress ip)
doResolv
:: HasLogFunc e
=> Galaxy
-> (Time.Wen, Maybe SockAddr)
-> [Turf]
-> (Text -> RIO e ())
-> RIO e (Maybe SockAddr, Time.Wen)
doResolv gal (prevWen, prevIP) turfs stderr = do
current <- io $ Time.now
if (Time.gap current prevWen ^. Time.secs) < 300
then pure (prevIP, prevWen)
else do
tim <- io (Time.now)
io (resolv gal turfs) >>= \case
Nothing -> do
stderr $ "ames: czar at " ++ galStr ++ ": not found"
logInfo $ displayShow ("(ames) Failed to lookup IP for ", gal)
pure (prevIP, tim)
Just (turf, host, port, addr) -> do
when (Just addr /= prevIP) (printCzar addr)
logInfo $ displayShow ("(ames) Looked up ", host, port, turf, addr)
pure (Just addr, tim)
where
galStr = renderGalaxy gal
printCzar addr = stderr $ "ames: czar " ++ galStr ++ ": ip " ++ tshow addr
resolvWorker
:: forall e
. HasLogFunc e
=> Galaxy
-> TVar (Maybe [Turf])
-> TVar (Time.Wen, Maybe SockAddr)
-> STM ByteString
-> (SockAddr -> ByteString -> IO ())
-> (Text -> RIO e ())
-> RIO e (Async ())
resolvWorker gal vTurfs vLast waitMsg send stderr = async (forever go)
where
logDrop =
logInfo $ displayShow ("(ames) Dropping packet; no ip for galaxy ", gal)
go :: RIO e ()
go = do
(packt, turfs, (lastTime, lastAddr)) <- atomically
((,,) <$> waitMsg <*> readTVar vTurfs <*> readTVar vLast)
(newAddr, newTime) <- doResolv gal
(lastTime, lastAddr)
(fromMaybe [] turfs)
stderr
maybe logDrop (\ip -> io (send ip packt)) newAddr
atomically $ writeTVar vLast (newTime, newAddr)
resolvServ
:: HasLogFunc e
=> TVar (Maybe [Turf])
-> (SockAddr -> ByteString -> IO ())
-> (Text -> RIO e ())
-> RIO e ResolvServ
resolvServ vTurfs send stderr = do
vGala <- newTVarIO (mempty :: Map Galaxy (Async (), TQueue ByteString))
vDead <- newTVarIO False
envir <- ask
let spawnWorker :: Galaxy -> IO (Async (), TQueue ByteString)
spawnWorker gal = runRIO envir $ do
que <- newTQueueIO
las <- newTVarIO (Time.unixEpoch, Nothing)
tid <- resolvWorker gal vTurfs las (readTQueue que) send stderr
pure (tid, que)
let getWorker :: Galaxy -> IO (Async (), TQueue ByteString)
getWorker gal = do
(fmap (lookup gal) $ atomically $ readTVar vGala) >>= \case
Just (tid, que) -> do
pure (tid, que)
Nothing -> do
(tid, que) <- spawnWorker gal
atomically $ modifyTVar' vGala (M.insert gal (tid, que))
pure (tid, que)
let doSend :: Galaxy -> ByteString -> IO ()
doSend gal byt = do
dead <- atomically (readTVar vDead)
unless dead $ do
(_, que) <- getWorker gal
atomically (writeTQueue que byt)
let doKill :: IO ()
doKill = do
galas <- atomically $ do
writeTVar vDead True
readTVar vGala
for_ galas (cancel . fst)
pure (ResolvServ doSend doKill)
| urbit/urbit | pkg/hs/urbit-king/lib/Urbit/Vere/Ames/DNS.hs | mit | 6,887 | 0 | 22 | 1,919 | 1,805 | 914 | 891 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeApplications #-}
{-# OPTIONS_GHC -Wno-unticked-promoted-constructors #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Language.LSP.Types.Parsing where
import Language.LSP.Types.LspId
import Language.LSP.Types.Method
import Language.LSP.Types.Message
import Data.Aeson
import Data.Aeson.Types
import Data.GADT.Compare
import Data.Type.Equality
import Data.Function (on)
-- ---------------------------------------------------------------------
-- Working with arbitrary messages
-- ---------------------------------------------------------------------
data FromServerMessage' a where
FromServerMess :: forall t (m :: Method FromServer t) a. SMethod m -> Message m -> FromServerMessage' a
FromServerRsp :: forall (m :: Method FromClient Request) a. a m -> ResponseMessage m -> FromServerMessage' a
type FromServerMessage = FromServerMessage' SMethod
instance Eq FromServerMessage where
(==) = (==) `on` toJSON
instance Show FromServerMessage where
show = show . toJSON
instance ToJSON FromServerMessage where
toJSON (FromServerMess m p) = serverMethodJSON m (toJSON p)
toJSON (FromServerRsp m p) = clientResponseJSON m (toJSON p)
fromServerNot :: forall (m :: Method FromServer Notification).
Message m ~ NotificationMessage m => NotificationMessage m -> FromServerMessage
fromServerNot m@NotificationMessage{_method=meth} = FromServerMess meth m
fromServerReq :: forall (m :: Method FromServer Request).
Message m ~ RequestMessage m => RequestMessage m -> FromServerMessage
fromServerReq m@RequestMessage{_method=meth} = FromServerMess meth m
data FromClientMessage' a where
FromClientMess :: forall t (m :: Method FromClient t) a. SMethod m -> Message m -> FromClientMessage' a
FromClientRsp :: forall (m :: Method FromServer Request) a. a m -> ResponseMessage m -> FromClientMessage' a
type FromClientMessage = FromClientMessage' SMethod
instance ToJSON FromClientMessage where
toJSON (FromClientMess m p) = clientMethodJSON m (toJSON p)
toJSON (FromClientRsp m p) = serverResponseJSON m (toJSON p)
fromClientNot :: forall (m :: Method FromClient Notification).
Message m ~ NotificationMessage m => NotificationMessage m -> FromClientMessage
fromClientNot m@NotificationMessage{_method=meth} = FromClientMess meth m
fromClientReq :: forall (m :: Method FromClient Request).
Message m ~ RequestMessage m => RequestMessage m -> FromClientMessage
fromClientReq m@RequestMessage{_method=meth} = FromClientMess meth m
-- ---------------------------------------------------------------------
-- Parsing
-- ---------------------------------------------------------------------
type LookupFunc f a = forall (m :: Method f Request). LspId m -> Maybe (SMethod m, a m)
{-
Message Types we must handle are the following
Request      | jsonrpc | id | method | params?
Response     | jsonrpc | id |        |         | response? | error?
Notification | jsonrpc |    | method | params?
-}
{-# INLINE parseServerMessage #-}
parseServerMessage :: LookupFunc FromClient a -> Value -> Parser (FromServerMessage' a)
parseServerMessage lookupId v@(Object o) = do
methMaybe <- o .:! "method"
idMaybe <- o .:! "id"
case methMaybe of
-- Request or Notification
Just (SomeServerMethod m) ->
case splitServerMethod m of
IsServerNot -> FromServerMess m <$> parseJSON v
IsServerReq -> FromServerMess m <$> parseJSON v
IsServerEither | SCustomMethod cm <- m -> do
case idMaybe of
-- Request
Just _ ->
let m' = (SCustomMethod cm :: SMethod (CustomMethod :: Method FromServer Request))
in FromServerMess m' <$> parseJSON v
Nothing ->
let m' = (SCustomMethod cm :: SMethod (CustomMethod :: Method FromServer Notification))
in FromServerMess m' <$> parseJSON v
Nothing -> do
case idMaybe of
Just i -> do
case lookupId i of
Just (m,res) -> clientResponseJSON m $ FromServerRsp res <$> parseJSON v
Nothing -> fail $ unwords ["Failed in looking up response type of", show v]
Nothing -> fail $ unwords ["Got unexpected message without method or id"]
parseServerMessage _ v = fail $ unwords ["parseServerMessage expected object, got:",show v]
{-# INLINE parseClientMessage #-}
parseClientMessage :: LookupFunc FromServer a -> Value -> Parser (FromClientMessage' a)
parseClientMessage lookupId v@(Object o) = do
methMaybe <- o .:! "method"
idMaybe <- o .:! "id"
case methMaybe of
-- Request or Notification
Just (SomeClientMethod m) ->
case splitClientMethod m of
IsClientNot -> FromClientMess m <$> parseJSON v
IsClientReq -> FromClientMess m <$> parseJSON v
IsClientEither | SCustomMethod cm <- m -> do
case idMaybe of
-- Request
Just _ ->
let m' = (SCustomMethod cm :: SMethod (CustomMethod :: Method FromClient Request))
in FromClientMess m' <$> parseJSON v
Nothing ->
let m' = (SCustomMethod cm :: SMethod (CustomMethod :: Method FromClient Notification))
in FromClientMess m' <$> parseJSON v
Nothing -> do
case idMaybe of
Just i -> do
case lookupId i of
Just (m,res) -> serverResponseJSON m $ FromClientRsp res <$> parseJSON v
Nothing -> fail $ unwords ["Failed in looking up response type of", show v]
Nothing -> fail $ unwords ["Got unexpected message without method or id"]
parseClientMessage _ v = fail $ unwords ["parseClientMessage expected object, got:",show v]
-- ---------------------------------------------------------------------
-- Helper Utilities
-- ---------------------------------------------------------------------
{-# INLINE clientResponseJSON #-}
clientResponseJSON :: SClientMethod m -> (HasJSON (ResponseMessage m) => x) -> x
clientResponseJSON m x = case splitClientMethod m of
IsClientReq -> x
IsClientEither -> x
{-# INLINE serverResponseJSON #-}
serverResponseJSON :: SServerMethod m -> (HasJSON (ResponseMessage m) => x) -> x
serverResponseJSON m x = case splitServerMethod m of
IsServerReq -> x
IsServerEither -> x
{-# INLINE clientMethodJSON#-}
clientMethodJSON :: SClientMethod m -> (ToJSON (ClientMessage m) => x) -> x
clientMethodJSON m x =
case splitClientMethod m of
IsClientNot -> x
IsClientReq -> x
IsClientEither -> x
{-# INLINE serverMethodJSON #-}
serverMethodJSON :: SServerMethod m -> (ToJSON (ServerMessage m) => x) -> x
serverMethodJSON m x =
case splitServerMethod m of
IsServerNot -> x
IsServerReq -> x
IsServerEither -> x
type HasJSON a = (ToJSON a,FromJSON a,Eq a)
-- Reify universal properties about Client/Server Messages
data ClientNotOrReq (m :: Method FromClient t) where
IsClientNot
:: ( HasJSON (ClientMessage m)
, Message m ~ NotificationMessage m)
=> ClientNotOrReq (m :: Method FromClient Notification)
IsClientReq
:: forall (m :: Method FromClient Request).
( HasJSON (ClientMessage m)
, HasJSON (ResponseMessage m)
, Message m ~ RequestMessage m)
=> ClientNotOrReq m
IsClientEither
:: ClientNotOrReq CustomMethod
data ServerNotOrReq (m :: Method FromServer t) where
IsServerNot
:: ( HasJSON (ServerMessage m)
, Message m ~ NotificationMessage m)
=> ServerNotOrReq (m :: Method FromServer Notification)
IsServerReq
:: forall (m :: Method FromServer Request).
( HasJSON (ServerMessage m)
, HasJSON (ResponseMessage m)
, Message m ~ RequestMessage m)
=> ServerNotOrReq m
IsServerEither
:: ServerNotOrReq CustomMethod
{-# INLINE splitClientMethod #-}
splitClientMethod :: SClientMethod m -> ClientNotOrReq m
splitClientMethod SInitialize = IsClientReq
splitClientMethod SInitialized = IsClientNot
splitClientMethod SShutdown = IsClientReq
splitClientMethod SExit = IsClientNot
splitClientMethod SWorkspaceDidChangeWorkspaceFolders = IsClientNot
splitClientMethod SWorkspaceDidChangeConfiguration = IsClientNot
splitClientMethod SWorkspaceDidChangeWatchedFiles = IsClientNot
splitClientMethod SWorkspaceSymbol = IsClientReq
splitClientMethod SWorkspaceExecuteCommand = IsClientReq
splitClientMethod SWindowWorkDoneProgressCancel = IsClientNot
splitClientMethod STextDocumentDidOpen = IsClientNot
splitClientMethod STextDocumentDidChange = IsClientNot
splitClientMethod STextDocumentWillSave = IsClientNot
splitClientMethod STextDocumentWillSaveWaitUntil = IsClientReq
splitClientMethod STextDocumentDidSave = IsClientNot
splitClientMethod STextDocumentDidClose = IsClientNot
splitClientMethod STextDocumentCompletion = IsClientReq
splitClientMethod SCompletionItemResolve = IsClientReq
splitClientMethod STextDocumentHover = IsClientReq
splitClientMethod STextDocumentSignatureHelp = IsClientReq
splitClientMethod STextDocumentDeclaration = IsClientReq
splitClientMethod STextDocumentDefinition = IsClientReq
splitClientMethod STextDocumentTypeDefinition = IsClientReq
splitClientMethod STextDocumentImplementation = IsClientReq
splitClientMethod STextDocumentReferences = IsClientReq
splitClientMethod STextDocumentDocumentHighlight = IsClientReq
splitClientMethod STextDocumentDocumentSymbol = IsClientReq
splitClientMethod STextDocumentCodeAction = IsClientReq
splitClientMethod STextDocumentCodeLens = IsClientReq
splitClientMethod SCodeLensResolve = IsClientReq
splitClientMethod STextDocumentDocumentLink = IsClientReq
splitClientMethod SDocumentLinkResolve = IsClientReq
splitClientMethod STextDocumentDocumentColor = IsClientReq
splitClientMethod STextDocumentColorPresentation = IsClientReq
splitClientMethod STextDocumentFormatting = IsClientReq
splitClientMethod STextDocumentRangeFormatting = IsClientReq
splitClientMethod STextDocumentOnTypeFormatting = IsClientReq
splitClientMethod STextDocumentRename = IsClientReq
splitClientMethod STextDocumentPrepareRename = IsClientReq
splitClientMethod STextDocumentFoldingRange = IsClientReq
splitClientMethod STextDocumentSelectionRange = IsClientReq
splitClientMethod STextDocumentPrepareCallHierarchy = IsClientReq
splitClientMethod SCallHierarchyIncomingCalls = IsClientReq
splitClientMethod SCallHierarchyOutgoingCalls = IsClientReq
splitClientMethod STextDocumentSemanticTokens = IsClientReq
splitClientMethod STextDocumentSemanticTokensFull = IsClientReq
splitClientMethod STextDocumentSemanticTokensFullDelta = IsClientReq
splitClientMethod STextDocumentSemanticTokensRange = IsClientReq
splitClientMethod SWorkspaceSemanticTokensRefresh = IsClientReq
splitClientMethod SCancelRequest = IsClientNot
splitClientMethod SCustomMethod{} = IsClientEither
{-# INLINE splitServerMethod #-}
splitServerMethod :: SServerMethod m -> ServerNotOrReq m
splitServerMethod SWindowShowMessage = IsServerNot
splitServerMethod SWindowShowMessageRequest = IsServerReq
splitServerMethod SWindowShowDocument = IsServerReq
splitServerMethod SWindowLogMessage = IsServerNot
splitServerMethod SWindowWorkDoneProgressCreate = IsServerReq
splitServerMethod SProgress = IsServerNot
splitServerMethod STelemetryEvent = IsServerNot
splitServerMethod SClientRegisterCapability = IsServerReq
splitServerMethod SClientUnregisterCapability = IsServerReq
splitServerMethod SWorkspaceWorkspaceFolders = IsServerReq
splitServerMethod SWorkspaceConfiguration = IsServerReq
splitServerMethod SWorkspaceApplyEdit = IsServerReq
splitServerMethod STextDocumentPublishDiagnostics = IsServerNot
splitServerMethod SCancelRequest = IsServerNot
splitServerMethod SCustomMethod{} = IsServerEither
-- | Given a witness that two custom methods are of the same type, produce a witness that the methods are the same
data CustomEq m1 m2 where
CustomEq
:: (m1 ~ (CustomMethod :: Method f t1), m2 ~ (CustomMethod :: Method f t2))
=> { runCustomEq :: (t1 ~ t2 => m1 :~~: m2) }
-> CustomEq m1 m2
runEq :: (t1 ~ t2)
=> (SMethod m1 -> SMethod m2 -> Maybe (Either (CustomEq m1 m2) (m1 :~~: m2)))
-> SMethod (m1 :: Method f t1)
-> SMethod (m2 :: Method f t2)
-> Maybe (m1 :~~: m2)
runEq f m1 m2 = do
res <- f m1 m2
pure $ case res of
Right eq -> eq
Left ceq -> runCustomEq ceq
-- | Heterogeneous equality on singleton server methods
mEqServer :: SServerMethod m1 -> SServerMethod m2 -> Maybe (Either (CustomEq m1 m2) (m1 :~~: m2))
mEqServer m1 m2 = go (splitServerMethod m1) (splitServerMethod m2)
where
go IsServerNot IsServerNot = do
Refl <- geq m1 m2
pure $ Right HRefl
go IsServerReq IsServerReq = do
Refl <- geq m1 m2
pure $ Right HRefl
go IsServerEither IsServerEither
| SCustomMethod c1 <- m1
, SCustomMethod c2 <- m2
, c1 == c2
= Just $ Left $ CustomEq HRefl
go _ _ = Nothing
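-- For example (illustrative): @mEqServer SWindowLogMessage SWindowLogMessage@
-- is @Just (Right HRefl)@, whereas two 'SCustomMethod' singletons with equal
-- names compare as @Just (Left ...)@, since the name alone cannot recover
-- whether the method was a request or a notification.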
-- | Heterogeneous equality on singleton client methods
mEqClient :: SClientMethod m1 -> SClientMethod m2 -> Maybe (Either (CustomEq m1 m2) (m1 :~~: m2))
mEqClient m1 m2 = go (splitClientMethod m1) (splitClientMethod m2)
where
go IsClientNot IsClientNot = do
Refl <- geq m1 m2
pure $ Right HRefl
go IsClientReq IsClientReq = do
Refl <- geq m1 m2
pure $ Right HRefl
go IsClientEither IsClientEither
| SCustomMethod c1 <- m1
, SCustomMethod c2 <- m2
, c1 == c2
= Just $ Left $ CustomEq HRefl
go _ _ = Nothing
| wz1000/haskell-lsp | lsp-types/src/Language/LSP/Types/Parsing.hs | mit | 13,976 | 224 | 24 | 2,629 | 3,211 | 1,668 | 1,543 | -1 | -1 |
module RecordExplicitUnused where
import Language.Haskell.Names (Symbol (Value, symbolModule, symbolName))
foo = Value {symbolModule = undefined}
| serokell/importify | test/test-data/haskell-names@records/04-RecordExplicitUnused.hs | mit | 158 | 0 | 6 | 27 | 36 | 25 | 11 | 3 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module TestBulletML (
spec
) where
import Control.Monad (unless)
import System.Exit (exitFailure)
import Control.Arrow (arr, returnA)
import Control.Category ((<<<), (>>>))
import Data.Tree.NTree.TypeDefs (NTree)
import Debug.Trace (trace)
import Text.XML.HXT.Arrow.XmlArrow (ArrowXml)
import Text.XML.HXT.Arrow.XmlState.TypeDefs (IOStateArrow)
import Text.XML.HXT.Core (constA, deep,
getAttrValue,
getChildren, getText,
hasName, isA, isElem,
listA, no, orElse,
readDocument, readString,
runLA, runX,
withRemoveWS,
withSubstDTDEntities,
withValidate, xread, yes)
import Text.XML.HXT.DOM.TypeDefs (XNode, XmlTree)
import BulletML -- (BulletML(BulletML), Orientation(ONone), Speed(Speed), SpeedOrAccelType(SOATRelative), ActionContent(ACActionRef), Reference(Reference), getBulletML, getSpeed, getActionOrRef)
import Test.Hspec
deg2rad :: Angle -> Double
deg2rad a = fromIntegral a / 180.0 * pi
angleToUnitVector :: Angle -> (Double, Double)
angleToUnitVector a = (cos a', sin a') where a' = deg2rad a
traceShow :: Show a => a -> a
traceShow x = trace (show x) x
--parsedWith :: String -> a -> b
s `parsedWith` f = runLA (xread >>> f) s
spec :: Spec
spec = do
describe "getBulletML" $ do
it "empty bulletml" $ do
"<bulletml></bulletml>" `parsedWith` getBulletML `shouldBe` [BulletML ONone []]
describe "getSpeed" $ do
it "relative speed" $ do
"<speed type='relative'>6</speed>" `parsedWith` getSpeed `shouldBe` [Speed SOATRelative 6]
describe "getActionContent" $ do
it "actionRef" $ do
"<actionRef label='toto'><param>1</param><param>2</param></actionRef>" `parsedWith` getActionContent `shouldBe` [(ACActionRef (Reference "toto" [1.0, 2.0]))]
allClose a b = 1e-15 > abs (a-b)
--prop_angleToUnitVector_0 = let (x,y) = angleToUnitVector 0 in allClose x 1.0 && allClose y 0.0
--prop_angleToUnitVector_90 = let (x,y) = angleToUnitVector 90 in allClose x 0.0 && allClose y 1.0
--prop_angleToUnitVector_180 = let (x,y) = angleToUnitVector 180 in allClose x (-1.0) && allClose y 0.0
--prop_angleToUnitVector_270 = let (x,y) = angleToUnitVector 270 in allClose x 0.0 && allClose y (-1.0)
-- `main` is here so that this module can be run from GHCi on its own. It is
-- not needed for automatic spec discovery.
main :: IO ()
main = hspec spec
| didmar/bulletml-haskell | test/TestBulletML.hs | mit | 2,604 | 0 | 19 | 635 | 541 | 310 | 231 | 45 | 1 |
-- Spirangle.hs
-- copied from https://github.com/sleexyz/hylogen-fun/blob/master/Spirangle.hs
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
module Spirangle where
import Util
output = toProgram (spirangle osc1)
spirangle draw = rgba where
rgba = (line)
& (over (b2a bb))
& (clamp 0 1)
bb = q (texture2D backBuffer) uvN
q x = x
& lmap (view norm)
& lmap (rot ((negate 2 * pi /3) + muchless sin time ))
& lmap (*1.1)
& rmap (clamp 0 1)
mask :: (Veccable n) => Vec n -> Vec n
mask x = (x `gt` 0) ? (0, 1)
gate :: (Veccable n) => Vec n -> Vec n -> Vec n -> Vec n
gate s e x = ((x `geq` s) * (x `lt` e)) ? (1, 0)
line :: Vec4
line = vec4 (v, v, v, 1)
v = 1
& (*(gate (-0.35) (-0.33) (y_ uvN)))
& (*(gate (-0.55) (0.65) (x_ uvN)))
& (*draw)
| lemilonkh/algorithmuss | visuals/Spirangle.hs | mit | 843 | 0 | 18 | 224 | 414 | 225 | 189 | 26 | 1 |
module XMLQuery
where
import XMLQuery.Prelude hiding (Text)
import qualified XMLQuery.Prelude as Prelude
import qualified XMLQuery.AST as AST
-- * Text
-------------------------
-- |
-- Parser in the context of a textual value.
type Text =
Alt AST.Text
-- |
-- Lifts an arbitrary textual parser function to the text-value parser.
--
-- Provides a doorway for composition with such libraries as \"parsec\" or \"attoparsec\".
text :: (Prelude.Text -> Either Prelude.Text a) -> Text a
text =
liftAlt . AST.Text
-- ** Derivatives
-------------------------
-- |
-- Simply extracts the textual value.
textValue :: Text Prelude.Text
textValue =
text pure
-- * Element
-------------------------
type Element =
Alt AST.Element
elementNameText :: Text a -> Element a
elementNameText =
liftAlt . AST.ElementNameText
-- |
-- Parses one of element's attributes without any regard to order.
elementAttr :: Attr a -> Element a
elementAttr =
liftAlt . AST.ElementAttr
-- |
-- Parses all of element's nodes.
--
-- Can be used multiple times,
-- thus allowing for parallel parsing of element's child-nodes.
-- Naturally this will result in traversing the element's nodes multiple times.
elementNodes :: Nodes a -> Element a
elementNodes =
liftAlt . AST.ElementNodes
-- ** Derivatives
-------------------------
elementNameIs :: Prelude.Text -> Element ()
elementNameIs expected =
elementNameText (text textParserFn)
where
textParserFn actual =
if actual == expected
then Right ()
else Left ("elementNameIs: The actual name \"" <> actual <> "\" does not equal the expected \"" <> expected <> "\"")
-- * Attr
-------------------------
type Attr =
Alt AST.Attr
-- |
-- Parses the attribute's name using the provided textual parser.
attrNameText :: Text a -> Attr a
attrNameText =
liftAlt . AST.AttrNameText
-- |
-- Parses the attribute's value using the provided textual parser.
attrValueText :: Text a -> Attr a
attrValueText =
liftAlt . AST.AttrValueText
-- ** Derivatives
-------------------------
-- |
-- A parser, which succeeds if the attribute's name matches the provided value.
attrNameIs :: Prelude.Text -> Attr ()
attrNameIs expected =
attrNameText (text textParserFn)
where
textParserFn actual =
if actual == expected
then Right ()
else Left ("attrNameIs: The actual name \"" <> actual <> "\" does not equal the expected \"" <> expected <> "\"")
-- |
-- A parser, which succeeds if the attribute's value matches the provided value.
attrValueIs :: Prelude.Text -> Attr ()
attrValueIs expected =
attrValueText (text textParserFn)
where
textParserFn actual =
if actual == expected
then Right ()
else Left ("attrValueIs: The actual name \"" <> actual <> "\" does not equal the expected \"" <> expected <> "\"")
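-- A combined sketch (illustrative, not part of the original API; assumes
-- OverloadedStrings for the literals): match an element named "a" and
-- extract the value of its "href" attribute.
--
-- > hrefOfAnchor :: Element Prelude.Text
-- > hrefOfAnchor =
-- >   elementNameIs "a" *> elementAttr (attrNameIs "href" *> attrValueText textValue)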
-- * Nodes
-------------------------
-- |
-- A sequential backtracking parser of nodes.
type Nodes =
Alt AST.Nodes
-- |
-- Parses the next node.
nodesImmediateNode :: Node a -> Nodes a
nodesImmediateNode =
liftAlt . AST.NodesNode
-- |
-- Parses one of the following nodes.
nodesEventualNode :: Node a -> Nodes a
nodesEventualNode node =
fix $ \loop ->
nodesImmediateNode node <|> (nodesImmediateNode (pure ()) *> loop)
-- * Node
-------------------------
type Node =
Alt AST.Node
nodeElement :: Element a -> Node a
nodeElement =
liftAlt . AST.NodeElement
nodeText :: Text a -> Node a
nodeText =
liftAlt . AST.NodeText
| sannsyn/xml-query | library/XMLQuery.hs | mit | 3,475 | 0 | 13 | 682 | 700 | 382 | 318 | 69 | 2 |
import Text.ParserCombinators.Parsec
import Control.Monad
import System.Cmd
import System.Directory
import System.Environment
import TexParser
import StyleSvg
import Config
main =
do args <- getArgs -- get the list of command line arguments
let configFilePath = case args of
[] -> "config" -- default is ./config
xs -> xs !! 0 -- set the config file path
-- read the IO part of the config:
[inputDir, texFileName, targetDir, htmlFileName] <- readConfigIO configFilePath
-- read tex-document contents:
contents <- readFile (inputDir ++ texFileName)
let parsed = parseTex contents -- parse contents into a list
let ext = onlyExternalize parsed -- get a list of the ones to externalize to SVGs
let numberOfFiles = length ext -- how many SVGs are there?
-- read the SVG part of the config:
[standalone_tex_header, document_commands] <- readConfigSVG configFilePath
-- write the externalized tex files to the target directory:
externalize targetDir (texHeader standalone_tex_header parsed) document_commands tex ext
-- compile the externalized tex files to PDFs:
mapM_ system (commandList "" ("pdflatex -output-directory " ++ targetDir) tex numberOfFiles)
-- convert PDFs to SVGs:
mapM_ system (commandList2 targetDir "pdf2svg" pdf svgTmp numberOfFiles)
-- read the contents of the generated SVG files:
svgContents <- mapM readFile (listOfFiles targetDir svgTmp numberOfFiles)
-- translate the parsed tex file to HTML and write it to the target directory:
writeFile (targetDir ++ htmlFileName) (toHTML functionDispatch parsed)
-- remove SVG styles and enable CSS like styling (still empty):
multiWrite (listOfFiles targetDir svg numberOfFiles) (map (cssStyleSVG "") svgContents)
-- remove all files that are not necessary:
mapM_ removeFile (listOfFiles targetDir tex numberOfFiles)
mapM_ removeFile (listOfFiles targetDir ".log" numberOfFiles)
mapM_ removeFile (listOfFiles targetDir ".aux" numberOfFiles)
mapM_ removeFile (listOfFiles targetDir ".out" numberOfFiles)
mapM_ removeFile (listOfFiles targetDir svgTmp numberOfFiles)
mapM_ removeFile (listOfFiles targetDir pdf numberOfFiles)
-- suffixes
svgTmp = ".tmp.svg"
svg = ".svg"
pdf = ".pdf"
tex = ".tex"
-- command to compile to pdf:
texCommand = "pdflatex"
-- command to translate to svg:
svgCommand = "pdf2svg"
-- uses the parseDocument function from TexParser and removes the Either
parseTex :: String -> [Kind]
parseTex input = case parse (parseDocument (0 :: Int) ) "(unknown)" input of
Left str -> []
Right xs -> xs
-- writes multiple files
multiWrite :: [String] -> [String] -> IO ()
multiWrite [] _ = return ()
multiWrite _ [] = return ()
multiWrite (filename:rest) (x:xs) =
do
writeFile filename x
multiWrite rest xs
-- takes a path to a directory, a command, a suffix and the total number of files
-- repeats the command for each file, e.g.
-- $ pdflatex path/1.tex
-- $ pdflatex path/2.tex
commandList :: String -> String -> String -> Int -> [String]
commandList path command arg_suffix n =
zipWith ( (++) . (++ " ") ) (repeat command) (listOfFiles path arg_suffix n)
-- same as commandList but takes two lists of arguments, as in
-- pdf2svg path/1.tex path/1.svg
-- pdf2svg path/2.tex path/2.svg
commandList2 :: String -> String -> String -> String -> Int -> [String]
commandList2 path command arg1_suffix arg2_suffix n =
zipWith ( (++) . (++ " ") ) (commandList path command arg1_suffix n) (listOfFiles path arg2_suffix n)
-- from a path, a suffix and the total number of files, a list is created as
-- [path/1.tex, path/2.tex, ..., path/n.tex]
listOfFiles :: String -> String -> Int -> [String]
listOfFiles path str n = zipWith (++) (repeat path) (zipWith ((++).show) [1..n] (repeat str))
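-- worked example (illustrative):
-- listOfFiles "out/" ".tex" 3 == ["out/1.tex","out/2.tex","out/3.tex"]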
-- translates all parsed input that needs to be externalized to standalone tex documents
externalize :: String -> String -> String -> String -> [Kind] -> IO ()
externalize path header document_commands suffix input =
do
let files = map (toTex document_commands header) input
multiWrite (listOfFiles path suffix (length files)) files
-- takes a dispatch function that translates all parsed input to HTML tags
toHTML :: (Kind -> String) -> [Kind] -> String
toHTML dispatch [] = ""
toHTML dispatch (x:xs) = (dispatch x) ++ (toHTML dispatch xs)
| dino-r/casttex | src/Main.hs | mit | 4,573 | 0 | 13 | 1,021 | 1,049 | 543 | 506 | 64 | 2 |
module Unison.Test.ResourcePool where
import Control.Concurrent
import Control.Applicative
import Control.Monad
import Data.Functor
import Data.IORef (IORef)
import Data.Maybe
import Data.Time (UTCTime,getCurrentTime, addUTCTime)
import Test.Tasty
import Test.Tasty.HUnit
import qualified Control.Concurrent as CC
import qualified Control.Concurrent.MVar as MVar
import qualified Control.Concurrent.Map as M
import qualified Control.Concurrent.STM.TQueue as TQ
import qualified Control.Monad.STM as STM
import qualified Data.Hashable as H
import qualified Data.IORef as IORef
import qualified Unison.Runtime.ResourcePool as RP
type Resource = String
type Params = String
type TestState = M.Map String String
increment :: IORef Int -> IO Int
increment ref = IORef.atomicModifyIORef' ref (\n -> (n+1, n))
decrement :: IORef Int -> IO Int
decrement ref = IORef.atomicModifyIORef' ref (\n -> (n-1, n))
makePool :: RP.MaxPoolSize -> IO (RP.ResourcePool Int Int, Assertion)
makePool maxSize = do
rs <- IORef.newIORef 0
nonce <- IORef.newIORef 0
pool <- RP.make 3 maxSize (\_ -> increment rs >> increment nonce) (\_ -> decrement rs $> ())
pure (pool, IORef.readIORef rs >>= \n -> if n == 0 then pure () else fail $ "count nonzero: " ++ show n)
acquireSequential1 :: Assertion
acquireSequential1 = do
(pool, ok) <- makePool 10
let check n = assertEqual "resource not recycled" 0 n
replicateM_ 100 (RP.acquire pool 42 >>= \(n, release) -> check n >> release)
eventually ok
acquireSequential2 :: Assertion
acquireSequential2 = do
(pool, ok) <- makePool 100
-- let check n = putStrLn $ "got: " ++ show n
let check n = assertEqual "resource not recycled" True (n <= 100)
let acquire i = RP.acquire pool i >>= \(n, release) -> check n >> release
mapM_ acquire ([1..100] ++ [1..100]) -- second 100 acquires should get cached
eventually ok
parallelTraverse :: (a -> IO b) -> [a] -> IO [b]
parallelTraverse f xs = sequence =<< mapM spawn xs where
spawn a = do
r <- MVar.newEmptyMVar
_ <- CC.forkIO $ f a >>= \b -> MVar.putMVar r b
pure (MVar.takeMVar r)
parallelSequence :: [IO a] -> IO [a]
parallelSequence = parallelTraverse id
parallelSequence_ :: [IO a] -> IO ()
parallelSequence_ ios = void $ parallelSequence ios
acquireConcurrent1 :: Assertion
acquireConcurrent1 = do
(pool, ok) <- makePool 10
parallelSequence_ $ replicate 100 (RP.acquire pool 42 >>= \(n, release) -> release)
eventually ok
acquireConcurrent2 :: Assertion
acquireConcurrent2 = do
(pool, ok) <- makePool 100
let acquire i = RP.acquire pool i >>= \(n, release) -> release
_ <- parallelTraverse acquire ([1..100] ++ [1..100])
eventually ok
delaySeconds μs = threadDelay (1000000 * μs)
eventually :: Assertion -> Assertion
eventually cond = go 1 where
go n | n > 8 = cond
go n = cond <|> (delaySeconds n >> go (n*2))
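-- Worked schedule (read off the definition above): the assertion is tried
-- immediately, then retried after 1, 2, 4 and 8 second delays; the attempt
-- after the 8-second delay (n = 16 > 8) is the final one.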
tests :: TestTree
tests = testGroup "ResourcePool"
[ testCase "acquireSequential1" acquireSequential1
, testCase "acquireSequential2" acquireSequential2
, testCase "acquireConcurrent1" acquireConcurrent1
, testCase "acquireConcurrent2" acquireConcurrent2 ]
main = defaultMain tests
| nightscape/platform | node/tests/Unison/Test/ResourcePool.hs | mit | 3,140 | 0 | 13 | 545 | 1,132 | 593 | 539 | 77 | 2 |
module Main where
import Apl1
import BadMonoid
import Control.Applicative
import Data.Monoid
import ListAp
import Test.QuickCheck.Checkers
import Test.QuickCheck.Classes
main :: IO ()
main = do
quickBatch (monoid Twoo)
quickBatch (applicative (undefined :: [(String, String, Int)]))
quickBatch (monoid (ZipList [Sum 1 :: Sum Int]))
quickBatch (monoid (Cons 'a' Nil))
quickBatch (applicative (undefined :: [(List Int, List Int, Int)]))
| JoshuaGross/haskell-learning-log | Code/Haskellbook/Applicatives/src/Main.hs | mit | 517 | 0 | 13 | 138 | 178 | 95 | 83 | 15 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- |
-- Module : Test.Tasty.KAT
-- License : MIT
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : Good
--
-- Tasty support for KAT (Known Answer Tests)
--
module Test.Tasty.KAT
(
-- * Run tests
testKatDetailed
, testKatGrouped
-- * Load KAT resources
, testKatLoad
, Resource(..)
, katLoaderSimple
, mapTestUnits
) where
import Control.Applicative
import Control.Exception
import Data.Typeable
import Test.Tasty (testGroup)
import Test.Tasty.Providers
import Test.Tasty.KAT.FileLoader
newtype Resource a = Resource [(String, [a])]
data TestKatSingle = TestKatSingle (IO Bool)
deriving Typeable
data TestKatGroup = TestKatGroup [(Int, IO Bool)]
deriving Typeable
data KatResult = KatFailed String | KatSuccess
deriving (Show,Eq)
tryResult :: IO Bool -> IO KatResult
tryResult f = do
er <- try f
case er of
Left (e :: SomeException)
| show e == "<<timeout>>" -> throwIO e
| otherwise -> return $ KatFailed (show e)
Right r -> return $ if r then KatSuccess else KatFailed "test failed"
instance IsTest TestKatSingle where
run _ (TestKatSingle tst) _ = do
r <- tryResult tst
case r of
KatSuccess -> return $ testPassed ""
KatFailed s -> return $ testFailed s
testOptions = return []
instance IsTest TestKatGroup where
run _ (TestKatGroup groupTests) _ = do
(success, failed) <- summarize <$> mapM runGroup groupTests
return $
(if failed == 0 then testPassed else testFailed)
(if failed > 0 then (show failed) ++ " tests failed on " ++ show (failed + success)
else (show success) ++ " tests succeed")
where summarize :: [KatResult] -> (Int, Int)
summarize = foldl (\(s,f) k -> if k == KatSuccess then (s+1,f) else (s,f+1)) (0,0)
runGroup :: (Int, IO Bool) -> IO KatResult
runGroup (_, tst) = tryResult tst
--nbGroups = fromIntegral $ length groupTests
--yieldProgress $ Progress { progressText = groupName, progressPercent = fromIntegral tstNb / nbGroups }
testOptions = return []
-- | run one tasty test per vector in each group
--
-- This is useful to have detailed output on what failed
-- and what succeeded. For a more concise output, use
-- 'testKatGrouped'
testKatDetailed :: TestName
-> Resource a
-> (String -> a -> IO Bool)
-> TestTree
testKatDetailed name (Resource groups) test = -- singleTest name $ mkTestKat resource test
testGroup name $ map groupToTests groups
where groupToTests (groupName, vectors) =
testGroup groupName $ map (\(i, v) -> singleTest (show (i :: Int)) (TestKatSingle $ test groupName v)) (zip [1..] vectors)
-- | run one tasty test per group
testKatGrouped :: TestName
-> Resource a
-> (String -> a -> IO Bool)
-> TestTree
testKatGrouped name (Resource groups) test = -- singleTest name $ mkTestKat resource test
testGroup name $ map groupToTests groups
where groupToTests (groupName, vectors) =
singleTest groupName $ TestKatGroup $ map (\(i, v) -> (i, test groupName v)) (zip [1..] vectors)
-- | Read a KAT file into values that will be used for KAT tests
testKatLoad :: FilePath
-> ([String] -> [(String, [a])])
-> IO (Resource a)
testKatLoad filepath transform = Resource . transform . lines <$> readFile filepath
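-- Usage sketch (illustrative only): the file name, the key/value shape
-- produced by 'katLoaderSimple' and the checking function are assumptions for
-- this example, not guarantees of the module; 'defaultMain' is tasty's runner
-- and 'myEncode' stands for whatever function is under test.
--
-- > main = do
-- >   kat <- testKatLoad "KAT/example.kat" katLoaderSimple
-- >   defaultMain (testKatDetailed "example" kat check)
-- >
-- > check _group (input, expected) = return (myEncode input == expected)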
| vincenthz/tasty-kat | Test/Tasty/KAT.hs | mit | 3,678 | 0 | 15 | 1,015 | 980 | 530 | 450 | 70 | 3 |
import Haste
import Haste.DOM
import Haste.Graphics.Canvas
import Basal
steps :: Int
steps = 20000
size :: Int
size = 500
main = do
Just canv <- getCanvasById "field"
Just button <- elemById "button"
_ <- onEvent button OnClick (onButtonClick canv steps)
seed <- newSeed
renderRW canv seed steps
onButtonClick :: Canvas -> Int -> Int -> (Int,Int) -> IO ()
onButtonClick canv n _ _ = do
seed <- newSeed
renderRW canv seed n
renderRW :: Canvas -> Seed -> Int -> IO ()
renderRW canv seed n = let
r = fromIntegral (size `div` 2)
walk = map (\(x,y) -> (x*2,y*2)) $ randWalkN (-1,1) seed n
in do
render canv . translate (r,r) . color backColor . stroke . path $ walk
renderOnTop canv . translate (r,r) .
color mainColor . stroke . path $ eraseWhole walk
mainColor :: Color
mainColor = RGB 0 0 250
backColor :: Color
backColor = RGBA 0 0 0 0.4
eraseLoop :: (Eq a) => [a] -> [a]
eraseLoop [] = []
eraseLoop (x:xs) = case dropWhile (/=x) xs of
[] -> x: eraseLoop xs
(h:rest) -> h : eraseLoop rest
eraseWhole :: (Eq a) => [a] -> [a]
eraseWhole xs = let xs' = eraseLoop xs in
if length xs == length xs' then xs
else eraseWhole xs'
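-- Worked example (illustrative): one pass of 'eraseLoop' removes the first
-- loop it meets, e.g. eraseLoop [1,2,3,2,4] == [1,2,4]; 'eraseWhole' keeps
-- passing until the length stops shrinking, i.e. until no loop is left.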
| lesguillemets/schrammloewner | src/LoopErased.hs | mit | 1,282 | 0 | 15 | 378 | 564 | 286 | 278 | 39 | 2 |
module Solidran.Revc.DetailSpec (spec) where
import Solidran.Revc.Detail
import Test.Hspec
spec :: Spec
spec = do
describe "Solidran.Revc.Detail" $ do
describe "complementDna" $ do
it "should do nothing with an empty string" $ do
complementDna "" `shouldBe` ""
it "should correctly complement a dna string" $ do
complementDna "A" `shouldBe` "T"
complementDna "AAAACCCGGT"
`shouldBe`
"ACCGGGTTTT"
| Jefffrey/Solidran | test/Solidran/Revc/DetailSpec.hs | mit | 526 | 0 | 17 | 183 | 109 | 55 | 54 | 14 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
-- | This module is a duplication of the Control.Monad.Error module from the
-- mtl, for constrained monads.
module Control.Monad.Constrained.Error
(MonadError(..)
,ExceptT(..)
,Except)
where
import GHC.Exts
import Control.Monad.Constrained
import Control.Monad.Constrained.Trans
import Control.Monad.Trans.Except hiding (catchE)
import qualified Control.Monad.Trans.Identity as Identity
import qualified Control.Monad.Trans.Maybe as Maybe
import qualified Control.Monad.Trans.Reader as Reader
import qualified Control.Monad.Trans.State.Lazy as State.Lazy
import qualified Control.Monad.Trans.State.Strict as State.Strict
import qualified Prelude
-- | A class for monads which can error out.
class Monad m =>
MonadError e m | m -> e where
type SuitableError m a :: Constraint
-- | Raise an error.
throwError :: SuitableError m a => e -> m a
{- |
A handler function to handle previous errors and return to normal execution.
A common idiom is:
> do { action1; action2; action3 } `catchError` handler
where the @action@ functions can call 'throwError'.
Note that @handler@ and the do-block must have the same return type.
-}
catchError :: SuitableError m a => m a -> (e -> m a) -> m a
instance MonadError e (Either e) where
type SuitableError (Either e) a = ()
throwError = Left
catchError (Left x) f = f x
catchError r _ = r
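-- For example (illustrative), at type @Either String Int@:
--
-- > throwError "boom" `catchError` (\e -> pure (length e)) == Right 4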
instance (Monad m, Prelude.Monad (Unconstrained m)) => MonadError e (ExceptT e m) where
type SuitableError (ExceptT e m) a = Suitable m (Either e a)
throwError = ExceptT . pure . Left
catchError = catchE
catchE
:: (Monad m, Suitable m (Either e' a))
=> ExceptT e m a
-> (e -> ExceptT e' m a)
-> ExceptT e' m a
catchE m h =
ExceptT $
do a <- runExceptT m
case a of
Left l -> runExceptT (h l)
Right r -> return (Right r)
{-# INLINE catchE #-}
instance MonadError e m => MonadError e (Identity.IdentityT m) where
type SuitableError (Identity.IdentityT m) a = SuitableError m a
throwError = lift . throwError
catchError = Identity.liftCatch catchError
instance (MonadError e m, Prelude.Monad (Unconstrained m)) =>
MonadError e (Maybe.MaybeT m) where
type SuitableError (Maybe.MaybeT m) a
= (SuitableError m a
,SuitableError m (Maybe a)
,Suitable m (Maybe a))
throwError = lift . throwError
catchError = Maybe.liftCatch catchError
instance MonadError e m =>
MonadError e (Reader.ReaderT r m) where
type SuitableError (Reader.ReaderT r m) a = SuitableError m a
throwError = lift . throwError
catchError = Reader.liftCatch catchError
instance (MonadError e m, Prelude.Monad (Unconstrained m)) =>
MonadError e (State.Lazy.StateT s m) where
type SuitableError (State.Lazy.StateT s m) a
= (Suitable m (a, s), SuitableError m (a, s), SuitableError m a)
throwError = lift . throwError
catchError = State.Lazy.liftCatch catchError
instance (MonadError e m, Prelude.Monad (Unconstrained m)) =>
MonadError e (State.Strict.StateT s m) where
type SuitableError (State.Strict.StateT s m) a
= (Suitable m (a, s), SuitableError m (a, s), SuitableError m a)
throwError = lift . throwError
catchError = State.Strict.liftCatch catchError
| oisdk/constrained-monads | src/Control/Monad/Constrained/Error.hs | mit | 3,634 | 0 | 13 | 876 | 1,023 | 556 | 467 | -1 | -1 |
import Typechecker
import Types
import Control.Monad
import Data.Either
import qualified Data.Map as M
import qualified Data.Set as S
testExprs :: [MoleculeExpr]
testExprs = [
EApp (EAbs "x" (EVar "x" :+: EVar "x")) (EInt 10)
, EAbs "x" (EVar "x" :+: EVar "x")
, EApp (EAbs "x" (EVar "x")) (EInt 8)
, EApp (EApp (EAbs "x" (EAbs "y" (EVar "x" :+: EVar "y"))) (EInt 10)) (EInt 10)
, EInt 10
, ETrue
, EFalse
, EApp (EAbs "x" ETrue) (EInt 10)
, EAbs "x" ((EVar "x") :+: EInt 80)
, EAbs "x" ((EVar "x") :|: ETrue)
, EApp (EAbs "x" ETrue) (EVar "x")
, EApp (EAbs "x" $ EApp (EVar "x") (EInt 10)) (EAbs "y" (EVar "y"))
]
failExprs :: [MoleculeExpr]
failExprs = [
ETrue :+: EInt 10
, EAbs "a" (EVar "a" :|: EVar "b")
, EVar "y"
, EApp (EApp (EAbs "x" (EAbs "y" (EVar "x" :+: EVar "y"))) (EInt 10)) (ETrue)
, EAbs "x" (EVar "x")
, EAbs "x" ETrue
]
report :: MoleculeExpr -> Either MoleculeError MoleculeType -> String
report e (Right t) = concat [
"\nSuccess:\n"
, show e
, "\nhas type: "
, show t
]
report e (Left err) = concat [
"\nFailure:\n"
, show e
, "\nfailed with error:\n"
, show err
]
test = do
let rights = map typecheck testExprs
lefts = map typecheck failExprs
putStrLn "rights:"
mapM_ putStrLn $ zipWith report testExprs rights
putStrLn "lefts:"
mapM_ putStrLn $ zipWith report failExprs lefts
guard (all isRight rights)
guard (all isLeft lefts)
putStrLn "\ntests passing."
| 5outh/Molecule | Test.hs | gpl-2.0 | 1,474 | 0 | 15 | 347 | 664 | 332 | 332 | 49 | 1 |
{- |
Module : $Header$
Description : abstract syntax for FPL
Copyright : (c) Christian Maeder, DFKI GmbH 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
abstract syntax for FPL, logic for functional programs
as CASL extension
-}
module Fpl.As where
-- DrIFT command
{-! global: GetRange !-}
import Common.AS_Annotation
import Common.AnnoState
import Common.Doc as Doc
import Common.DocUtils
import Common.Id
import Common.Keywords
import Common.Lexer
import Common.Parsec
import Common.Token hiding (innerList)
import Text.ParserCombinators.Parsec
import CASL.AS_Basic_CASL
import CASL.Formula
import CASL.SortItem
import CASL.OpItem
import CASL.ToDoc
import Data.List (delete)
import Data.Maybe (isNothing)
type FplBasicSpec = BASIC_SPEC FplExt () TermExt
type FplTerm = TERM TermExt
type FplForm = FORMULA TermExt
data FplExt =
FplSortItems [Annoted FplSortItem] Range
| FplOpItems [Annoted FplOpItem] Range
deriving Show
data FplSortItem =
FreeType DATATYPE_DECL
| CaslSortItem (SORT_ITEM TermExt)
deriving Show
data FplOpItem =
FunOp FunDef
| CaslOpItem (OP_ITEM TermExt)
deriving Show
prepPunctBar :: [Doc] -> [Doc]
prepPunctBar = punctuate (Doc.space <> bar)
printDD :: DATATYPE_DECL -> Doc
printDD (Datatype_decl s as _) =
sep [pretty s <+> keyword freeS <+> keyword withS
, sep $ prepPunctBar
$ map (printAnnoted printALTERNATIVE) as ]
instance ListCheck FplOpItem where
innerList i = case i of
FunOp _ -> [()]
CaslOpItem oi -> innerList oi
instance ListCheck FplSortItem where
innerList i = case i of
FreeType _ -> [()]
CaslSortItem si -> innerList si
instance Pretty FplExt where
pretty e = case e of
FplSortItems ds _ ->
topSigKey (sortS ++ pluralS ds) <+> semiAnnos pretty ds
FplOpItems ds _ -> topSigKey (opS ++ pluralS ds) <+> semiAnnos pretty ds
instance Pretty FplSortItem where
pretty e = case e of
FreeType d -> printDD d
CaslSortItem s -> printSortItem s
instance Pretty FplOpItem where
pretty e = case e of
FunOp o -> pretty o
CaslOpItem s -> printOpItem s
data FunDef = FunDef OP_NAME OP_HEAD (Annoted FplTerm) Range
deriving (Show, Eq, Ord)
kindHead :: OpKind -> OP_HEAD -> OP_HEAD
kindHead k (Op_head _ args r ps) = Op_head k args r ps
instance Pretty FunDef where
pretty (FunDef i h@(Op_head _ l ms _) t _) =
let di = idLabelDoc i
et = equals <+> printAnnoted pretty t
in sep $ if isNothing ms && null l then [di, et] else
[keyword functS,
sep [(if null l then sep else cat) [di, pretty $ kindHead Total h], et]]
{- | extra terms of FPL. if-then-else must use a term as guard with result
  sort @Bool@. To allow @true@, @false@ and an equality test, an extra term
parser is needed that must not be used when parsing formulas. -}
data TermExt =
FixDef FunDef -- ^ formula
| Case FplTerm [(FplTerm, FplTerm)] Range
| Let FunDef FplTerm Range
| IfThenElse FplTerm FplTerm FplTerm Range
| EqTerm FplTerm FplTerm Range
| BoolTerm FplTerm
deriving (Show, Eq, Ord)
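{- Informal illustration (not part of the interface): a term
   @IfThenElse g t e rg@ pretty-prints as @if g then t else e@ via the
   'Pretty' instance below; the guard @g@ is an ordinary term that is
   expected to have result sort @Bool@. -}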
instance FormExtension TermExt
instance Pretty TermExt where
pretty t = case t of
FixDef fd -> pretty fd
Case c l _ ->
sep $ (keyword caseS <+> pretty c <+> keyword ofS)
: prepPunctBar
(map (\ (p, e) -> fsep [pretty p, implies, pretty e]) l)
Let fd i _ -> sep [keyword letS <+> pretty fd, keyword inS <+> pretty i]
IfThenElse i d e _ ->
fsep [ keyword ifS <+> pretty i
, keyword thenS <+> pretty d
, keyword elseS <+> pretty e ]
EqTerm t1 t2 r -> pretty $ Equation t1 Strong t2 r
BoolTerm f -> pretty f
fplReservedWords :: [String]
fplReservedWords = [barS, functS, caseS, ofS, letS]
funDef :: [String] -> AParser st FunDef
funDef ks = do
q <- asKey functS
o <- parseId ks
h <- opHead ks
e <- equalT
a <- annos
t <- eqTerm ks
return $ FunDef o (kindHead Partial h)
(Annoted t nullRange a []) $ toRange q [] e
optVarDecls :: [String] -> AParser st ([VAR_DECL], [Token])
optVarDecls ks =
(oParenT >> separatedBy (varDecl ks) anSemi << cParenT)
<|> return ([], [])
constBool :: AParser st FplTerm
constBool = fmap Mixfix_token (asKey trueS <|> asKey falseS)
boolTerm :: [String] -> AParser st FplTerm
boolTerm ks = constBool <|> mixTerm ks
eqTerm :: [String] -> AParser st FplTerm
eqTerm ks = do
b <- boolTerm ks
option b $ do
e <- equalT
b2 <- boolTerm ks
return $ ExtTERM $ EqTerm b b2 $ tokPos e
{- | extra formulas to compare bool terms with true or false.
Interpreting boolean valued terms as formulas is still missing. -}
eqForm :: [String] -> AParser st TermExt
eqForm ks = do
(c, t) <- try $ pair constBool equalT
e <- mixTerm ks
return $ EqTerm c e $ tokPos t
<|> fmap (\ (e, (t, c)) -> EqTerm e c $ tokPos t)
(try $ pair (mixTerm ks) $ pair equalT constBool)
fplTerm :: [String] -> AParser st TermExt
fplTerm ks = caseTerm ks <|> letTerm ks <|> ifThenElse ks
caseTerm :: [String] -> AParser st TermExt
caseTerm ks = do
c <- asKey caseS
t <- eqTerm ks
o <- asKey ofS
(cs, qs) <- separatedBy (patTermPair ks) barT
return $ Case t cs $ toRange c qs o
patTermPair :: [String] -> AParser st (FplTerm, FplTerm)
patTermPair ks = do
p <- eqTerm ks
implKey
t <- eqTerm ks
return (p, t)
letVar :: [String] -> AParser st FunDef
letVar ks = do
v <- varId ks
e <- equalT
a <- annos
t <- eqTerm ks
return $ FunDef (simpleIdToId v) (Op_head Partial [] Nothing nullRange)
(Annoted t nullRange a []) $ tokPos e
letTerm :: [String] -> AParser st TermExt
letTerm ks = do
l <- asKey letS
d <- funDef ks <|> letVar ks
i <- asKey inS
t <- term ks
return $ Let d t $ toRange l [] i
ifThenElse :: [String] -> AParser st TermExt
ifThenElse ks = do
i <- ifKey
f <- eqTerm ks
t <- asKey thenS
a <- eqTerm ks
e <- asKey elseS
b <- eqTerm ks
return $ IfThenElse f a b $ toRange i [t] e
instance TermParser TermExt where
termParser b = if b then fplTerm fplReservedWords else
fmap FixDef (funDef fplReservedWords) <|> eqForm fplReservedWords
fplExt :: [String] -> AParser st FplExt
fplExt ks = itemList ks sortS fplSortItem FplSortItems
<|> itemList (delete functS ks) opS fplOpItem FplOpItems
<|> ((pluralKeyword etypeS <|> pluralKeyword typeS <|> pluralKeyword predS
<|> pluralKeyword esortS <|> asKey generatedS <|> asKey freeS)
>>= \ k -> unexpected $ "CASL keyword '" ++ shows k "'")
fplSortItem :: [String] -> AParser st FplSortItem
fplSortItem ks = do
s <- sortId ks
freeType ks s <|>
fmap CaslSortItem (subSortDecl ks ([s], nullRange) <|> commaSortDecl ks s
<|> isoDecl ks s <|> return (Sort_decl [s] nullRange))
freeType :: [String] -> SORT -> AParser st FplSortItem
freeType ks s = do
f <- asKey freeS
asKey withS
fmap FreeType $ parseDatatype ks s f
fplOpItem :: [String] -> AParser st FplOpItem
fplOpItem ks = fmap FunOp (funDef ks) <|> fmap CaslOpItem (opItem ks)
instance AParsable FplExt where
aparser = fplExt fplReservedWords
| nevrenato/HetsAlloy | Fpl/As.der.hs | gpl-2.0 | 7,160 | 0 | 17 | 1,652 | 2,624 | 1,292 | 1,332 | 192 | 1 |
module Macro.Todo where
import Types
import Macro.Text
import Macro.Theorem
import Control.Monad (when)
import Control.Monad.Reader (asks)
listoftodos :: Note
listoftodos = do
o <- asks conf_todos
when o $ do
packageDep_ "todonotes"
comm0 "listoftodos"
coloredTodo' :: LaTeXC l => l -> l -> l
coloredTodo' = liftL2 (\color l -> TeXComm "todo" [MOptArg ["color=" <> color, "inline", raw "size=\\small"], FixArg l ])
coloredTodo :: Note -> Note -> Note
coloredTodo color n = do
o <- asks conf_todos
when o $ do
packageDep_ "todonotes"
coloredTodo' color n
todo :: Note -> Note
todo = coloredTodo "red"
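-- Usage sketch (illustrative): when 'conf_todos' is enabled, @todo "check this"@
-- emits roughly @\todo[color=red, inline, size=\small]{check this}@;
-- when it is disabled, nothing is produced.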
clarify :: Note -> Note
clarify n = coloredTodo "yellow" $ "Clarify: " <> n
toprove :: Note
toprove = coloredTodo "orange" $ "There is a proof missing here."
toprove_ :: Note -> Note
toprove_ n = todo $ do
n
newline
"There is a proof missing here."
noproof :: Note
noproof = todo "There either is a proof missing here or a confirmation that no proof is required at all."
noproof_ :: Note
noproof_ = footnotesize "No proof."
exneeded :: Note
exneeded = todo "There is an example missing here."
cexneeded :: Note
cexneeded = todo "There is an counter example missing here."
refneeded :: Note -> Note
refneeded n = todo $ do
"There is a reference to "
raw "``" <> n <> raw "''"
" missing here. "
citneeded :: Note
citneeded = todo "Citation needed"
totheorem :: Note -> Note
totheorem th = todo $ "TODO, theorem: " <> th
why :: Note
why = clarify $ "Why? More of an explanation is missing here."
why_ :: Note -> Note
why_ n = clarify $ "Why " <> n <> "?" <> " " <> "More of an explanation is missing here."
-- | Placeholder for future references
placeholder :: Note -> Note
placeholder n = thm $ do
s ["This is a placeholder for future references"]
n
| NorfairKing/the-notes | src/Macro/Todo.hs | gpl-2.0 | 1,907 | 0 | 12 | 477 | 521 | 263 | 258 | 56 | 1 |
--file: ch03/BadIndent.hs
-- This is the left most column
-- Our first declaration is in column 4.
   firstBadIndentation = 1
-- Our second is left of the first, which is illegal!
  secondBadIndentation = 2
| craigem/RealWorldHaskell | ch03/BadIndent.hs | gpl-3.0 | 217 | 1 | 5 | 49 | 17 | 10 | 7 | -1 | -1 |
module QHaskell.Expression.ADTUntypedNamed
(Exp(..)) where
import QHaskell.MyPrelude
import qualified QHaskell.Type.ADT as TA
data Exp x = ConI Word32
| ConB Bool
| ConF Float
| Var x
| Prm x [Exp x]
| Abs (x , Exp x)
| App (Exp x) (Exp x)
| Cnd (Exp x) (Exp x) (Exp x)
| Tpl (Exp x) (Exp x)
| Fst (Exp x)
| Snd (Exp x)
| LeT (Exp x) (x , Exp x)
| Typ TA.Typ (Exp x)
| Int Word32
| Mem (Exp x)
| Fix (Exp x)
deriving instance Eq x => Eq (Exp x)
deriving instance Show x => Show (Exp x)
deriving instance Functor Exp
deriving instance Foldable Exp
deriving instance Traversable Exp
| shayan-najd/QHaskell | QHaskell/Expression/ADTUntypedNamed.hs | gpl-3.0 | 767 | 0 | 8 | 303 | 299 | 164 | 135 | -1 | -1 |
----------------------------------------------------------------------------
-- |
-- Module : $Header$
-- Copyright : (c) Proyecto Theona, 2012-2013
-- (c) Alejandro Gadea, Emmanuel Gunther, Miguel Pagano
-- License : <license>
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Defines the notion of a derivation of a function, based on a
-- specification and a proof, i.e. a derivation that belongs to the
-- function itself.
--
----------------------------------------------------------------------------
module Fun.Derivation (
Derivation (..)
, module Fun.Derivation.Error
, createDerivations
, checkDerivation
)
where
import Fun.Decl
import Fun.Decl.Error
import Fun.Declarations
import Fun.Derivation.Derivation
import Fun.Derivation.Error
import Equ.Proof
import Equ.Expr
import Equ.Rule (getRelationFromType,Relation)
import Equ.TypeChecker (getType)
import Equ.Theories
import Equ.Syntax
import qualified Equ.PreExpr as PE
import qualified Data.Text as T hiding (map,foldl)
import Data.List as L (map, find)
import Data.Either
import qualified Data.Map as M
import Data.Monoid
import Control.Arrow ((***),second)
import Control.Monad
import Control.Lens
-- | From the declarations, builds the "Derivation" objects by pairing the information
-- of each specification with the corresponding derivation and function definition, when
-- present. For now it is not possible to have more than one derivation of the same specification.
-- Moreover, the specification of a derivation must be IN THE SAME module as the derivation.
createDerivations:: Declarations -> [EDeriv]
createDerivations decls = foldl (createDeriv spcs fncs) [] drvs
where drvs = decls ^. derivs
spcs = bare specs decls
fncs = bare functions decls
createDeriv :: [SpecDecl] -> [FunDecl] -> [EDeriv] -> (DeclPos,DerivDecl) -> [EDeriv]
createDeriv pSpecs pFuncs prevDerivs (derPos,der) = der':prevDerivs
where der' = alreadyDefined der (rights prevDerivs) >>
maybe (Left ([NotSpecification],der)) newDer
(L.find (sameDecl der) pSpecs)
newDer sp = Right $ Derivation der derPos sp
$ L.find (sameDecl der) pFuncs
-- | Checks that there are no duplicate derivations.
alreadyDefined :: DerivDecl -> [Derivation] -> EDeriv' ()
alreadyDefined derDecl = mconcat . map (checkRedef derDecl . deriv)
-- | Checks whether a derivation already exists for the same function.
checkRedef :: (Decl d) => d -> DerivDecl -> EDeriv' ()
checkRedef d d' = when (sameDecl d d') $ Left ([RedefinedDerivation $ getNameDecl d'],d')
-- | Given a derivation, decides whether it is valid or not. This only
-- applies to derivations by recursion. If another kind of derivation is
-- implemented later, it should be handled separately.
checkDer :: [Operator] -> Declarations -> Declarations ->
[ThmDecl] -> Derivation -> EDeriv' (Annot FunDecl)
checkDer pops decls imDecls thms der = do
let (declS,mDeclF,declD) = (spec der,prog der,deriv der)
        -- first we check that the variable over which the
        -- derivation by recursion is performed is the same one defined
        -- in the specification.
v = declD ^. derVar
f = declD ^. derFun
pfsnt = declD ^. derCases
vname = tRepr v
spVars = L.map tRepr $ declS ^. specArgs
expr = declS ^. specSpec
(Just texpr) = getType expr
rel = getRelationFromType texpr
whenDer (vname `elem` spVars) ([InvalidVariable v],declD)
    -- We replace the variable with the one from the specification, since
    -- the latter is typed.
vspec <- getVarInSpec v declS declD
let declD' = Deriv f vspec pfsnt
    -- Now we build an inductive proof from the derivation data.
    -- The declaration is assumed to be well-typed.
fexpr = firstExpr declS
caseExpr = caseExprFromDerivation vspec declD'
        -- add the specification as a hypothesis
hypSpec = mkIndHyp f rel fexpr expr
pfs' = addHypInSubProofs hypSpec pfsnt
ctx = addHypothesis' hypSpec M.empty
prf' = Ind ctx rel (PE.toFocus fexpr)
(PE.toFocus caseExpr)
(PE.toFocus $ PE.Var vspec)
pfs'
        -- We add all the declarations as hypotheses
prf = addDeclHypothesis decls thms imDecls prf'
valPrf = validateProof pops prf
funPos = derivPos der
derivedFun = createFunDecl declS vspec declD'
_ <- maybe (return ()) (isDeclared declD' derivedFun) mDeclF
case valPrf of
Left err -> Left ([ProofNotValid err],declD')
Right _ -> return (funPos,derivedFun)
addHypInSubProofs :: Hypothesis -> [(PE.Focus,Proof)] -> [(PE.Focus,Proof)]
addHypInSubProofs hyp = L.map (second updCtx)
where updCtx p = let Right ctx = getCtx p
ctx' = addHypothesis' hyp ctx
Right p' = setCtx ctx' p
in p'
isDeclared :: DerivDecl -> FunDecl -> FunDecl -> Either ([DerivationError], DerivDecl) ()
isDeclared der derF declF = when (not (isEq derF declF)) $
Left ([DerivedFunctionDeclaredNotEqual (derF ^. funDeclName)],der)
mkIndHyp :: Variable -> Relation -> PE.PreExpr -> PE.PreExpr -> Hypothesis
mkIndHyp fun rel fexpr expr = createHypothesis name hypExpr (GenConditions [])
where hypExpr = Expr $ PE.BinOp (relToOp rel) fexpr expr
name = T.concat [T.pack "spec ",tRepr fun]
checkDerivation :: [Operator] -> Declarations -> Declarations -> [ThmDecl] ->
EDeriv -> EDeriv' (DeclPos,FunDecl)
checkDerivation pops decls imDecls thms =
either Left (checkDer pops decls imDecls thms)
getVarInSpec :: Variable -> SpecDecl -> DerivDecl -> EDeriv' Variable
getVarInSpec v spc derDecl = getVarInSpec' v (spc ^. specArgs)
where getVarInSpec' v' [] = Left ([InvalidVariable v'],derDecl)
getVarInSpec' v' (w:ws) = if v' == w
then return w
else getVarInSpec' v' ws
firstExpr :: SpecDecl -> PE.PreExpr
firstExpr spDecl = exprApplyFun (spDecl ^. specName) (spDecl ^. specArgs)
where exprApplyFun :: Variable -> [Variable] -> PE.PreExpr
exprApplyFun f = foldl (\e -> PE.App e . PE.Var) (PE.Var f)
createFunDecl :: SpecDecl -> Variable -> DerivDecl -> FunDecl
createFunDecl specDecl v derDecl = Fun fun vars expr Nothing
where vars = specDecl ^. specArgs
fun = specDecl ^. specName
expr = caseExprFromDerivation v derDecl
caseExprFromDerivation :: Variable -> DerivDecl -> PE.PreExpr
caseExprFromDerivation v derDecl = PE.Case pattern cases
where pattern = PE.Var v
pfs = derDecl ^. derCases
cases = L.map (PE.toExpr *** PE.toExpr . finalExpr) pfs
finalExpr = fromRight . getEnd
fromRight = either (error "fromRight Left") id
| alexgadea/fun | Fun/Derivation.hs | gpl-3.0 | 7,688 | 0 | 14 | 2,351 | 1,772 | 947 | 825 | -1 | -1 |
{-# language GADTs #-}
{-|
Copyright : (c) Quivade, 2017
License : GPL-3
Maintainer : Jakub Kopański <[email protected]>
Stability : experimental
Portability : POSIX
This module provides FIRRTL Types definitions.
|-}
module Language.FIRRTL.Types
( Type
, PolyType
, TypeF (..)
, Field (..)
, Ground (..)
, Orientation (..)
, poly
, ground
, vector
, bundle
) where
import Control.Unification (UTerm (..), Variable (..))
import Control.Unification.IntVar (IntVar)
import Language.FIRRTL.Recursion (Fix (..), cata)
import Language.FIRRTL.Syntax.Common (Ident)
-- | Ground types
data Ground
= Unsigned (Maybe Int) -- ^ Unsigned integer with bit width
| Signed (Maybe Int) -- ^ Signed integer with bit width
| Clock -- ^ Clock
| Natural -- ^ Non negative integer
deriving (Eq, Show)
-- data Scheme where
-- Forall :: Variable v => [v] -> Type v -> Scheme
data Scheme = Forall [IntVar] Type
-- | Firrtl types
data TypeF t
= Ground Ground -- ^ Ground type
| Vector t Int -- ^ Vector type
| Bundle [Field t] -- ^ Bundle type
deriving (Functor, Foldable, Traversable, Eq, Show)
-- | Single field in the bundle type
data Field t = Field
{ _fieldOrientation :: Orientation
, _fieldName :: Ident
, _fieldType :: t
} deriving (Functor, Foldable, Traversable, Eq, Show)
-- | Fiels orientation
data Orientation = Direct | Flipped
deriving (Eq, Show)
-- Fix (TypeF (Fix TypeF))
type Type = Fix TypeF
-- UTerm (TypeF (UTerm TypeF IntVar))
type PolyType = UTerm TypeF IntVar
ground :: Ground -> PolyType
ground = UTerm . Ground
vector :: PolyType -> Int -> PolyType
vector t n = UTerm $ Vector t n
bundle :: [Field PolyType] -> PolyType
bundle = UTerm . Bundle
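-- Illustrative example (field names are arbitrary): a bundle with a flipped
-- @ready@ flag and an 8-bit @data@ field could be written, informally, as
--
-- > bundle [ Field Flipped "ready" (ground (Unsigned (Just 1)))
-- >        , Field Direct  "data"  (ground (Unsigned (Just 8))) ]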
-- poly :: Type -> PolyType
-- poly (Fix (Ground t)) = ground t
-- poly (Fix (Vector t n)) = vector (poly t) n
-- poly (Fix (Bundle fs)) = bundle ((\f -> f { _fieldType = poly $ _fieldType f }) <$> fs)
poly :: Type -> PolyType
poly = cata alg
where alg (Ground t) = ground t
alg (Vector t n) = vector t n
alg (Bundle fs) = bundle fs
| quivade/screwdriver | src/Language/FIRRTL/Types.hs | gpl-3.0 | 2,132 | 0 | 9 | 510 | 486 | 288 | 198 | 48 | 3 |
module Amoeba.Middleware.Tracing.Log
( setupLogger
, finish
, info
, warning
, debug
, error
, notice ) where
import System.Log.Logger
import System.Log.Handler.Simple
import System.Log.Handler (setFormatter)
import System.Log.Formatter
import Prelude hiding (error)
logFormat = "$utcTime $prio $loggername: $msg"
loggerName = "Amoeba.Application"
defaultFormatter = simpleLogFormatter logFormat
setupLogger logFileRealName = do
handler <- fileHandler logFileRealName INFO
let handler' = setFormatter handler defaultFormatter
updateGlobalLogger loggerName $ addHandler handler'
updateGlobalLogger loggerName $ setLevel INFO
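-- Usage sketch (log file name is arbitrary):
-- > setupLogger "amoeba.log" >> info "application started" >> finish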
finish = removeAllHandlers
info = infoM loggerName
warning = warningM loggerName
debug = debugM loggerName
error = errorM loggerName
notice = noticeM loggerName
| graninas/The-Amoeba-World | src/Amoeba/Middleware/Tracing/Log.hs | gpl-3.0 | 839 | 0 | 10 | 146 | 193 | 105 | 88 | 27 | 1 |
module EmailParser.Lib
--(
-- makeEmailsList
-- ,parseEmails
--)
where
-- currently exporting everything while testing in ghci
-- ======================================================================
import Text.Regex.Posix
import System.Directory
import Data.Maybe -- needed for MaybeMap
import qualified Data.Map as M
-- ____ _
-- | _ \ __ _| |_ __ _
-- | | | |/ _` | __/ _` |
-- | |_| | (_| | || (_| |
-- |____/ \__,_|\__\__,_|
-- _____
-- |_ _| _ _ __ ___ ___
-- | || | | | '_ \ / _ \/ __|
-- | || |_| | |_) | __/\__ \
-- |_| \__, | .__/ \___||___/
-- |___/|_|
type Email = String
-- basic data structure
data MailData = MailData {
fromEmail :: Email
,subject :: String
,spamScore :: Float
,spamFlag :: Bool
} deriving (Show)
-- __ __ _ _ __ _ _
-- | \/ | __ _(_) | / _|_ _ _ __ ___| |_(_) ___ _ __ ___
-- | |\/| |/ _` | | | | |_| | | | '_ \ / __| __| |/ _ \| '_ \/ __|
-- | | | | (_| | | | | _| |_| | | | | (__| |_| | (_) | | | \__ \
-- |_| |_|\__,_|_|_| |_| \__,_|_| |_|\___|\__|_|\___/|_| |_|___/
-- this should open the file and extract the data we need
parseEmailContent :: String -> MailData
parseEmailContent filecontent = MailData {
fromEmail = searchFrom filecontent
,subject = searchSubject filecontent
,spamScore = searchSpamScore filecontent
,spamFlag = searchSpamFlag filecontent
}
searchFrom :: String -> String
searchFrom content = head(content =~ "^From:.*<(.*)>" :: [[String]]) !! 1
searchSubject :: String -> String
searchSubject content = head (content =~ "^Subject: (.*)" :: [[String]]) !! 1
-- TODO deal with encoded subjects
searchSpamScore :: String -> Float
searchSpamScore content = read (head (content =~ "score=([0-9.]+)" :: [[String]]) !! 1) :: Float
searchSpamFlag :: String -> Bool
searchSpamFlag content = content =~ "^X-Spam-Flag: YES" :: Bool
-- _ _ _ _
-- ___ __ _| | ___ _ _| | __ _| |_(_) ___ _ __ ___
-- / __/ _` | |/ __| | | | |/ _` | __| |/ _ \| '_ \/ __|
-- | (_| (_| | | (__| |_| | | (_| | |_| | (_) | | | \__ \
-- \___\__,_|_|\___|\__,_|_|\__,_|\__|_|\___/|_| |_|___/
--
avgSpamRate :: [MailData] -> Float
avgSpamRate list_ = sum list / elems
where
list = [(spamScore x)|x <- list_]
elems = fromIntegral (length list_) -- to return a Float
-- simple test in stack ghci:
-- > test <- run
-- > avgSpamRate test
-- run is a simple shortcut to launch main function with directory to parse as parameter
-- I need to compare the tails of the strings... >>>
-- the comparison works from the head, so to compare from the tail the strings have to be reversed first
-- will return the number of characters that match from the head
-- no better idea for now
commonPart :: String -> String -> Int -> Int
commonPart (m1:m1s) (m2:m2s) counter
    | m1 == m2 = 1 + commonPart m1s m2s counter
    | otherwise = counter
-- base case: once either string runs out, no further characters can match
commonPart _ _ counter = counter
-- compares 2 strings (emails) and returns the common part if any
compareEmailTail :: Email -> Email -> Maybe String
compareEmailTail email1 email2
| common > 0 = Just $ reverse $ take common remail1
| otherwise = Nothing
where
remail1 = reverse email1
remail2 = reverse email2
common = commonPart remail1 remail2 0
-- <<<
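-- For example (illustrative):
--   compareEmailTail "[email protected]" "[email protected]" == Just "@example.com"
--   compareEmailTail "[email protected]" "[email protected]"         == Nothing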
compareOneToList :: Email -> [Email] -> [String]
compareOneToList email list = mapMaybe (compareEmailTail email) list
compareAllList :: [Email] -> [String]
compareAllList [] = [] -- nothing to compare
compareAllList [_] = [] -- if one value is left, we have nothing to compare
compareAllList (email:list) = compareOneToList email list ++ compareAllList list
-- this is not a perfect calculation yet, but it'll do for now
-- FIXME do a better algorithm
groupUniques :: [String] -> M.Map String Int
groupUniques [] = M.empty
groupUniques (x:xs) = M.insertWith (+) x 1 rest
    where rest = groupUniques xs -- renamed from 'map' to avoid shadowing Prelude.map
-- easy way to test this:
-- list <- test
-- groupUniques . compareAllList $ list
-- testing function
test :: IO [Email]
test = do
emailsList <- makeEmailsList "emails"
emailsData <- parseEmails emailsList
return [fromEmail x | x <- emailsData]
-- _ _ _ _
-- | | (_)___| |_(_)_ __ __ _
-- | | | / __| __| | '_ \ / _` |
-- | |___| \__ \ |_| | | | | (_| |
-- |_____|_|___/\__|_|_| |_|\__, |
-- |___/
-- __ _ _
-- / _|_ _ _ __ ___| |_(_) ___ _ __ ___
-- | |_| | | | '_ \ / __| __| |/ _ \| '_ \/ __|
-- | _| |_| | | | | (__| |_| | (_) | | | \__ \
-- |_| \__,_|_| |_|\___|\__|_|\___/|_| |_|___/
-- first, simple array of the results of all mails we analysed
makeEmailsList :: FilePath -> IO [FilePath]
makeEmailsList dir = do
list <- listDirectory dir
return [dir ++ "/" ++ file | file <- list]
-- takes a list of emails and returns a list of corresponding MailData
parseEmails :: [FilePath] -> IO [MailData]
parseEmails [] = return []
parseEmails (file:files) = do
filecontent <- readFile file
rest <- parseEmails files
return (parseEmailContent filecontent : rest)
| simonced/haskell-kata | email-parser-cabal/src/EmailParser/Lib.hs | gpl-3.0 | 5,274 | 0 | 11 | 1,503 | 936 | 518 | 418 | 64 | 1 |
module TestIntegration
( integrationTests
) where
import Control.Concurrent (forkIO)
import Control.Monad.Except
import Control.Monad.IO.Class (liftIO)
import Crypto.Hash (SHA512(SHA512))
import Crypto.PubKey.RSA (generate)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Either (isRight)
import Data.IORef
import Data.Maybe (fromJust, fromMaybe)
import Data.PEM (pemWriteBS)
import Data.X509 (AltName(AltNameDNS), ExtSubjectAltName(ExtSubjectAltName))
import Data.X509.PKCS10
( KeyPair(KeyPairRSA)
, PKCS9Attribute(PKCS9Attribute)
, PKCS9Attributes(PKCS9Attributes)
, X520Attribute(..)
, generateCSR
, makeX520Attributes
, toDER
, toPEM
)
import Network.Connection (TLSSettings(TLSSettingsSimple))
import Network.HTTP.Client
( Manager
, managerResponseTimeout
, newManager
, responseTimeoutMicro
)
import Network.HTTP.Client.TLS (mkManagerSettings)
import Network.HTTP.Types
import Network.Wai (rawPathInfo, requestHeaders, responseLBS)
import Network.Wai.Handler.Warp
import System.IO
import System.Process
import Test.Tasty
import Test.Tasty.HUnit
import Crypto.JOSE.Types (Base64Octets(..))
import Network.ACME
import Network.ACME.JWS
import Network.ACME.Object
import Network.ACME.Type
integrationTests :: TestTree
integrationTests =
pebbleResource $ testGroup "Network.ACME" [testNewAccount, testOrderNew]
testNewAccount :: TestTree
testNewAccount =
testCase "Account operations" $ do
(acc, jwk) <- acmeNewObjAccountStub "[email protected]"
state <- myState jwk putStrLn
_ <-
flip evalCragT state $ do
(url1, _) <- acmePerformCreateAccount acc
url2 <- acmePerformFindAccountURL
        liftIO (url1 @?= url2)
return ()
testOrderNew :: TestTree
testOrderNew =
testCase "testOrderNew" $ do
(accStub, jwk) <- acmeNewObjAccountStub "[email protected]"
httpServerLiveConf <- newIORef []
myHttpServer httpServerLiveConf
state <- myState jwk putStrLn
res <-
flip evalCragT state $ do
let domains = ["localhost"] --, "ip6-localhost", "ip6-loopback"]
csr <- liftIO $ newCrt domains
let cert = Base64Octets csr
_ <- acmePerformCreateAccount accStub
crt <-
obtainCertificate domains cert (challengeReactions httpServerLiveConf)
liftIO $ putStrLn ("CRT: " <> concat crt)
return ()
print res
isRight res @?= True
myState :: JWK -> (String -> IO ()) -> IO (CragReader, CragState)
myState jwk step = do
manager <- newUnsafeTestManager
let logger x = step ("[cragLog] " <> x)
(Right state) <-
runExceptT $ acmePerformRunner' manager (Just logger) (config jwk)
return state
challengeReactions :: HTTPServerLiveConf -> [(String, ChallengeReaction)]
challengeReactions httpServerLiveConf =
[ ( http01
, ChallengeReaction
{ fulfill =
\identifier keyAuthz -> do
addResponse
(requestId identifier keyAuthz, keyAuthorization keyAuthz)
httpServerLiveConf
putStrLn "[Challenge Reaction] Fulfilled challenge"
, rollback =
\identifier keyAuthz -> do
removeResponse (requestId identifier keyAuthz) httpServerLiveConf
putStrLn "[Challenge Reaction] Removed challenge response"
})
]
where
requestId identifier keyAuthz =
(acmeObjIdentifierValue identifier, keyAuthorizationHttpPath keyAuthz)
pebbleResource :: TestTree -> TestTree
pebbleResource = withResource pebbleProcess terminateProcess . const
where
pebbleProcess = do
stdOut <- openFile "pebble.log" WriteMode
let pr =
(proc "gopath/bin/pebble" ["-strict"]) {std_out = UseHandle stdOut}
(_, _, _, pid) <- createProcess_ "spawnProcess" pr
return pid
type HTTPServerLiveConf = IORef [((String, String), String)]
addResponse :: ((String, String), String) -> HTTPServerLiveConf -> IO ()
addResponse x httpServerLiveConf = modifyIORef httpServerLiveConf (x :)
removeResponse :: (String, String) -> HTTPServerLiveConf -> IO ()
removeResponse k httpServerLiveConf =
modifyIORef httpServerLiveConf (filter ((/= k) . fst))
myHttpServer :: HTTPServerLiveConf -> IO ()
myHttpServer v = do
void $ forkIO $ runServer "127.0.0.1"
void $ forkIO $ runServer "::1"
where
runServer host =
runSettings (setHost host $ setPort 5002 defaultSettings) app
app req respond = do
resp <- readIORef v
let host =
takeWhile (/= ':') $
B.unpack $ fromJust $ lookup "Host" $ requestHeaders req
path = B.unpack $ rawPathInfo req
case lookup (host, path) resp of
Nothing -> error $ show (host, path) ++ " not found in " ++ show resp
Just r -> do
putStrLn $
"[HTTP-Server] Responding to request: " ++ show (host, path)
respond $ responseLBS status200 [] (L.pack r)
newUnsafeTestManager :: IO Manager
newUnsafeTestManager =
newManager
(mkManagerSettings (TLSSettingsSimple True False False) Nothing)
{managerResponseTimeout = responseTimeoutMicro 3000000}
newCrt :: [String] -> IO B.ByteString
newCrt domains = do
let rsaKeySize = 256 :: Int
let publicExponent = 0x10001 :: Integer
(pubKey, privKey) <- generate rsaKeySize publicExponent
let subjectAttrs =
makeX520Attributes
[(X520CommonName, head domains), (X520OrganizationName, head domains)]
Right req <-
generateCSR
subjectAttrs
(PKCS9Attributes
[PKCS9Attribute . ExtSubjectAltName $ map AltNameDNS domains])
(KeyPairRSA pubKey privKey)
SHA512
putStrLn $ B.unpack $ pemWriteBS $ toPEM req
return $ toDER req
confUrl :: URL
confUrl =
fromMaybe (error "invalid conf url") $ parseURL "https://localhost:14000/dir"
config :: JWK -> CragConfig
config jwk =
(newCragConfig confUrl jwk)
{cragConfigPollingInterval = 2, cragConfigRateLimitRetryAfter = 2}
| hemio-ev/libghc-acme | test/TestIntegration.hs | lgpl-3.0 | 5,972 | 0 | 18 | 1,273 | 1,675 | 878 | 797 | 159 | 2 |
-- Copyright © 2014 Garrison Jensen
-- License
-- This code and text are dedicated to the public domain.
-- You can copy, modify, distribute and perform the work,
-- even for commercial purposes, all without asking permission.
-- For more information, please refer to <http://unlicense.org/>
-- or the accompanying LICENSE file.
--
import LambdaCalc
import Parser
main :: IO ()
main = do
l <- lambdaParser
print $ eval l
>> main
| GarrisonJ/LambLamb | main.hs | unlicense | 454 | 0 | 9 | 97 | 51 | 29 | 22 | 7 | 1 |
-------------------------------------------------
-- |
-- Module : Crypto.Noise.HandshakePatterns
-- Maintainer : John Galt <[email protected]>
-- Stability : experimental
-- Portability : POSIX
--
-- This module contains all of the handshake patterns specified in sections
-- 7.2, 7.3, and 9.4.
module Crypto.Noise.HandshakePatterns
( -- * Standard patterns
noiseNN
, noiseKN
, noiseNK
, noiseKK
, noiseNX
, noiseKX
, noiseXN
, noiseIN
, noiseXK
, noiseIK
, noiseXX
, noiseIX
, noiseN
, noiseK
, noiseX
-- * PSK patterns
, noiseNNpsk0
, noiseNNpsk2
, noiseNKpsk0
, noiseNKpsk2
, noiseNXpsk2
, noiseXNpsk3
, noiseXKpsk3
, noiseXXpsk3
, noiseKNpsk0
, noiseKNpsk2
, noiseKKpsk0
, noiseKKpsk2
, noiseKXpsk2
, noiseINpsk1
, noiseINpsk2
, noiseIKpsk1
, noiseIKpsk2
, noiseIXpsk2
, noiseNpsk0
, noiseKpsk0
, noiseXpsk1
-- * Deferred patterns
, noiseNK1
, noiseNX1
, noiseX1N
, noiseX1K
, noiseXK1
, noiseX1K1
, noiseX1X
, noiseXX1
, noiseX1X1
, noiseK1N
, noiseK1K
, noiseKK1
, noiseK1K1
, noiseK1X
, noiseKX1
, noiseK1X1
, noiseI1N
, noiseI1K
, noiseIK1
, noiseI1K1
, noiseI1X
, noiseIX1
, noiseI1X1
) where
import Crypto.Noise.Internal.Handshake.Pattern
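-- Note (informal): every pattern below is a 'HandshakePattern' assembled from
-- the token combinators imported above ('e', 's', 'ee', 'es', 'se', 'ss',
-- 'psk'), wrapped in 'initiator' / 'responder' steps, with 'preInitiator' /
-- 'preResponder' describing pre-message public keys.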
-- | @Noise_NN():
-- -> e
-- <- e, ee@
noiseNN :: HandshakePattern
noiseNN = handshakePattern "NN" $
initiator e *>
responder (e *> ee)
-- | @Noise_KN(s):
-- -> s
-- ...
-- -> e
-- <- e, ee, se@
noiseKN :: HandshakePattern
noiseKN = handshakePattern "KN" $
preInitiator s *>
initiator e *>
responder (e *> ee *> se)
-- | @Noise_NK(rs):
-- <- s
-- ...
-- -> e, es
-- <- e, ee@
noiseNK :: HandshakePattern
noiseNK = handshakePattern "NK" $
preResponder s *>
initiator (e *> es) *>
responder (e *> ee)
-- | @Noise_KK(s, rs):
-- -> s
-- <- s
-- ...
-- -> e, es, ss
-- <- e, ee, se@
noiseKK :: HandshakePattern
noiseKK = handshakePattern "KK" $
preInitiator s *>
preResponder s *>
initiator (e *> es *> ss) *>
responder (e *> ee *> se)
-- | @Noise_NX(rs):
-- -> e
-- <- e, ee, s, es@
noiseNX :: HandshakePattern
noiseNX = handshakePattern "NX" $
initiator e *>
responder (e *> ee *> s *> es)
-- | @Noise_KX(s, rs):
-- -> s
-- ...
-- -> e
-- <- e, ee, se, s, es@
noiseKX :: HandshakePattern
noiseKX = handshakePattern "KX" $
preInitiator s *>
initiator e *>
responder (e *> ee *> se *> s *> es)
-- | @Noise_XN(s):
-- -> e
-- <- e, ee
-- -> s, se@
noiseXN :: HandshakePattern
noiseXN = handshakePattern "XN" $
initiator e *>
responder (e *> ee) *>
initiator (s *> se)
-- | @Noise_IN(s):
-- -> e, s
-- <- e, ee, se@
noiseIN :: HandshakePattern
noiseIN = handshakePattern "IN" $
initiator (e *> s) *>
responder (e *> ee *> se)
-- | @Noise_XK(s, rs):
-- <- s
-- ...
-- -> e, es
-- <- e, ee
-- -> s, se@
noiseXK :: HandshakePattern
noiseXK = handshakePattern "XK" $
preResponder s *>
initiator (e *> es) *>
responder (e *> ee) *>
initiator (s *> se)
-- | @Noise_IK(s, rs):
-- <- s
-- ...
-- -> e, es, s, ss
-- <- e, ee, se@
noiseIK :: HandshakePattern
noiseIK = handshakePattern "IK" $
preResponder s *>
initiator (e *> es *> s *> ss) *>
responder (e *> ee *> se)
-- | @Noise_XX(s, rs):
-- -> e
-- <- e, ee, s, es
-- -> s, se@
noiseXX :: HandshakePattern
noiseXX = handshakePattern "XX" $
initiator e *>
responder (e *> ee *> s *> es) *>
initiator (s *> se)
-- | @Noise_IX(s, rs):
-- -> e, s
-- <- e, ee, se, s, es@
noiseIX :: HandshakePattern
noiseIX = handshakePattern "IX" $
initiator (e *> s) *>
responder (e *> ee *> se *> s *> es)
-- | @Noise_N(rs):
-- <- s
-- ...
-- -> e, es@
noiseN :: HandshakePattern
noiseN = handshakePattern "N" $
preResponder s *>
initiator (e *> es)
-- | @Noise_K(s, rs):
-- -> s
-- <- s
-- ...
-- -> e, es, ss@
noiseK :: HandshakePattern
noiseK = handshakePattern "K" $
preInitiator s *>
preResponder s *>
initiator (e *> es *> ss)
-- | @Noise_X(s, rs):
-- <- s
-- ...
-- -> e, es, s, ss@
noiseX :: HandshakePattern
noiseX = handshakePattern "X" $
preResponder s *>
initiator (e *> es *> s *> ss)
-- | @Noise_NNpsk0():
-- -> psk, e
-- <- e, ee@
noiseNNpsk0 :: HandshakePattern
noiseNNpsk0 = handshakePattern "NNpsk0" $
initiator (psk *> e) *>
responder (e *> ee)
-- | @Noise_NNpsk2():
-- -> e
-- <- e, ee, psk@
noiseNNpsk2 :: HandshakePattern
noiseNNpsk2 = handshakePattern "NNpsk2" $
initiator e *>
responder (e *> ee *> psk)
-- | @Noise_NKpsk0(rs):
-- <- s
-- ...
-- -> psk, e, es
-- <- e, ee@
noiseNKpsk0 :: HandshakePattern
noiseNKpsk0 = handshakePattern "NKpsk0" $
preResponder s *>
initiator (psk *> e *> es) *>
responder (e *> ee)
-- | @Noise_NKpsk2(rs):
-- <- s
-- ...
-- -> e, es
-- <- e, ee, psk@
noiseNKpsk2 :: HandshakePattern
noiseNKpsk2 = handshakePattern "NKpsk2" $
preResponder s *>
initiator (e *> es) *>
responder (e *> ee *> psk)
-- | @Noise_NXpsk2(rs):
-- -> e
-- <- e, ee, s, es, psk@
noiseNXpsk2 :: HandshakePattern
noiseNXpsk2 = handshakePattern "NXpsk2" $
initiator e *>
responder (e *> ee *> s *> es *> psk)
-- | @Noise_XNpsk3(s):
-- -> e
-- <- e, ee
-- -> s, se, psk@
noiseXNpsk3 :: HandshakePattern
noiseXNpsk3 = handshakePattern "XNpsk3" $
initiator e *>
responder (e *> ee) *>
initiator (s *> se *> psk)
-- | @Noise_XKpsk3(s, rs):
-- <- s
-- ...
-- -> e, es
-- <- e, ee
-- -> s, se, psk@
noiseXKpsk3 :: HandshakePattern
noiseXKpsk3 = handshakePattern "XKpsk3" $
preResponder s *>
initiator (e *> es) *>
responder (e *> ee) *>
initiator (s *> se *> psk)
-- | @Noise_XXpsk3(s, rs):
-- -> e
-- <- e, ee, s, es
-- -> s, se, psk@
noiseXXpsk3 :: HandshakePattern
noiseXXpsk3 = handshakePattern "XXpsk3" $
initiator e *>
responder (e *> ee *> s *> es) *>
initiator (s *> se *> psk)
-- | @Noise_KNpsk0(s):
-- -> s
-- ...
-- -> psk, e
-- <- e, ee, se@
noiseKNpsk0 :: HandshakePattern
noiseKNpsk0 = handshakePattern "KNpsk0" $
preInitiator s *>
initiator (psk *> e) *>
responder (e *> ee *> se)
-- | @Noise_KNpsk2(s):
-- -> s
-- ...
-- -> e
-- <- e, ee, se, psk@
noiseKNpsk2 :: HandshakePattern
noiseKNpsk2 = handshakePattern "KNpsk2" $
preInitiator s *>
initiator e *>
responder (e *> ee *> se *> psk)
-- | @Noise_KKpsk0(s, rs):
-- -> s
-- <- s
-- ...
-- -> psk, e, es, ss
-- <- e, ee, se@
noiseKKpsk0 :: HandshakePattern
noiseKKpsk0 = handshakePattern "KKpsk0" $
preInitiator s *>
preResponder s *>
initiator (psk *> e *> es *> ss) *>
responder (e *> ee *> se)
-- | @Noise_KKpsk2(s, rs):
-- -> s
-- <- s
-- ...
-- -> e, es, ss
-- <- e, ee, se, psk@
noiseKKpsk2 :: HandshakePattern
noiseKKpsk2 = handshakePattern "KKpsk2" $
preInitiator s *>
preResponder s *>
initiator (e *> es *> ss) *>
responder (e *> ee *> se *> psk)
-- | @Noise_KXpsk2(s, rs):
-- -> s
-- ...
-- -> e
-- <- e, ee, se, s, es, psk@
noiseKXpsk2 :: HandshakePattern
noiseKXpsk2 = handshakePattern "KXpsk2" $
preInitiator s *>
initiator e *>
responder (e *> ee *> se *> s *> es *> psk)
-- | @Noise_INpsk1(s):
-- -> e, s, psk
-- <- e, ee, se@
noiseINpsk1 :: HandshakePattern
noiseINpsk1 = handshakePattern "INpsk1" $
initiator (e *> s *> psk) *>
responder (e *> ee *> se)
-- | @Noise_INpsk2(s):
-- -> e, s
-- <- e, ee, se, psk@
noiseINpsk2 :: HandshakePattern
noiseINpsk2 = handshakePattern "INpsk2" $
initiator (e *> s) *>
responder (e *> ee *> se *> psk)
-- | @Noise_IKpsk1(s, rs):
-- <- s
-- ...
-- -> e, es, s, ss, psk
-- <- e, ee, se@
noiseIKpsk1 :: HandshakePattern
noiseIKpsk1 = handshakePattern "IKpsk1" $
preResponder s *>
initiator (e *> es *> s *> ss *> psk) *>
responder (e *> ee *> se)
-- | @Noise_IKpsk2(s, rs):
-- <- s
-- ...
-- -> e, es, s, ss
-- <- e, ee, se, psk@
noiseIKpsk2 :: HandshakePattern
noiseIKpsk2 = handshakePattern "IKpsk2" $
preResponder s *>
initiator (e *> es *> s *> ss) *>
responder (e *> ee *> se *> psk)
-- | @Noise_IXpsk2(s, rs):
-- -> e, s
-- <- e, ee, se, s, es, psk@
noiseIXpsk2 :: HandshakePattern
noiseIXpsk2 = handshakePattern "IXpsk2" $
initiator (e *> s) *>
responder (e *> ee *> se *> s *> es *> psk)
-- | @Noise_Npsk0(rs):
-- <- s
-- ...
-- -> psk, e, es@
noiseNpsk0 :: HandshakePattern
noiseNpsk0 = handshakePattern "Npsk0" $
preResponder s *>
initiator (psk *> e *> es)
-- | @Noise_Kpsk0(s, rs):
-- <- s
-- ...
-- -> psk, e, es, ss@
noiseKpsk0 :: HandshakePattern
noiseKpsk0 = handshakePattern "Kpsk0" $
preInitiator s *>
preResponder s *>
initiator (psk *> e *> es *> ss)
-- | @Noise_Xpsk1(s, rs):
-- <- s
-- ...
-- -> e, es, s, ss, psk@
noiseXpsk1 :: HandshakePattern
noiseXpsk1 = handshakePattern "Xpsk1" $
preResponder s *>
initiator (e *> es *> s *> ss *> psk)
-- | @Noise_NK1:
-- <- s
-- ...
-- -> e
-- <- e, ee, es
noiseNK1 :: HandshakePattern
noiseNK1 = handshakePattern "NK1" $
preResponder s *>
initiator e *>
responder (e *> ee *> es)
-- | @Noise_NX1:
-- -> e
-- <- e, ee, s
-- -> es
noiseNX1 :: HandshakePattern
noiseNX1 = handshakePattern "NX1" $
initiator e *>
responder (e *> ee *> s) *>
initiator es
-- | @Noise_X1N:
-- -> e
-- <- e, ee
-- -> s
-- <- se
noiseX1N :: HandshakePattern
noiseX1N = handshakePattern "X1N" $
initiator e *>
responder (e *> ee) *>
initiator s *>
responder se
-- | @Noise_X1K:
-- <- s
-- ...
-- -> e, es
-- <- e, ee
-- -> s
-- <- se
noiseX1K :: HandshakePattern
noiseX1K = handshakePattern "X1K" $
preResponder s *>
initiator (e *> es) *>
responder (e *> ee) *>
initiator s *>
responder se
-- | @Noise_XK1:
-- <- s
-- ...
-- -> e
-- <- e, ee, es
-- -> s, se
noiseXK1 :: HandshakePattern
noiseXK1 = handshakePattern "XK1" $
preResponder s *>
initiator e *>
responder (e *> ee *> es) *>
initiator (s *> se)
-- | @Noise_X1K1:
-- <- s
-- ...
-- -> e
-- <- e, ee, es
-- -> s
-- <- se
noiseX1K1 :: HandshakePattern
noiseX1K1 = handshakePattern "X1K1" $
preResponder s *>
initiator e *>
responder (e *> ee *> es) *>
initiator s *>
responder se
-- | @Noise_X1X
-- -> e
-- <- e, ee, s, es
-- -> s
-- <- se
noiseX1X :: HandshakePattern
noiseX1X = handshakePattern "X1X" $
initiator e *>
responder (e *> ee *> s *> es) *>
initiator s *>
responder se
-- | @Noise_XX1:
-- -> e
-- <- e, ee, s
-- -> es, s, se
noiseXX1 :: HandshakePattern
noiseXX1 = handshakePattern "XX1" $
initiator e *>
responder (e *> ee *> s) *>
initiator (es *> s *> se)
-- | @Noise_X1X1:
-- -> e
-- <- e, ee, s
-- -> es, s
-- <- se
noiseX1X1 :: HandshakePattern
noiseX1X1 = handshakePattern "X1X1" $
initiator e *>
responder (e *> ee *> s) *>
initiator (es *> s) *>
responder se
-- | @Noise_K1N:
-- -> s
-- ...
-- -> e
-- <- e, ee
-- -> se
noiseK1N :: HandshakePattern
noiseK1N = handshakePattern "K1N" $
preInitiator s *>
initiator e *>
responder (e *> ee) *>
initiator se
-- | @Noise_K1K:
-- -> s
-- <- s
-- ...
-- -> e, es
-- <- e, ee
-- -> se
noiseK1K :: HandshakePattern
noiseK1K = handshakePattern "K1K" $
preInitiator s *>
preResponder s *>
initiator (e *> es) *>
responder (e *> ee) *>
initiator se
-- | @Noise_KK1:
-- -> s
-- <- s
-- ...
-- -> e
-- <- e, ee, se, es
noiseKK1 :: HandshakePattern
noiseKK1 = handshakePattern "KK1" $
preInitiator s *>
preResponder s *>
initiator e *>
responder (e *> ee *> se *> es)
-- | @Noise_K1K1:
-- -> s
-- <- s
-- ...
-- -> e
-- <- e, ee, es
-- -> se
noiseK1K1 :: HandshakePattern
noiseK1K1 = handshakePattern "K1K1" $
preInitiator s *>
preResponder s *>
initiator e *>
responder (e *> ee *> es) *>
initiator se
-- | @Noise_K1X
-- -> s
-- ...
-- -> e
-- <- e, ee, s, es
-- -> se
noiseK1X :: HandshakePattern
noiseK1X = handshakePattern "K1X" $
preInitiator s *>
initiator e *>
responder (e *> ee *> s *> es) *>
initiator se
-- | @Noise_KX1
-- -> s
-- ...
-- -> e
-- <- e, ee, se, s
-- -> es
noiseKX1 :: HandshakePattern
noiseKX1 = handshakePattern "KX1" $
preInitiator s *>
initiator e *>
responder (e *> ee *> se *> s) *>
initiator es
-- | @Noise_K1X1:
-- -> s
-- ...
-- -> e
-- <- e, ee, s
-- -> se, es
noiseK1X1 :: HandshakePattern
noiseK1X1 = handshakePattern "K1X1" $
preInitiator s *>
initiator e *>
responder (e *> ee *> s) *>
initiator (se *> es)
-- | @Noise_I1N:
-- -> e, s
-- <- e, ee
-- -> se
noiseI1N :: HandshakePattern
noiseI1N = handshakePattern "I1N" $
initiator (e *> s) *>
responder (e *> ee) *>
initiator se
-- | @Noise_I1K:
-- <- s
-- ...
-- -> e, es, s
-- <- e, ee
-- -> se
noiseI1K :: HandshakePattern
noiseI1K = handshakePattern "I1K" $
preResponder s *>
initiator (e *> es *> s) *>
responder (e *> ee) *>
initiator se
-- | @Noise_IK1:
-- <- s
-- ...
-- -> e, s
-- <- e, ee, se, es
noiseIK1 :: HandshakePattern
noiseIK1 = handshakePattern "IK1" $
preResponder s *>
initiator (e *> s) *>
responder (e *> ee *> se *> es)
-- | @Noise_I1K1:
-- <- s
-- ...
-- -> e, s
-- <- e, ee, es
-- -> se
noiseI1K1 :: HandshakePattern
noiseI1K1 = handshakePattern "I1K1" $
preResponder s *>
initiator (e *> s) *>
responder (e *> ee *> es) *>
initiator se
-- | @Noise_I1X:
-- -> e, s
-- <- e, ee, s, es
-- -> se
noiseI1X :: HandshakePattern
noiseI1X = handshakePattern "I1X" $
initiator (e *> s) *>
responder (e *> ee *> s *> es) *>
initiator se
-- | @Noise_IX1:
-- -> e, s
-- <- e, ee, se, s
-- -> es
noiseIX1 :: HandshakePattern
noiseIX1 = handshakePattern "IX1" $
initiator (e *> s) *>
responder (e *> ee *> se *> s) *>
initiator es
-- | @Noise_I1X1:
-- -> e, s
-- <- e, ee, s
-- -> se, es
noiseI1X1 :: HandshakePattern
noiseI1X1 = handshakePattern "I1X1" $
initiator (e *> s) *>
responder (e *> ee *> s) *>
initiator (se *> es)
| centromere/cacophony | src/Crypto/Noise/HandshakePatterns.hs | unlicense | 13,865 | 0 | 12 | 3,414 | 3,610 | 1,969 | 1,641 | 367 | 1 |
module Testsuite.Dipole72 where
-- standard modules
import qualified Data.Map as Map
import qualified Data.Set as Set
-- local modules
import Basics
import Calculus.Dipole72
-- ----------------------------------------------------------------------------
-- -- Dipole 72 networks
forwardCircle :: Int -> Network [String] (Set.Set Dipole72)
forwardCircle n
| n < 2 = eNetwork
| otherwise = Network
{ nDesc = show n ++ " nodes," ++
" forming a loop of consecutively connected, collinear dipoles,\
\ each in front of its predecessor."
, nCalc = "Dipole-72"
, nNumOfNodes = Just n
, nCons = Map.fromList $ ([show $ n - 1, "0"], Set.singleton EFBS72) : map
(\k -> ([show k, show $ k + 1], Set.singleton EFBS72)
) [0..n - 2]
}
circleWithTwoCollinearDipolesInside :: Int
-> Network [String] (Set.Set Dipole72)
circleWithTwoCollinearDipolesInside n
| n < 5 = eNetwork
| otherwise = Network
{ nDesc = show n ++ " nodes," ++
" forming a circle with two collinear, dipoles inside, one in\
\ front of the other, inside and outside of the circle at the\
\ same time."
, nCalc = "Dipole-72"
, nNumOfNodes = Nothing
, nCons = Map.fromList $
[ ( [show 0 , show $ n - 2] , Set.singleton LLLR72 )
, ( [show $ div (n - 2) 2, show $ n - 1] , Set.singleton LLRL72 )
, ( [show $ n - 2 , show $ n - 1] , Set.singleton EFBS72 )
, ( [show $ n - 3 , show 0 ] , Set.singleton ELLS72 )
-- make inconsistent:
, ( [show $ div ((n - 2) * 3) 4, show $ n - 1] , Set.singleton RRLL72 )
]
++ map (\k -> ( [show k, show $ n - 3] , Set.singleton LLLL72 )) [1..n - 5]
++ foldl
(\acc k ->
( ( [show k, show $ k + 1] , Set.singleton ELLS72 ) : map
(\l -> ( [show k, show l] , Set.singleton LLLL72 )
) [k + 2..n-4]
) ++ acc
) [] [0..n-4]
}
| spatial-reasoning/zeno | src/Testsuite/Dipole72.hs | bsd-2-clause | 2,209 | 0 | 20 | 844 | 674 | 369 | 305 | 39 | 1 |
{-# LANGUAGE PackageImports #-}
import "devsite" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, settingsPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
main :: IO ()
main = do
putStrLn "Starting devel application"
(port, app) <- getApplicationDev
forkIO $ runSettings defaultSettings
{ settingsPort = port
} app
loop
loop :: IO ()
loop = do
threadDelay 100000
e <- doesFileExist "yesod-devel/devel-terminate"
if e then terminateDevel else loop
terminateDevel :: IO ()
terminateDevel = exitSuccess
| pbrisbin/devsite | devel.hs | bsd-2-clause | 708 | 0 | 10 | 123 | 186 | 101 | 85 | 23 | 2 |
{-# LANGUAGE LambdaCase #-}
import Graphics.UI.GLFW.Pal
import Graphics.GL
main :: IO ()
main = withWindow "GLFW Pal" 640 480 $ \(win, eventsChan) -> do
glClearColor 1 1 0 1
whileWindow win $ do
events <- gatherEvents
forM_ events (closeOnEscape win)
glClear GL_COLOR_BUFFER_BIT
swapBuffers win | lukexi/glfw-pal | test/test.hs | bsd-2-clause | 342 | 0 | 14 | 91 | 102 | 51 | 51 | 11 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module BaseSpec where
import Web.Twitter.Conduit.Base
import Web.Twitter.Conduit.Response
import Control.Applicative
import Control.Lens
import Data.Aeson
import Data.Aeson.Lens
import Data.Conduit
import qualified Data.Conduit.Attoparsec as CA
import Data.Maybe
import qualified Data.Text as T
import qualified Network.HTTP.Types as HT
import Test.Hspec
spec :: Spec
spec = do
unit
unit :: Spec
unit = do
describe "checkResponse" $ do
describe "when the response has \"errors\" key" $ do
let errorMessage = fromJust . decode $ "{\"errors\":[{\"message\":\"Sorry, that page does not exist\",\"code\":34}]}"
response = Response HT.status404 [] errorMessage
result = checkResponse response
it "returns TwitterErrorResponse" $ do
case result of
Left res@(TwitterErrorResponse _ _ msgs) -> do
res `shouldBe` TwitterErrorResponse HT.status404 [] [TwitterErrorMessage 34 ""]
twitterErrorMessage (head msgs) `shouldBe` "Sorry, that page does not exist"
_ -> expectationFailure $ "Unexpected " ++ show result
describe "when the response does not has \"errors\" key but have error status code" $ do
let errorMessage = fromJust . decode $ "{}"
response = Response HT.status404 [] errorMessage
result = checkResponse response
it "returns TwitterStatusError" $ do
case result of
Left (TwitterStatusError st hdr body) -> do
st `shouldBe` HT.status404
hdr `shouldBe` []
body `shouldBe` errorMessage
_ -> expectationFailure $ "Unexpected " ++ show result
describe "sinkJSON" $ do
describe "when valid JSON input" $ do
let input = "{\"test\": \"input\", \"status\": 200 }"
it "can consume the input from Source and returns JSON Value" $ do
res <- yield input $$ sinkJSON
res ^. key "test" . _String `shouldBe` "input"
res ^? key "status" . _Integer `shouldBe` Just 200
describe "when invalid JSON input" $ do
let input = "{]"
it "should raise Data.Conduit.Attoparsec.ParseError" $ do
let parseErrorException (CA.ParseError {}) = True
parseErrorException _ = False
action = yield input $$ sinkJSON
action `shouldThrow` parseErrorException
describe "sinkFromJSON" $ do
describe "when valid JSON input" $ do
let input = "{\"test\": \"input\", \"status\": 200 }"
it "can consume the input from Source and returns a value which type is the specified one" $ do
res <- yield input $$ sinkFromJSON
res `shouldBe` TestJSON "input" 200
describe "when the JSON value does not have expected format" $ do
let input = "{\"status\": 200}"
it "should raise FromJSONError" $ do
let fromJSONException (FromJSONError {}) = True
fromJSONException _ = False
action :: IO TestJSON
action = yield input $$ sinkFromJSON
action `shouldThrow` fromJSONException
data TestJSON = TestJSON
{ testField :: T.Text
, testStatus :: Int
}
deriving (Show, Eq)
instance FromJSON TestJSON where
parseJSON (Object o) =
TestJSON <$> o .: "test"
<*> o .: "status"
parseJSON v = fail $ "Unexpected: " ++ show v
| himura/twitter-conduit | tests/BaseSpec.hs | bsd-2-clause | 3,690 | 0 | 24 | 1,229 | 820 | 408 | 412 | -1 | -1 |
{- |
Module : Data.Graph.Analysis.Algorithms.Common
Description : Algorithms for all graph types.
Copyright : (c) Ivan Lazar Miljenovic 2009
License : 2-Clause BSD
Maintainer : [email protected]
Defines algorithms that work on both undirected and
directed graphs.
-}
module Data.Graph.Analysis.Algorithms.Common
( -- * Graph decomposition
-- $connected
componentsOf,
pathTree,
-- * Clique Detection
-- $cliques
cliquesIn,
cliquesIn',
findRegular,
isRegular,
-- * Cycle Detection
-- $cycles
cyclesIn,
cyclesIn',
uniqueCycles,
uniqueCycles',
-- * Chain detection
-- $chains
chainsIn,
chainsIn'
) where
import Data.Graph.Analysis.Types
import Data.Graph.Analysis.Utils
import Data.Graph.Inductive.Graph
-- For linking purposes. This will throw a warning.
import Data.Graph.Inductive.Query.DFS(components)
import Data.List(unfoldr, foldl', foldl1', intersect, (\\), delete, tails, nub)
import Data.Maybe(isJust)
import Control.Arrow(first)
-- -----------------------------------------------------------------------------
{- $connected
Finding connected components.
Whilst the FGL library does indeed have a function 'components'
that returns the connected components of a graph, it returns each
component as a list of 'Node's. This implementation instead
returns each component as a /graph/, which is much more useful.
Connected components are found by choosing a random node, then
recursively extracting all neighbours of that node until no more
nodes can be removed.
Note that for directed graphs, these are known as the /weakly/
connected components.
-}
-- | Find all connected components of a graph.
componentsOf :: (DynGraph g) => g a b -> [g a b]
componentsOf = unfoldr splitComponent
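-- For example (illustrative): in a graph whose only edges are 1->2 and
-- 3->4, 'componentsOf' yields two graphs, one containing nodes 1 and 2 and
-- one containing nodes 3 and 4, rather than two lists of 'Node's as
-- 'components' does.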
-- | Find the next component and split it off from the graph.
splitComponent :: (DynGraph g) => g a b -> Maybe (g a b, g a b)
splitComponent g
| isEmpty g = Nothing
| otherwise = Just . -- Get the type right
first buildGr . -- Create the subgraph
extractNode . -- Extract components of subgraph
first Just . -- Getting the types right
matchAny $ g -- Choose an arbitrary node to begin with
-- | Extract the given node and all nodes it is transitively
-- connected to from the graph.
extractNode :: (DynGraph g) => Decomp g a b -> ([Context a b], g a b)
extractNode (Nothing,gr) = ([],gr)
extractNode (Just ctxt, gr)
| isEmpty gr = ([ctxt], empty)
| otherwise = first (ctxt:) $ foldl' nodeExtractor ([],gr) nbrs
where
nbrs = neighbors' ctxt
-- | Helper function for 'extractNode' above.
nodeExtractor :: (DynGraph g) => ([Context a b], g a b) -> Node
-> ([Context a b], g a b)
nodeExtractor cg@(cs,g) n
| gelem n g = first (++ cs) . extractNode $ match n g
| otherwise = cg
-- -----------------------------------------------------------------------------
-- | Find all possible paths from this given node, avoiding loops,
-- cycles, etc.
pathTree :: (DynGraph g) => Decomp g a b -> [NGroup]
pathTree (Nothing,_) = []
pathTree (Just ct,g)
| isEmpty g = []
| null sucs = [[n]]
| otherwise = (:) [n] . map (n:) . concatMap (subPathTree g') $ sucs
where
n = node' ct
sucs = suc' ct
-- Avoid infinite loops by not letting it continue any further
ct' = makeLeaf ct
g' = ct' & g
subPathTree gr n' = pathTree $ match n' gr
-- | Remove all outgoing edges
makeLeaf :: Context a b -> Context a b
makeLeaf (p,n,a,_) = (p', n, a, [])
where
-- Ensure there isn't an edge (n,n)
p' = filter (\(_,n') -> n' /= n) p
-- -----------------------------------------------------------------------------
{- $cliques
Clique detection routines. Find cliques by taking out a node, and
seeing which other nodes are all common neighbours (by both 'pre'
and 'suc').
-}
-- | Finds all cliques (i.e. maximal complete subgraphs) in the given graph.
cliquesIn :: (DynGraph g) => g a b -> [[LNode a]]
cliquesIn gr = map (addLabels gr) (cliquesIn' gr)
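-- Illustrative example: three nodes 1, 2 and 3 that are pairwise connected
-- by edges in both directions, with no further node bidirectionally
-- connected to all of them, are reported as the clique [1,2,3] (with labels
-- here, or as plain 'Node's by 'cliquesIn'').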
-- | Finds all cliques in the graph, without including labels.
cliquesIn' :: (DynGraph g) => g a b -> [NGroup]
cliquesIn' gr = filter (isClique gr') (findRegular gr')
where
gr' = mkSimple gr
-- | Determine if the given list of nodes is indeed a clique,
-- and not a smaller subgraph of a clique.
isClique :: (Graph g) => g a b -> NGroup -> Bool
isClique _ [] = False
isClique gr ns = null .
foldl1' intersect .
map ((\\ ns) . twoCycle gr) $ ns
-- | Find all regular subgraphs of the given graph.
findRegular :: (Graph g) => g a b -> [[Node]]
findRegular = concat . unfoldr findRegularOf
-- | Extract the next regular subgraph of a graph.
findRegularOf :: (Graph g) => g a b -> Maybe ([[Node]], g a b)
findRegularOf g
| isEmpty g = Nothing
| otherwise = Just .
first (regularOf g . node') .
matchAny $ g
-- | Returns all regular subgraphs that include the given node.
regularOf :: (Graph g) => g a b -> Node -> [[Node]]
regularOf gr n = map (n:) (alsoRegular gr crs)
where
crs = twoCycle gr n
-- | Recursively find all regular subgraphs only containing nodes
-- in the given list.
alsoRegular :: (Graph g) => g a b -> [Node] -> [[Node]]
alsoRegular _ [] = []
alsoRegular _ [n] = [[n]]
alsoRegular g (n:ns) = [n] : rs ++ alsoRegular g ns
where
rs = map (n:) (alsoRegular g $ intersect crn ns)
crn = twoCycle g n
-- | Return all nodes that are co-recursive with the given node
-- (i.e. for n, find all n' such that n->n' and n'->n).
twoCycle :: (Graph g) => g a b -> Node -> [Node]
twoCycle gr n = filter (elem n . suc gr) (delete n $ suc gr n)
-- | Determines if the list of nodes represents a regular subgraph.
isRegular :: (Graph g) => g a b -> NGroup -> Bool
isRegular g ns = all allTwoCycle split
where
-- Node + Rest of list
split = zip ns tns'
tns' = tail $ tails ns
allTwoCycle (n,rs) = null $ rs \\ twoCycle g n
-- -----------------------------------------------------------------------------
{- $cycles
Cycle detection. Find cycles by finding all paths from a given
node, and seeing if it reaches itself again.
-}
-- | Find all cycles in the given graph.
cyclesIn :: (DynGraph g) => g a b -> [LNGroup a]
cyclesIn g = map (addLabels g) (cyclesIn' g)
-- | Find all cycles in the given graph, returning just the nodes.
cyclesIn' :: (DynGraph g) => g a b -> [NGroup]
cyclesIn' = concat . unfoldr findCycles . mkSimple
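-- Illustrative example: in a graph with edges 1->2, 2->3 and 3->1, a single
-- cycle through nodes 1, 2 and 3 is detected; it is reported as a list such
-- as [1,2,3], starting from one of its nodes and without repeating the
-- first node at the end.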
-- | Find all cycles in the given graph, excluding those that are also cliques.
uniqueCycles :: (DynGraph g) => g a b -> [LNGroup a]
uniqueCycles g = map (addLabels g) (uniqueCycles' g)
-- | Find all cycles in the given graph, excluding those that are also cliques.
uniqueCycles' :: (DynGraph g) => g a b -> [NGroup]
uniqueCycles' g = filter (not . isRegular g) (cyclesIn' g)
-- | Find all cycles containing a chosen node.
findCycles :: (DynGraph g) => g a b -> Maybe ([NGroup], g a b)
findCycles g
| isEmpty g = Nothing
| otherwise = Just . getCycles . matchAny $ g
where
getCycles (ctx,g') = (cyclesFor (ctx, g'), g')
-- | Find all cycles for the given node.
cyclesFor :: (DynGraph g) => GDecomp g a b -> [NGroup]
cyclesFor = map init .
filter isCycle .
pathTree .
first Just
where
isCycle p = not (single p) && (head p == last p)
-- -----------------------------------------------------------------------------
{- $chains
A chain is a path in a graph where for each interior node, there is
exactly one predecessor and one successor node, i.e. that part of
the graph forms a \"straight line\". Furthermore, the initial node
should have only one successor, and the final node should have only
one predecessor. Chains are found by recursively finding the next
successor in the chain, until either a leaf node is reached or no
more nodes match the criteria.
-}
-- | Find all chains in the given graph.
chainsIn :: (DynGraph g, Eq b) => g a b -> [LNGroup a]
chainsIn g = map (addLabels g)
$ chainsIn' g
-- | Find all chains in the given graph.
chainsIn' :: (DynGraph g, Eq b) => g a b -> [NGroup]
chainsIn' g = filter (not . single) -- Remove trivial chains
. map (getChain g')
$ filterNodes' isChainStart g'
where
-- Try to make this work on two-element cycles, undirected
-- graphs, etc. Also remove multiple edges, etc.
g' = oneWay $ mkSimple g
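-- Illustrative example: in a graph whose only edges are 1->2 and 2->3, the
-- single chain [1,2,3] is returned.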
-- | Find the chain starting with the given 'Node'.
getChain :: (Graph g) => g a b -> Node -> NGroup
getChain g n = n : unfoldr (chainLink g) (chainNext g n)
-- | Find the next link in the chain.
chainLink :: (Graph g) => g a b -> Maybe Node
-> Maybe (Node, Maybe Node)
chainLink _ Nothing = Nothing
chainLink g (Just n)
| isEmpty g = Nothing
| not $ hasPrev g n = Nothing
| otherwise = Just (n, chainNext g n)
-- | Determines if the given node is the start of a chain.
isChainStart :: (Graph g) => g a b -> Node -> Bool
isChainStart g n = hasNext g n
&& case (pre g n \\ [n]) of
[n'] -> not $ isChainStart g n'
_ -> True
-- | Determine if the given node matches the chain criteria in the given
-- direction, and if so what the next node in that direction is.
chainFind :: (Graph g) => (g a b -> Node -> NGroup)
-> g a b -> Node -> Maybe Node
chainFind f g n = case (nub (f g n) \\ [n]) of
[n'] -> Just n'
_ -> Nothing
-- | Find the next node in the chain.
chainNext :: (Graph g) => g a b -> Node -> Maybe Node
chainNext = chainFind suc
-- | Determines if this node matches the successor criteria for chains.
hasNext :: (Graph g) => g a b -> Node -> Bool
hasNext g = isJust . chainNext g
-- | Determines if this node matches the predecessor criteria for chains.
hasPrev :: (Graph g) => g a b -> Node -> Bool
hasPrev g = isJust . chainFind pre g
| ivan-m/Graphalyze | Data/Graph/Analysis/Algorithms/Common.hs | bsd-2-clause | 10,348 | 0 | 12 | 2,725 | 2,630 | 1,389 | 1,241 | 142 | 2 |
module Main where
import Lib
main :: IO ()
main = someFunc "Haskell"
| pdmurray/haskell-book-ex | app/Main.hs | bsd-3-clause | 71 | 0 | 6 | 15 | 25 | 14 | 11 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) - 2017 Róman Joost <[email protected]>
This file is part of gtfsschedule.
gtfsschedule is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
gtfsschedule is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with gtfsschedule. If not, see <http://www.gnu.org/licenses/>.
-}
{- | Module to parse the real-time feed for real-time updates
This module uses protocol buffers to parse the 'FeedMessage' in order to update
the schedule data.
See also: https://developers.google.com/transit/gtfs-realtime/reference/
-}
module GTFS.Realtime.Message.Schedule
(updateSchedule, updateSchedulesWithRealtimeData, getTripUpdates,
getVehiclePositions)
where
import GTFS.Realtime.Internal.Com.Google.Transit.Realtime.FeedMessage (FeedMessage, entity)
import GTFS.Realtime.Internal.Com.Google.Transit.Realtime.VehiclePosition (VehiclePosition)
import GTFS.Realtime.Message.Types (ForFeedElement (..))
import GTFS.Schedule (ScheduleItem (..))
import Control.Monad.State (State, execState, get, put)
import qualified Data.Map.Lazy as Map
import qualified Data.Text as T
import GTFS.Realtime.Internal.Com.Google.Transit.Realtime.FeedEntity (trip_update, vehicle)
import GTFS.Realtime.Internal.Com.Google.Transit.Realtime.TripUpdate (TripUpdate)
import qualified Text.ProtocolBuffers.Header as P'
import Network.HTTP.Conduit (simpleHttp)
import Text.ProtocolBuffers (messageGet)
import Data.Traversable (mapM)
import Prelude hiding (mapM)
type Schedule = Map.Map String ScheduleItem
-- | Updates the schedule with realtime information from the GTFS feed
--
updateSchedulesWithRealtimeData ::
Maybe T.Text
-> [ScheduleItem]
-> IO [ScheduleItem]
updateSchedulesWithRealtimeData Nothing schedules = pure schedules
updateSchedulesWithRealtimeData (Just url) schedules = do
bytes <- simpleHttp (T.unpack url)
case messageGet bytes of
Left err -> do
print $ "Error occurred decoding feed: " ++ err
pure schedules
Right (fm,_) -> pure $ updateSchedule fm schedules
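-- A hypothetical usage sketch (the feed URL and @items@ below are assumptions,
-- not part of this module): fetch the realtime feed once and fold any matching
-- trip updates and vehicle positions into an existing static schedule.
--
-- > updated <- updateSchedulesWithRealtimeData (Just "http://example.org/gtfsrt") items
--
-- If the feed cannot be decoded, the original @items@ are returned unchanged.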
-- | Updates schedule with trip updates given by feed
--
updateSchedule ::
FeedMessage
-> [ScheduleItem]
-> [ScheduleItem]
updateSchedule fm schedules =
updateScheduleHelper getVehiclePositions fm $
updateScheduleHelper getTripUpdates fm schedules
updateScheduleHelper
:: ForFeedElement e
=> (FeedMessage -> P'.Seq e)
-> FeedMessage
-> [ScheduleItem]
-> [ScheduleItem]
updateScheduleHelper getter fm schedule =
Map.elems $ execState (mapM updateFeedElement $ getter fm) scheduleMap
where
scheduleMap = Map.fromList $ toMap <$> schedule
toMap x = (tripId x, x)
getTripUpdates ::
FeedMessage
-> P'.Seq TripUpdate
getTripUpdates fm = (`P'.getVal` trip_update) <$> P'.getVal fm entity
getVehiclePositions ::
FeedMessage
-> P'.Seq VehiclePosition
getVehiclePositions fm = (`P'.getVal` vehicle) <$> P'.getVal fm entity
updateFeedElement
:: ForFeedElement e
=> e -> State Schedule ()
updateFeedElement x = do
m <- get
let map' = Map.adjustWithKey (updateScheduleItem x) (getTripID x) m
put map'
| romanofski/gtfsbrisbane | src/GTFS/Realtime/Message/Schedule.hs | bsd-3-clause | 4,171 | 0 | 13 | 1,200 | 675 | 380 | 295 | 63 | 2 |
{-# OPTIONS_GHC -fplugin-opt=LiquidHaskell.Plugin:no-termination #-}
{-# LANGUAGE QuasiQuotes #-}
module Fixme where
import LiquidHaskell
import LiquidHaskell.Prelude
data Binder n = B | M (TT n)
data TT n = V Int | Other | Bind (Binder n) (TT n)
[lq| measure binderContainsV :: Binder n -> Bool |]
binderContainsV B = True
binderContainsV (M x) = containsV x
[lq| measure containsV :: TT n -> Bool |]
containsV (V i) = True
containsV (Bind b body) = (binderContainsV b) || (containsV body)
-- containsV (App f arg) = (containsV f) || (containsV arg)
-- containsV (Proj tm i) = containsV tm
containsV _ = False
prop1 = liquidAssert (containsV $ V 7)
prop2 = liquidAssert (containsV $ Bind (M (V 5)) Other)
| spinda/liquidhaskell | tests/gsoc15/working/pos/MeasureContains.hs | bsd-3-clause | 758 | 0 | 12 | 173 | 210 | 115 | 95 | 16 | 1 |
{-# LANGUAGE TypeFamilies, TypeOperators, DataKinds #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.Nat (
PNat (..), type (+), type (*), type (^), type (-)
, Fact (..)
, LTE (..)
, One, Two, Three, Four, Five, Six, Seven, Eight, Nine, Ten
) where
import Data.Type.Equality (gcastWith, (:~:) (Refl))
import qualified GHC.TypeLits as TL
import Unsafe.Coerce (unsafeCoerce)
import Prelude hiding (succ)
data PNat = Z | S PNat deriving (Eq, Show, Ord)
type family One :: PNat where One = S Z
type family Two :: PNat where Two = S One
type family Three :: PNat where Three = S Two
type family Four :: PNat where Four = S Three
type family Five :: PNat where Five = S Four
type family Six :: PNat where Six = S Five
type family Seven :: PNat where Seven = S Six
type family Eight :: PNat where Eight = S Seven
type family Nine :: PNat where Nine = S Eight
type family Ten :: PNat where Ten = S Nine
infixl 6 +
type family (+) (m :: PNat) (n :: PNat) :: PNat where
Z + n = n
S m + n = S (m + n)
infixl 6 -
type family (-) (m :: PNat) (n :: PNat) :: PNat where
m - Z = m
S m - S n = m - n
infixl 7 *
type family (*) (m :: PNat) (n :: PNat) :: PNat where
Z * n = Z
S m * n = n + m * n
infixr 8 ^
type family (^) (m :: PNat) (n :: PNat) :: PNat where
m ^ Z = S Z
m ^ S n = m * (m ^ n)
type family Fact (n :: PNat) :: PNat where
Fact Z = S Z
Fact (S n) = Fact n * S n
type family Lit (n :: TL.Nat) :: PNat where
Lit 0 = Z
Lit n = S (Lit (n TL.- 1))
type family Cmp (m :: PNat) (n :: PNat) :: Ordering where
Cmp Z Z = EQ
Cmp m Z = GT
Cmp Z n = LT
Cmp (S m) (S n) = Cmp m n
type family LTE (m :: PNat) (n :: PNat) :: Bool where
LTE Z n = True
LTE (S m) Z = False
LTE (S m) (S n) = LTE m n
| bmsherman/haskell-vect | Data/Nat.hs | bsd-3-clause | 1,743 | 7 | 10 | 462 | 834 | 494 | 340 | -1 | -1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
-------------------------------------------------------------------------------------
-- |
-- Copyright : (c) Hans Hoglund 2012-2014
--
-- License : BSD-style
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (TF,GNTD)
--
-------------------------------------------------------------------------------------
module Music.Score.Export.ArticulationNotation (
Slur(..),
Mark(..),
ArticulationNotation(..),
notateArticulation,
) where
import Data.Semigroup
import Data.Functor.Context
import Data.Functor.Adjunction (unzipR)
import Control.Lens -- ()
import Music.Score.Articulation (Articulation, Articulated(..), Separation, Accentuation)
import Music.Score.Ties (Tiable(..))
import Music.Time (Transformable(..))
import qualified Music.Score.Articulation
import qualified Music.Articulation
-- TODO need NoSlur etc?
data Slur = NoSlur | BeginSlur | EndSlur
deriving (Eq, Ord, Show)
data Mark = NoMark | Staccato | MoltoStaccato | Marcato | Accent | Tenuto
deriving (Eq, Ord, Show)
instance Monoid Slur where
mempty = NoSlur
mappend NoSlur a = a
mappend a _ = a
instance Monoid Mark where
mempty = NoMark
mappend NoMark a = a
mappend a _ = a
newtype ArticulationNotation
= ArticulationNotation { getArticulationNotation :: ([Slur], [Mark]) }
deriving (Eq, Ord, Show)
instance Wrapped ArticulationNotation where
type Unwrapped ArticulationNotation = ([Slur], [Mark])
_Wrapped' = iso getArticulationNotation ArticulationNotation
instance Rewrapped ArticulationNotation ArticulationNotation
type instance Articulation ArticulationNotation = ArticulationNotation
instance Transformable ArticulationNotation where
transform _ = id
instance Tiable ArticulationNotation where
toTied (ArticulationNotation (slur, marks))
= (ArticulationNotation (slur1, marks1),
ArticulationNotation (slur2, marks2))
where
(marks1, marks2) = splitMarks marks
(slur1, slur2) = splitSlurs slur
splitSlurs = unzipR . fmap splitSlur
splitMarks = unzipR . fmap splitMark
splitSlur NoSlur = (mempty, mempty)
splitSlur BeginSlur = (BeginSlur, mempty)
splitSlur EndSlur = (mempty, EndSlur)
splitMark NoMark = (NoMark, mempty)
splitMark Staccato = (Staccato, mempty)
splitMark MoltoStaccato = (MoltoStaccato, mempty)
splitMark Marcato = (Marcato, mempty)
splitMark Accent = (Accent, mempty)
splitMark Tenuto = (Tenuto, mempty)
instance Monoid ArticulationNotation where
mempty = ArticulationNotation ([], [])
ArticulationNotation ([], []) `mappend` y = y
x `mappend` ArticulationNotation ([], []) = x
x `mappend` y = x
getSeparationMarks :: Double -> [Mark]
getSeparationMarks = fst . getSeparationMarks'
hasSlur' :: Double -> Bool
hasSlur' = snd . getSeparationMarks'
getSeparationMarks' :: Double -> ([Mark], Bool)
getSeparationMarks' x
| x <= (-1) = ([], True)
| (-1) < x && x < 1 = ([], False)
| 1 <= x && x < 2 = ([Staccato], False)
| 2 <= x = ([MoltoStaccato], False)
getAccentMarks :: Double -> [Mark]
getAccentMarks x
| x <= (-1) = []
| (-1) < x && x < 1 = []
| 1 <= x && x < 2 = [Accent]
| 2 <= x = [Marcato]
| otherwise = []
hasSlur :: (Real (Separation t), Articulated t) => t -> Bool
hasSlur y = hasSlur' (realToFrac $ view separation $ y)
allMarks :: (Real (Separation t), Real (Accentuation t), Articulated t) => t -> [Mark]
allMarks y = mempty
<> getSeparationMarks (realToFrac $ y^.separation)
<> getAccentMarks (realToFrac $ y^.accentuation)
-- -- TODO why doesn't this work
-- notateArticulationS :: (Ord a, Articulated a, Music.Score.Articulation.Articulation a ~ Music.Articulation.Articulation) => Ctxt a -> ArticulationNotation
-- notateArticulationS = notateArticulation
notateArticulation :: (Ord a, Articulated a, Real (Separation a), Real (Accentuation a)) => Ctxt a -> ArticulationNotation
notateArticulation (getCtxt -> x) = go x
where
go (Nothing, y, Nothing) = ArticulationNotation ([], allMarks y)
go (Just x, y, Nothing) = ArticulationNotation (if hasSlur x && hasSlur y then [EndSlur] else [], allMarks y)
go (Nothing, y, Just z) = ArticulationNotation (if hasSlur y && hasSlur z then [BeginSlur] else [], allMarks y)
go (Just x, y, Just z) = ArticulationNotation (slur3 x y z, allMarks y)
where
slur3 x y z = case (hasSlur x, hasSlur y, hasSlur z) of
(True, True, True) -> [{-ContSlur-}]
(False, True, True) -> [BeginSlur]
(True, True, False) -> [EndSlur]
_ -> []
| music-suite/music-score | src/Music/Score/Export/ArticulationNotation.hs | bsd-3-clause | 5,552 | 0 | 12 | 1,281 | 1,518 | 850 | 668 | 110 | 9 |
module AERN2.Frac.Eval
(
evalDirect
, evalDI
, evalDf
, evalLip
)
where
import MixedTypesNumPrelude
import AERN2.MP.Dyadic
import AERN2.MP.Ball
import AERN2.RealFun.Operations
import AERN2.Poly.Cheb (ChPoly)
import qualified AERN2.Poly.Cheb as Cheb
import AERN2.Frac.Type
instance
(CanApply (ChPoly c) t, CanDivSameType (ApplyType (ChPoly c) t))
=>
CanApply (Frac c) t
where
type ApplyType (Frac c) t = ApplyType (ChPoly c) t
apply (Frac p q _) t = (apply p t) /! (apply q t)
-- TODO: replace with a specific instance with c~MPBall using evalDI?
instance
(CanApplyApprox (ChPoly c) t, CanDivSameType (ApplyApproxType (ChPoly c) t))
=>
CanApplyApprox (Frac c) t
where
type ApplyApproxType (Frac c) t = ApplyApproxType (ChPoly c) t
applyApprox (Frac p q _) t = (applyApprox p t) /! (applyApprox q t)
evalDirect ::
(Field t, CanAddSubMulDivCNBy t Dyadic, CanDivCNBy t Integer,
CanAddSubMulBy t c, Ring c)
=> Frac c -> t -> t
evalDirect (Frac p q _) x =
(Cheb.evalDirect p x) /! (Cheb.evalDirect q x)
evalDI :: Frac MPBall -> MPBall -> MPBall
evalDI f@(Frac p q _) =
evalDf f (Cheb.derivative p) (Cheb.derivative q)
evalDf :: Frac MPBall -> ChPoly MPBall -> ChPoly MPBall -> MPBall -> MPBall
evalDf f dp dq x =
evalLip f (abs $ Cheb.evalDirect dp x) (abs $ Cheb.evalDirect dq x) x
evalLip :: Frac MPBall -> MPBall -> MPBall -> MPBall -> MPBall
evalLip f@(Frac _ _ m) lp lq x =
fc + (hullMPBall (-errB) (errB))
where
absFc = abs fc
fc = evalDirect f c
c = centreAsBall x
eps = mpBall $ radius x
errB = m*(lp + lq*absFc)*eps
| michalkonecny/aern2 | aern2-fun-univariate/src/AERN2/Frac/Eval.hs | bsd-3-clause | 1,603 | 0 | 11 | 345 | 686 | 358 | 328 | -1 | -1 |
{-#Language DeriveFunctor
, DeriveFoldable
, DeriveTraversable
#-}
module Language.TheExperiment.Inferrer.Type where
import Language.TheExperiment.CodeGen.Type
import Data.Foldable
import Data.Traversable
import qualified Data.Map as Map
data Type a = TypeName String
| Std StdType
| Pointer a
| Array a a -- first parameter should only be a type variable or
-- an NType (type level number), eventually this would
-- be moved to the std libs, and have a constraint on
-- that parameter
| NType Integer
| Func [a] a
{-
| Pointer a
|
| Array (Maybe Integer) a
| TCall a [a]
| Struct [(String, a)]
| Union [(String, a)]
| NType Integer
| Func [a] a
-}
-- optional name, list of record fields and their types (empty for
-- non-structures), Just overloads
| Var (Maybe String) {- (RecFields a) -} [a]
deriving (Show, Eq, Ord, Functor, Foldable, Traversable)
data RecFields a = Fields (Map.Map String a)
deriving (Show, Eq, Ord, Functor, Foldable, Traversable)
noRecFields :: RecFields a
noRecFields = Fields Map.empty
-- #TODO consider making this just a list
-- I think it would simplify the logic in many places
-- (perhaps just a newtype'd list)
newtype Overloads a = Overloads [a] -- potential types
deriving (Show, Eq, Ord, Functor, Foldable, Traversable)
data FlatType = FlatType (Type FlatType)
deriving (Show, Eq, Ord)
-- the type variable 'a' is meant to be either a TypeRef or some other version
-- of Type (like say data FlatType = FlatType (Type FlatType))
data TypeDef a = SimpleType a
| AlgType [(String, a)]
| Struct [(String, a)]
| Union [(String, a)]
deriving (Show, Eq, Ord)
| jvranish/TheExperiment | src/Language/TheExperiment/Inferrer/Type.hs | bsd-3-clause | 1,988 | 0 | 9 | 663 | 334 | 199 | 135 | 29 | 1 |
{-# LANGUAGE CPP #-}
module Calculi.Lambda.Cube.TH (
-- sfo
sf
, stlc
) where
import qualified Language.Haskell.TH as TH
import qualified Language.Haskell.TH.Quote as TH
import qualified Language.Haskell.TH.Syntax as TH
import qualified Compiler.Typesystem.SystemFOmega as SFO
import qualified Compiler.Typesystem.SystemF as SF
import qualified Compiler.Typesystem.SimplyTyped as STLC
import Text.Megaparsec
import Text.Megaparsec.Error
import Calculi.Lambda.Cube
import Data.List
import Control.Monad
-- | Lambda Cube parsec type.
#if MIN_VERSION_megaparsec(5,0,0)
type LCParsec = Parsec Dec String
#else
type LCParsec = Parsec String
#endif
-- | SystemFOmega with mono and poly types represented as strings.
-- type StringSFO = SFO.SystemFOmega String String (Maybe (STLC.SimplyTyped String))
-- | SystemF with mono and poly types represented as strings.
type StringSF = SF.SystemF String String
-- | SimplyTyped with mono types represented as strings.
type StringSTLC = STLC.SimplyTyped String
{-|
Lambda Cube symbol wrapper for strings.
-}
lamcsymbol :: String -> LCParsec String
lamcsymbol = lamctoken . string
{-|
Lambda Cube parser token wrapper, expects the token followed by 0 or more spaces.
-}
lamctoken :: LCParsec a -> LCParsec a
lamctoken p = do
pval <- p
void $ many (char ' ')
return pval
{-|
Parenthesis parser combinator.
-}
paren :: LCParsec a -> LCParsec a
paren = between (lamcsymbol "(") (lamcsymbol ")")
{-|
    Wrapper that allows preceding whitespace before a token and then expects the
input to end.
-}
wrapped :: LCParsec a -> LCParsec a
wrapped p = do
void $ many (lamcsymbol " ")
p' <- p
eof
return p'
{-|
Parser for a bare variable, notated by beginning with a lowercase character.
-}
variable :: LCParsec String
variable = lamctoken ((:) <$> lowerChar <*> many (lowerChar <|> upperChar)) <?> "variable"
{-|
Parser for a bare constant, notated by beginning with an uppercase character.
-}
constant :: LCParsec String
constant = lamctoken ((:) <$> upperChar <*> many (lowerChar <|> upperChar)) <?> "constant"
{-|
given a subexpression parser, parse a sequence of subexpressions
    separated by function arrows.
-}
exprsequence :: SimpleType t => LCParsec t -> LCParsec t
exprsequence subexpr = label "expression sequence" $ do
-- parse as many subexpressions as we can (at least 1 though)
expr <- subexpr
-- optionally parse a function tail if it is present
funApply <- optional $ do
void $ lamcsymbol "->" <|> lamcsymbol "→"
exprsequence subexpr
-- if after the initial sequence it turned out this was the first
-- argument to a function expression, then we apply it as the first argument.
return (maybe expr (expr /->) funApply)
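-- A hedged illustration (the input string is an assumption, not from the
-- original source): with 'stlcexpr' below, whose subexpression parser accepts
-- constants and parenthesised expressions, an input such as @"A -> (B -> C) -> D"@
-- should parse to @typeconst "A" /-> ((typeconst "B" /-> typeconst "C") /-> typeconst "D")@,
-- i.e. arrows nest to the right and parentheses group a whole subexpression.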
{-
sfoexpr :: LCParsec StringSFO
sfoexpr = label "System-Fω expression" $
quant sfoexpr
<|> exprsequence (fmap (foldl1 (/$)) <$> some $
poly <$> variable
<|> mono <$> constant
<|> paren sfoexpr)
-}
sfexpr :: LCParsec StringSF
sfexpr = label "System-F expression" $
exprsequence (typevar <$> variable
<|> typeconst <$> constant
<|> paren sfexpr)
stlcexpr :: LCParsec StringSTLC
stlcexpr = label "Simply-typed expression" $ exprsequence (typeconst <$> constant <|> paren stlcexpr)
{-
{-|
A QuasiQuoter for SystemFOmega, allowing arbitrary type application
@[sfo| forall x. R x -> M x |] == quantify \"x\" (mono \"R\" /$ poly \"x\" /-> mono \"M\" /$ poly \"x\")@
-}
sfo :: TH.QuasiQuoter
sfo = mkqq "sfo" sfoexpr
-}
{-|
A QuasiQuoter for SystemF, allowing quantification and poly types (lower case).
@[sf| forall a b. a -> b |] == quantify \"a\" (quantify \"b\" (poly \"a\" \/-> poly \"b\"))@
-}
sf :: TH.QuasiQuoter
sf = mkqq "sf" sfexpr
{-|
A QuasiQuoter for SimplyTyped.
@[stlc| A -> B -> C |] == mono \"A\" \/-> mono \"B\" \/-> mono \"C\"@
@[stlc| (A -> B) -> B |] == (mono \"A\" \/-> mono \"B\") \/-> mono \"B\"@
-}
stlc :: TH.QuasiQuoter
stlc = mkqq "stlc" stlcexpr
{-|
Helper to generate a QuasiQuoter for an arbitrary parser with a liftable type.
-}
mkqq :: TH.Lift t => String -> LCParsec t -> TH.QuasiQuoter
mkqq pname p = TH.QuasiQuoter {
TH.quoteExp = \str -> do
loc <- TH.location
let fname = intercalate ":" [pname
, TH.loc_filename loc
, show $ TH.loc_start loc
, show $ TH.loc_end loc
]
case runParser (wrapped p) fname str of
Left err -> fail . show $ err
Right val -> TH.lift val
, TH.quotePat = error $ pname ++ " doesn't implement quotePat for this QuasiQuoter"
, TH.quoteType = error $ pname ++ " doesn't implement quoteType for this QuasiQuoter"
, TH.quoteDec = error $ pname ++ " doesn't implement quoteDec for this QuasiQuoter"
}
| Lokidottir/typerbole | src/Calculi/Lambda/Cube/TH.hs | bsd-3-clause | 5,059 | 0 | 17 | 1,282 | 836 | 447 | 389 | 69 | 2 |