code (stringlengths 5 to 1.03M) | repo_name (stringlengths 5 to 90) | path (stringlengths 4 to 158) | license (stringclasses, 15 values) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
main = do
putStrLn "Hello, what's your name?"
name <- getLine
putStrLn ("Hey " ++ name ++ ", you rock!")
| ku00/h-book | src/WhatsYourName.hs | bsd-3-clause | 117 | 0 | 10 | 32 | 36 | 16 | 20 | 4 | 1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section{Code output phase}
-}
{-# LANGUAGE CPP #-}
module CodeOutput( codeOutput, outputForeignStubs ) where
#include "HsVersions.h"
import AsmCodeGen ( nativeCodeGen )
import LlvmCodeGen ( llvmCodeGen )
import UniqSupply ( mkSplitUniqSupply )
import Finder ( mkStubPaths )
import PprC ( writeCs )
import CmmLint ( cmmLint )
import Packages
import Cmm ( RawCmmGroup )
import HscTypes
import DynFlags
import Config
import SysTools
import Stream (Stream)
import qualified Stream
import ErrUtils
import Outputable
import Module
import SrcLoc
import Control.Exception
import System.Directory
import System.FilePath
import System.IO
{-
************************************************************************
* *
\subsection{Steering}
* *
************************************************************************
-}
codeOutput :: DynFlags
-> Module
-> FilePath
-> ModLocation
-> ForeignStubs
-> [PackageKey]
-> Stream IO RawCmmGroup () -- Compiled C--
-> IO (FilePath,
(Bool{-stub_h_exists-}, Maybe FilePath{-stub_c_exists-}))
codeOutput dflags this_mod filenm location foreign_stubs pkg_deps cmm_stream
=
do {
-- Lint each CmmGroup as it goes past
; let linted_cmm_stream =
if gopt Opt_DoCmmLinting dflags
then Stream.mapM do_lint cmm_stream
else cmm_stream
do_lint cmm = do
{ showPass dflags "CmmLint"
; case cmmLint dflags cmm of
Just err -> do { log_action dflags dflags SevDump noSrcSpan defaultDumpStyle err
; ghcExit dflags 1
}
Nothing -> return ()
; return cmm
}
; stubs_exist <- outputForeignStubs dflags this_mod location foreign_stubs
; case hscTarget dflags of {
HscAsm -> outputAsm dflags this_mod location filenm
linted_cmm_stream;
HscC -> outputC dflags filenm linted_cmm_stream pkg_deps;
HscLlvm -> outputLlvm dflags filenm linted_cmm_stream;
HscInterpreted -> panic "codeOutput: HscInterpreted";
HscNothing -> panic "codeOutput: HscNothing"
}
; return (filenm, stubs_exist)
}
doOutput :: String -> (Handle -> IO a) -> IO a
doOutput filenm io_action = bracket (openFile filenm WriteMode) hClose io_action
{-
************************************************************************
* *
\subsection{C}
* *
************************************************************************
-}
outputC :: DynFlags
-> FilePath
-> Stream IO RawCmmGroup ()
-> [PackageKey]
-> IO ()
outputC dflags filenm cmm_stream packages
= do
-- ToDo: make the C backend consume the C-- incrementally, by
-- pushing the cmm_stream inside (c.f. nativeCodeGen)
rawcmms <- Stream.collect cmm_stream
-- figure out which header files to #include in the generated .hc file:
--
-- * extra_includes from packages
-- * -#include options from the cmdline and OPTIONS pragmas
-- * the _stub.h file, if there is one.
--
let rts = getPackageDetails dflags rtsPackageKey
let cc_injects = unlines (map mk_include (includes rts))
mk_include h_file =
case h_file of
'"':_{-"-} -> "#include "++h_file
'<':_ -> "#include "++h_file
_ -> "#include \""++h_file++"\""
let pkg_names = map packageKeyString packages
doOutput filenm $ \ h -> do
hPutStr h ("/* GHC_PACKAGES " ++ unwords pkg_names ++ "\n*/\n")
hPutStr h cc_injects
writeCs dflags h rawcmms
{-
************************************************************************
* *
\subsection{Assembler}
* *
************************************************************************
-}
outputAsm :: DynFlags -> Module -> ModLocation -> FilePath
-> Stream IO RawCmmGroup ()
-> IO ()
outputAsm dflags this_mod location filenm cmm_stream
| cGhcWithNativeCodeGen == "YES"
= do ncg_uniqs <- mkSplitUniqSupply 'n'
debugTraceMsg dflags 4 (text "Outputting asm to" <+> text filenm)
_ <- {-# SCC "OutputAsm" #-} doOutput filenm $
\h -> {-# SCC "NativeCodeGen" #-}
nativeCodeGen dflags this_mod location h ncg_uniqs cmm_stream
return ()
| otherwise
= panic "This compiler was built without a native code generator"
{-
************************************************************************
* *
\subsection{LLVM}
* *
************************************************************************
-}
outputLlvm :: DynFlags -> FilePath -> Stream IO RawCmmGroup () -> IO ()
outputLlvm dflags filenm cmm_stream
= do ncg_uniqs <- mkSplitUniqSupply 'n'
{-# SCC "llvm_output" #-} doOutput filenm $
\f -> {-# SCC "llvm_CodeGen" #-}
llvmCodeGen dflags f ncg_uniqs cmm_stream
{-
************************************************************************
* *
\subsection{Foreign import/export}
* *
************************************************************************
-}
outputForeignStubs :: DynFlags -> Module -> ModLocation -> ForeignStubs
-> IO (Bool, -- Header file created
Maybe FilePath) -- C file created
outputForeignStubs dflags mod location stubs
= do
let stub_h = mkStubPaths dflags (moduleName mod) location
stub_c <- newTempName dflags "c"
case stubs of
NoStubs ->
return (False, Nothing)
ForeignStubs h_code c_code -> do
let
stub_c_output_d = pprCode CStyle c_code
stub_c_output_w = showSDoc dflags stub_c_output_d
-- Header file protos for "foreign export"ed functions.
stub_h_output_d = pprCode CStyle h_code
stub_h_output_w = showSDoc dflags stub_h_output_d
createDirectoryIfMissing True (takeDirectory stub_h)
dumpIfSet_dyn dflags Opt_D_dump_foreign
"Foreign export header file" stub_h_output_d
-- we need the #includes from the rts package for the stub files
let rts_includes =
let rts_pkg = getPackageDetails dflags rtsPackageKey in
concatMap mk_include (includes rts_pkg)
mk_include i = "#include \"" ++ i ++ "\"\n"
-- wrapper code mentions the ffi_arg type, which comes from ffi.h
ffi_includes | cLibFFI = "#include \"ffi.h\"\n"
| otherwise = ""
stub_h_file_exists
<- outputForeignStubs_help stub_h stub_h_output_w
("#include \"HsFFI.h\"\n" ++ cplusplus_hdr) cplusplus_ftr
dumpIfSet_dyn dflags Opt_D_dump_foreign
"Foreign export stubs" stub_c_output_d
stub_c_file_exists
<- outputForeignStubs_help stub_c stub_c_output_w
("#define IN_STG_CODE 0\n" ++
"#include \"Rts.h\"\n" ++
rts_includes ++
ffi_includes ++
cplusplus_hdr)
cplusplus_ftr
-- We're adding the default hc_header to the stub file, but this
-- isn't really HC code, so we need to define IN_STG_CODE==0 to
-- avoid the register variables etc. being enabled.
return (stub_h_file_exists, if stub_c_file_exists
then Just stub_c
else Nothing )
where
cplusplus_hdr = "#ifdef __cplusplus\nextern \"C\" {\n#endif\n"
cplusplus_ftr = "#ifdef __cplusplus\n}\n#endif\n"
-- Don't use doOutput for dumping the f. export stubs
-- since it is more than likely that the stubs file will
-- turn out to be empty, in which case no file should be created.
outputForeignStubs_help :: FilePath -> String -> String -> String -> IO Bool
outputForeignStubs_help _fname "" _header _footer = return False
outputForeignStubs_help fname doc_str header footer
= do writeFile fname (header ++ doc_str ++ '\n':footer ++ "\n")
return True
| ghc-android/ghc | compiler/main/CodeOutput.hs | bsd-3-clause | 9,182 | 1 | 19 | 3,241 | 1,428 | 721 | 707 | 145 | 7 |
{-# LANGUAGE TypeFamilies #-}
module T5306b where
data family F a
data instance F Int = FInt
| urbanslug/ghc | testsuite/tests/rename/should_compile/T5306b.hs | bsd-3-clause | 99 | 0 | 5 | 23 | 22 | 14 | 8 | 4 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Cook.BuildFile
( BuildFileId(..), BuildFile(..), BuildBase(..), DockerCommand(..), TxRef
, buildFileIdAddParent
, dockerCmdToText
, parseBuildFile
, buildTxScripts, copyTarAndUnpack
, FilePattern, matchesFilePattern, parseFilePattern
, UnpackMode(..)
-- don't use - only exported for testing
, parseBuildFileText, emptyBuildFile
)
where
import Cook.Types
import Cook.Util
import Control.Applicative
import Control.Monad
import Data.Attoparsec.Text hiding (take)
import Data.Char
import Data.Hashable
import Data.List (find)
import Data.Maybe
import System.Exit (ExitCode(..))
import System.FilePath
import System.IO.Temp
import System.Process (readProcessWithExitCode)
import qualified Data.ByteString.Base16 as B16
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import qualified Data.Vector as V
newtype BuildFileId
= BuildFileId { unBuildFileId :: T.Text }
deriving (Show, Eq)
buildFileIdAddParent :: FilePath -> BuildFileId -> BuildFileId
buildFileIdAddParent fp (BuildFileId f) =
BuildFileId $ T.pack (fp </> T.unpack f)
newtype TxRef
= TxRef { _unTxRef :: Int }
deriving (Show, Eq, Hashable)
data BuildFile
= BuildFile
{ bf_name :: BuildFileId
, bf_base :: BuildBase
, bf_unpackTarget :: Maybe FilePath
, bf_dockerCommands :: V.Vector (Either TxRef DockerCommand)
, bf_include :: V.Vector FilePattern
, bf_prepare :: V.Vector T.Text
, bf_downloadDeps :: V.Vector DownloadUrl
, bf_transactions :: HM.HashMap TxRef (V.Vector T.Text)
, bf_cookCopy :: HM.HashMap FilePath [(FilePath, FilePath)]
, bf_requiredVars :: V.Vector (T.Text, Maybe T.Text)
} deriving (Show, Eq)
data BuildBase
= BuildBaseDocker DockerImage
| BuildBaseCook BuildFileId
deriving (Show, Eq)
data BuildFileLine
= IncludeLine FilePattern
-- ^ copy files from data directory to temporary cook directory
| BaseLine BuildBase
-- ^ use either cook file or docker image as base
| PrepareLine T.Text
-- ^ run shell command in temporary cook directory
| UnpackLine FilePath
-- ^ where should the context be unpacked to?
| ScriptLine FilePath (Maybe T.Text)
-- ^ execute a script in cook directory to generate more cook commands
| BeginTxLine
| CommitTxLine
| DownloadLine DownloadUrl FilePath
-- ^ download a file to a location
| CookCopyLine FilePath FilePath FilePath
-- ^ copy a file or folder from a cook-image to the _cookprep folder
| RequireEnvVarLine (T.Text, Maybe T.Text)
-- ^ require a compile time environment variable with an optional default
| DockerLine DockerCommand
-- ^ regular docker command
deriving (Show, Eq)
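-- An illustrative cook file exercising the line types above (hypothetical
-- file names and commands, shown only as an example of the syntax parsed
-- further below):
--
--   BASE COOK base.cook
--   INCLUDE src/*.hs
--   PREPARE cabal build
--   UNPACK /opt/app
--   BEGIN
--   RUN apt-get update
--   COMMIT
--   RUN ./run.sh
--   COOKVAR APP_ENV production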
data DockerCommand
= DockerCommand
{ dc_command :: T.Text
, dc_args :: T.Text
} deriving (Show, Eq)
newtype FilePattern
= FilePattern { _unFilePattern :: [PatternPart] }
deriving (Show, Eq)
data PatternPart
= PatternText String
| PatternWildCard
deriving (Show, Eq)
dockerCmdToText :: DockerCommand -> T.Text
dockerCmdToText (DockerCommand cmd args) =
T.concat [cmd, " ", args]
matchesFilePattern :: FilePattern -> FilePath -> Bool
matchesFilePattern (FilePattern []) [] = True
matchesFilePattern (FilePattern []) _ = False
matchesFilePattern (FilePattern _) [] = False
matchesFilePattern (FilePattern (x : xs)) fp =
case x of
PatternText t ->
if all (uncurry (==)) (zip t fp)
then matchesFilePattern (FilePattern xs) (drop (length t) fp)
else False
PatternWildCard ->
case xs of
(PatternText nextToken : _) ->
case T.breakOn (T.pack nextToken) (T.pack fp) of
(_, "") -> False
(_, rest) ->
matchesFilePattern (FilePattern xs) (T.unpack rest)
(PatternWildCard : _) ->
matchesFilePattern (FilePattern xs) fp
[] -> True
buildTxScripts :: FilePath -> BuildFile -> IO (V.Vector DockerCommand, SHA1)
buildTxScripts dockerFileEnvDir bf =
withSystemTempDirectory "cooktx" $ \txDir ->
do let dirName =
T.unpack $ T.decodeUtf8 $ B16.encode $ unSha1 $
quickHash $ map T.encodeUtf8 $ V.toList $ V.concat $ HM.elems $
bf_transactions bf
txSh <-
forM (HM.toList (bf_transactions bf)) $ \(TxRef refId, actions) ->
do let f = "tx_" ++ show refId ++ ".sh"
sh = mkScript refId actions
T.writeFile (txDir </> f) sh
return (f, T.encodeUtf8 sh)
case (null txSh) of
False ->
do compressFilesInDir False tarFile txDir (map fst txSh)
return ( V.concat [ pre dirName
, V.map (mkTxLine dirName) (bf_dockerCommands bf)
, post dirName]
, quickHash (map snd txSh)
)
True ->
do checkedCommands <-
V.forM (bf_dockerCommands bf) $ \c ->
case c of
Left (TxRef refId) ->
fail $ "Found undefined transaction reference: " ++ show refId
Right cmd ->
return cmd
return (checkedCommands, quickHash ["no-tx"])
where
mkTxLine dirName l =
case l of
Left (TxRef refId) ->
DockerCommand "RUN" (T.pack $ "bash "
++ (dockerTarDir dirName </> "tx_" ++ show refId ++ ".sh"))
Right cmd -> cmd
pre dirName =
V.fromList (copyTarAndUnpack OverwriteExisting "tx.tar.gz" (dockerTarDir dirName))
post dirName =
V.fromList
[ DockerCommand "RUN" (T.pack $ "rm -rf " ++ (dockerTarDir dirName))
]
dockerTarDir dirName = "/tmp/tx_" ++ dirName
tarFile = dockerFileEnvDir </> "tx.tar.gz"
mkScript txId scriptLines =
T.unlines ("#!/bin/bash" : "# auto generated by dockercook"
: (T.pack $ "echo 'DockercookTx # " ++ show txId ++ "'")
: "set -e" : "set -x"
: (map (\ln -> T.concat ["( ", ln, " )"]) $ V.toList scriptLines)
)
data UnpackMode
= SkipExisting
| OverwriteExisting
copyTarAndUnpack :: UnpackMode -> FilePath -> FilePath -> [DockerCommand]
copyTarAndUnpack um tarName imageDest =
[ DockerCommand "COPY" (T.pack $ tarName ++ " /" ++ tarName)
, DockerCommand "RUN" $ T.pack $
"mkdir -p " ++ imageDest
++ " && /usr/bin/env tar xv "
++ (case um of
SkipExisting -> "--skip-old-files"
OverwriteExisting -> "--overwrite"
)
++ " -f /" ++ tarName ++ " -C " ++ imageDest
++ " && rm -rf /" ++ tarName
]
emptyBuildFile :: BuildFileId -> BuildBase -> BuildFile
emptyBuildFile myId base =
BuildFile
{ bf_name = myId
, bf_base = base
, bf_unpackTarget = Nothing
, bf_dockerCommands = V.empty
, bf_include = V.empty
, bf_prepare = V.empty
, bf_downloadDeps = V.empty
, bf_transactions = HM.empty
, bf_cookCopy = HM.empty
, bf_requiredVars = V.empty
}
constructBuildFile :: FilePath -> FilePath -> [BuildFileLine] -> IO (Either String BuildFile)
constructBuildFile cookDir fp theLines =
case baseLine of
Just (BaseLine base) ->
baseCheck base $ handleLine (Right (emptyBuildFile myId base)) Nothing theLines
_ ->
return $ Left "Missing BASE line!"
where
checkDocker (DockerCommand cmd _) action =
let lowerCmd = T.toLower cmd
in case lowerCmd of
"from" -> return $ Left "FROM command is not allowed in dockercook files"
"add" ->
do logWarn "ADD commands are not recommended as the dependencies aren't tracked. Use PREPARE!"
action
"copy" ->
do logWarn "COPY commands are not recommended as the dependencies aren't tracked. Use PREPARE!"
action
_ -> action
baseCheck base onSuccess =
case base of
BuildBaseCook cookId ->
if cookId == myId
then return $ Left "Recursive BASE line! You are referencing yourself."
else onSuccess
_ -> onSuccess
myId =
BuildFileId (T.pack fp)
baseLine =
flip find theLines $ \l ->
case l of
BaseLine _ -> True
_ -> False
handleLine mBuildFile _ [] =
return mBuildFile
handleLine mBuildFile inTx (line : rest) =
case mBuildFile of
Left err ->
return $ Left err
Right buildFile ->
case inTx of
Just currentTx ->
case line of
DockerLine dockerCmd ->
checkDocker dockerCmd $ handleLineTx dockerCmd buildFile currentTx rest
ScriptLine scriptLoc mArgs ->
handleScriptLine scriptLoc mArgs buildFile inTx rest
CommitTxLine ->
handleLine (Right buildFile) Nothing rest
_ -> return $ Left "Only RUN and SCRIPT commands are allowed in transactions"
Nothing ->
case line of
CookCopyLine cookFile containerPath hostPath ->
handleLine (cookCopyLine cookFile containerPath hostPath buildFile) inTx rest
DownloadLine url target ->
handleLine (downloadLine url target buildFile) inTx rest
ScriptLine scriptLoc mArgs ->
handleScriptLine scriptLoc mArgs buildFile inTx rest
DockerLine dockerCmd ->
checkDocker dockerCmd $
handleLine (Right $ buildFile { bf_dockerCommands = V.snoc (bf_dockerCommands buildFile) (Right dockerCmd) }) inTx rest
IncludeLine pattern ->
handleLine (Right $ buildFile { bf_include = V.snoc (bf_include buildFile) pattern }) inTx rest
PrepareLine cmd ->
handleLine (Right $ buildFile { bf_prepare = V.snoc (bf_prepare buildFile) cmd }) inTx rest
UnpackLine unpackTarget ->
handleLine (Right $ buildFile { bf_unpackTarget = Just unpackTarget }) inTx rest
BeginTxLine ->
let nextTxId = TxRef (HM.size (bf_transactions buildFile))
in handleLine (Right $ buildFile { bf_dockerCommands = V.snoc (bf_dockerCommands buildFile) (Left nextTxId) })
(Just nextTxId) rest
CommitTxLine ->
return $ Left "COMMIT is missing a BEGIN!"
RequireEnvVarLine var ->
handleLine
(Right $ buildFile
{ bf_requiredVars = V.snoc (bf_requiredVars buildFile) var
}
)
inTx rest
_ ->
handleLine mBuildFile inTx rest
cookCopyLine cookFile containerPath hostPath buildFile =
Right $
buildFile
{ bf_cookCopy =
HM.insertWith (\new old -> new ++ old) cookFile [(containerPath, hostPath)] (bf_cookCopy buildFile)
}
downloadLine url@(DownloadUrl realUrl) target buildFile =
Right $
buildFile
{ bf_downloadDeps = V.snoc (bf_downloadDeps buildFile) url
, bf_dockerCommands =
V.snoc (bf_dockerCommands buildFile) $
Right (DockerCommand "ADD" (T.concat [realUrl, " ", T.pack target]))
}
handleScriptLine scriptLoc mArgs buildFile inTx rest =
do let bashCmd = (cookDir </> scriptLoc) ++ " " ++ T.unpack (fromMaybe "" mArgs)
(ec, stdOut, stdErr) <-
readProcessWithExitCode "bash" ["-c", bashCmd] ""
logDebug ("SCRIPT " ++ bashCmd ++ " returned: \n" ++ stdOut ++ "\n" ++ stdErr)
if ec == ExitSuccess
then case parseOnly pBuildFile (T.pack stdOut) of
Left parseError ->
return $ Left ("Failed to parse output of SCRIPT line " ++ bashCmd
++ ": " ++ parseError ++ "\nOutput was:\n" ++ stdOut)
Right moreLines ->
handleLine (Right buildFile) inTx (moreLines ++ rest)
else return $ Left ("Failed to run SCRIPT line " ++ bashCmd
++ ": " ++ stdOut ++ "\n" ++ stdErr)
handleLineTx (DockerCommand cmd args) buildFile txRef rest =
if (T.toLower cmd /= "run")
then return $ Left ("Only RUN commands are allowed in transaction blocks!")
else do let updateF _ oldV =
V.snoc oldV args
buildFile' =
buildFile
{ bf_transactions = HM.insertWith updateF txRef (V.singleton args) (bf_transactions buildFile)
}
handleLine (Right buildFile') (Just txRef) rest
parseBuildFile :: FilePath -> IO (Either String BuildFile)
parseBuildFile fp =
do t <- T.readFile fp
parseBuildFileText fp t
parseBuildFileText :: FilePath -> T.Text -> IO (Either String BuildFile)
parseBuildFileText fp t =
case parseOnly pBuildFile t of
Left err ->
return $ Left err
Right theLines ->
constructBuildFile (takeDirectory fp) fp theLines
parseFilePattern :: T.Text -> Either String FilePattern
parseFilePattern pattern =
parseOnly pFilePattern pattern
isValidFileNameChar :: Char -> Bool
isValidFileNameChar c =
c /= ' ' && c /= '\n' && c /= '\t'
pBuildFile :: Parser [BuildFileLine]
pBuildFile =
many1 lineP <* endOfInput
where
finish =
pComment *> ((() <$ many endOfLine) <|> endOfInput)
lineP =
(many (pComment <* endOfLine)) *> (skipSpace *> lineP')
lineP' =
IncludeLine <$> (pIncludeLine <* finish) <|>
BaseLine <$> (pBuildBase <* finish) <|>
PrepareLine <$> (pPrepareLine <* finish) <|>
UnpackLine <$> (pUnpackLine <* finish) <|>
(pScriptLine <* finish) <|>
BeginTxLine <$ (pBeginTx <* finish) <|>
CommitTxLine <$ (pCommitTx <* finish) <|>
(pDownloadLine <* finish) <|>
(pCookCopyLine <* finish) <|>
RequireEnvVarLine <$> (pCookVar <* finish) <|>
DockerLine <$> (pDockerCommand <* finish)
pCookVar :: Parser (T.Text, Maybe T.Text)
pCookVar =
asciiCI "COOKVAR" *> skipSpace *> valP <* skipSpace
where
valP =
(,)
<$> (takeWhile1 (\x -> isAlphaNum x || x == '_'))
<*> (optional $ T.strip <$> takeWhile1 (not . eolOrComment))
pBeginTx :: Parser ()
pBeginTx = asciiCI "BEGIN" *> skipSpace
pCommitTx :: Parser ()
pCommitTx = asciiCI "COMMIT" *> skipSpace
pUnpackLine :: Parser FilePath
pUnpackLine =
T.unpack <$> ((asciiCI "UNPACK" *> skipSpace) *> takeWhile1 isValidFileNameChar)
pBuildBase :: Parser BuildBase
pBuildBase =
(asciiCI "BASE" *> skipSpace) *> pBase
where
pBase =
BuildBaseDocker <$> (asciiCI "DOCKER" *> skipSpace *> (DockerImage <$> takeWhile1 (not . eolOrComment))) <|>
BuildBaseCook <$> (asciiCI "COOK" *> skipSpace *> (BuildFileId <$> takeWhile1 isValidFileNameChar))
pDockerCommand :: Parser DockerCommand
pDockerCommand =
DockerCommand <$> (takeWhile1 isAlpha <* skipSpace)
<*> (T.stripEnd <$> takeWhile1 (not . eolOrComment))
eolOrComment :: Char -> Bool
eolOrComment x =
isEndOfLine x || x == '#'
eolOrCommentOrSpace :: Char -> Bool
eolOrCommentOrSpace x =
isEndOfLine x || x == '#' || isSpace x
pComment :: Parser ()
pComment =
skipSpace <* optional (char '#' *> skipWhile (not . isEndOfLine))
pIncludeLine :: Parser FilePattern
pIncludeLine =
(asciiCI "INCLUDE" *> skipSpace) *> pFilePattern
pDownloadLine :: Parser BuildFileLine
pDownloadLine =
DownloadLine <$> (DownloadUrl <$> ((asciiCI "DOWNLOAD" *> skipSpace) *> (takeWhile1 (not . isSpace))))
<*> (T.unpack <$> (skipSpace *> takeWhile1 (not . eolOrCommentOrSpace)))
pCookCopyLine :: Parser BuildFileLine
pCookCopyLine =
CookCopyLine <$> ((asciiCI "COOKCOPY" *> skipSpace) *> (T.unpack <$> takeWhile1 isValidFileNameChar))
<*> (T.unpack <$> (skipSpace *> takeWhile1 isValidFileNameChar))
<*> (T.unpack <$> (skipSpace *> takeWhile1 isValidFileNameChar))
pScriptLine :: Parser BuildFileLine
pScriptLine =
ScriptLine <$> (T.unpack <$> ((asciiCI "SCRIPT" *> skipSpace) *> (takeWhile1 isValidFileNameChar)))
<*> (optional $ T.stripEnd <$> takeWhile1 (not . eolOrComment))
pPrepareLine :: Parser T.Text
pPrepareLine =
(asciiCI "PREPARE" *> skipSpace) *> takeWhile1 (not . eolOrComment)
pFilePattern :: Parser FilePattern
pFilePattern =
FilePattern <$> many1 pPatternPart
where
pPatternPart =
PatternWildCard <$ char '*' <|>
PatternText <$> (T.unpack <$> takeWhile1 (\x -> x /= '*' && (not $ isSpace x)))
| factisresearch/dockercook | src/lib/Cook/BuildFile.hs | mit | 18,051 | 215 | 22 | 5,999 | 4,696 | 2,462 | 2,234 | 388 | 27 |
module Simulator(run) where
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Control.Exception
import Data.Array
import Data.Bits
import Data.Int
import System.IO
import Syntax
import MifParser
type Simulator = StateT SIMPLE IO
initialSimple :: [Instruction] -> String -> IO SIMPLE
initialSimple insts ramFile = do
withFile ramFile ReadMode $ \handle ->
do
ramData <- parseMif ramFile
ramSize_ <- evaluate $ length ramData
let ramSize = (fromIntegral ramSize_) :: Int16
return $ SIMPLE { pc = 0
, instruction = insts
, registerFile = array (0, 7) [(i, 0) | i <- [0..7]]
, ram = array (0, ramSize-1) ramData
, code_c = False
, code_v = False
, code_s = False
, code_z = False }
run :: [Instruction] -> String -> IO (Int, SIMPLE)
run insts ramFile = do
simple <- initialSimple insts ramFile
runStateT (runBody 0) simple
runBody :: Int -> Simulator Int
runBody cnt = do
inst <- updatePC 1
case inst of
Nothing -> return cnt
(Just i) -> runInst i >> runBody (cnt+1)
updatePC :: Int16 -> Simulator (Maybe Instruction)
updatePC d = do
simple <- get
let insts = instruction simple
prcnt16 = pc simple
prcnt = (fromIntegral prcnt16) :: Int
put $ simple { pc = prcnt16 + d }
return $ if prcnt < 0 || prcnt >= length insts
then Nothing
else Just (insts !! prcnt)
runInst :: Instruction -> Simulator ()
runInst (Prim op rd rs) = primOp op rd rs
runInst (Shift op rd d) = shiftOp op rd d
runInst (Input rd) = liftIO getLine >>= (writeReg rd) . read
runInst (Output rs) = do { v <- readReg rs;
liftIO $ print v }
runInst Nop = return ()
runInst Halt = do { s <- get;
put $ s { pc = -1 } }
runInst (Load ra d rb) = loadMem ra d rb
runInst (Store ra d rb) = storeMem ra d rb
runInst (LoadIm rb d) = setCodeLogic d >> writeReg rb d
runInst (AddI rb d) = do {
v <- readReg rb;
setCodeLogic (v+d) >> writeReg rb (v+d) }
runInst (UncondBr d) = updatePC d >> return ()
runInst (CondBr op d) = condBrOp op d
readReg :: Int16 -> Simulator Int16
readReg rs = do
regFile <- liftM registerFile get
return $ regFile ! rs
loadMem :: Int16 -> Int16 -> Int16 -> Simulator ()
loadMem ra d rb = do
simple <- get
let regFile = registerFile simple
dataRam = ram simple
put $ simple { registerFile = regFile // [(ra, dataRam ! ((regFile ! rb)+d))] }
storeMem :: Int16 -> Int16 -> Int16 -> Simulator ()
storeMem ra d rb = do
simple <- get
let regFile = registerFile simple
dataRam = ram simple
put $ simple { ram = dataRam // [((regFile ! rb) + d, regFile ! ra)] }
(.^.) :: Bool -> Bool -> Bool
(.^.) a b = (not a && b) || (a && not b)
condBrOp :: String -> Int16 -> Simulator ()
condBrOp op d = do
simple <- get
let br = case op of "BE" -> code_c simple
"BLT" -> code_s simple .^. code_v simple
"BLE" -> code_z simple || (code_s simple .^. code_v simple)
"BNE" -> not $ code_z simple
when br (put $ simple { pc = pc simple + d })
primOp :: String -> Int16 -> Int16 -> Simulator ()
primOp op rd rs = do
simple <- get
let reg = registerFile simple
rdv = reg ! rd
rsv = reg ! rs
case op of
"ADD" -> let res = rdv + rsv in (setCodeArith res rdv rsv) >> writeReg rd res
"SUB" -> let res = rdv - rsv in (setCodeArith res rdv (-rsv)) >> writeReg rd res
"AND" -> let res = rdv.&.rsv in (setCodeLogic res) >> writeReg rd res
"OR" -> let res = rdv.|.rsv in (setCodeLogic res) >> writeReg rd res
"XOR" -> let res = xor rdv rsv in (setCodeLogic res) >> writeReg rd res
"CMP" -> let res = rdv - rsv in (setCodeArith res rdv (-rsv)) >> return ()
"MOV" -> let res = rsv in (setCodeLogic res) >> writeReg rd res
setCodeArith :: Int16 -> Int16 -> Int16 -> Simulator ()
setCodeArith res rdv rsv = do
simple <- get
put $ simple { code_s = res < 0
, code_z = res == 0
, code_c = ((rdv < 0 || rsv < 0) && res >= 0) || (rdv < 0 && rsv < 0)
, code_v = (rdv >= 0 && rsv >= 0 && res < 0) ||
(rdv < 0 && rsv < 0 && res >= 0) }
setCodeLogic :: Int16 -> Simulator ()
setCodeLogic res = do
simple <- get
put $ simple { code_s = res < 0
, code_z = res == 0
, code_c = False
, code_v = False }
shiftOp :: String -> Int16 -> Int16 -> Simulator ()
shiftOp op rd d = do
simple <- get
let rdv = (registerFile simple) ! rd
d' = (fromIntegral $ d) :: Int
case op of
"SLL" -> let res = rdv `shift` d' in setCodeShift res rdv d' False >> writeReg rd res
"SLR" -> let res = rdv `rotate` d' in setCodeLogic res >> writeReg rd res
"SRL" -> let res = rdv `shiftRR` d' in setCodeShift res rdv d' True >> writeReg rd res
"SRA" -> let res = rdv `shift`(-d') in setCodeShift res rdv d' True >> writeReg rd res
shiftRR :: Int16 -> Int -> Int16
shiftRR rdv d = (rdv `shiftR` d) .&. (bit (16-d+1))-1
setCodeShift :: Int16 -> Int16 -> Int -> Bool -> Simulator ()
setCodeShift res rdv d isRight = do
simple <- get
let res' = (fromIntegral res) :: Int
shiftOut = d > 0 && (if isRight then res .&. bit (d-1) else res .&. bit (16-d+1)) > 0
put $ simple { code_s = res < 0
, code_z = res == 0
, code_c = shiftOut
, code_v = False }
writeReg :: Int16 -> Int16 -> Simulator ()
writeReg rd res = do
simple <- get
let newRegFile = (registerFile simple) // [(rd, res)]
put $ simple { registerFile = newRegFile }
| yu-i9/HaSS | src/Simulator.hs | mit | 5,965 | 12 | 38 | 1,954 | 2,494 | 1,253 | 1,241 | -1 | -1 |
module Target.Config
( isSpecial
, isConfig
, load
, merge
, vars
, values
, setValue
, option
, setOption
, set
, debug
, setDebug
, style
, metadata
, resources
, latex
, parseBool
) where
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Loops
import Control.Dangerous hiding ( Warning )
import Data.Char
import Data.List.Utils hiding ( merge )
import qualified Data.Configger as Configger
import Data.Configger ( Config )
import Data.Maybe
import System.Directory
import System.FilePath
import Text.Printf
-- | Constants
configFile :: String
configFile = "config"
variableSection, optionSection, localSection :: String
variableSection = "VARIABLES"
optionSection = "OPTIONS"
localSection = "LOCAL"
styleFile, resourceFile, metadataFile :: String
styleFile = "style.css"
resourceFile = "resources.odt"
metadataFile = "metadata.xml"
styleOption, resourceOption, metadataOption :: String
styleOption = "style"
resourceOption = "resources"
metadataOption = "metadata"
latexOption, debugOption :: String
latexOption = "latex"
debugOption = "debug"
-- | Config file loading and manipulation
isCover :: String -> Bool
isCover f = startswith "cover-image." name || startswith "cover." name
where name = takeFileName f
isSpecial :: FilePath -> Bool
isSpecial f = isCover f || (takeFileName f) `elem` special where
special = [configFile, styleFile, resourceFile, metadataFile]
isConfig :: FilePath -> Bool
isConfig = (== configFile) . takeFileName
raw :: FilePath -> DangerousT IO Config
raw dir = do
let cfile = dir </> configFile
hasConfig <- liftIO $ doesFileExist cfile
if hasConfig then Configger.load variableSection cfile else return []
load :: FilePath -> DangerousT IO Config
load path = do
let sfile = path </> styleFile
let rfile = path </> resourceFile
let mfile = path </> metadataFile
hasStyle <- liftIO $ doesFileExist sfile
hasResources <- liftIO $ doesFileExist rfile
hasMetadata <- liftIO $ doesFileExist mfile
basic <- raw path
let setStyle c = do
css <- liftIO $ readFile sfile
return $ setOption styleOption css c
let setMetadata c = do
meta <- liftIO $ readFile mfile
return $ setOption metadataOption meta c
let setResources = return . setOption resourceOption rfile
let setters = [setStyle | hasStyle] ++
[setResources | hasResources] ++
[setMetadata | hasMetadata]
conf <- concatM setters basic
let locals = Configger.items localSection conf
let mapped = map (\(x, y) -> (x, path </> y)) locals
return $ Configger.mergeSection (variableSection, mapped) conf
merge :: Config -> FilePath -> String -> DangerousT IO Config
merge conf dir fmt = let name = takeFileName dir in do
new <- load dir
when (isJust (option styleOption new) && fmt /= "epub")
(warn $ IgnoringStyle name)
when (isJust (option metadataOption new) && fmt /= "epub")
(warn $ IgnoringMetadata name)
when (isJust (option resourceOption new) && fmt /= "odt")
(warn $ IgnoringResources name)
return $ Configger.merge new conf
vars :: Config -> [(String, String)]
vars = Configger.items variableSection
values :: String -> Config -> [String]
values str conf = map snd $ filter ((str ==) . fst) (vars conf)
setValue :: String -> String -> Config -> Config
setValue = Configger.set variableSection
option :: String -> Config -> Maybe String
option = Configger.get optionSection
setOption :: String -> String -> Config -> Config
setOption = Configger.set optionSection
set :: String -> String -> Config -> Config
set = Configger.set variableSection
debug :: Config -> Bool
debug conf = maybe False parseBool (option debugOption conf)
setDebug :: Config -> Config
setDebug = Configger.set optionSection debugOption (show True)
style :: Config -> Maybe String
style = option styleOption
metadata :: Config -> Maybe String
metadata = option metadataOption
resources :: Config -> Maybe String
resources = option resourceOption
latex :: Config -> Maybe String
latex = option latexOption
parseBool :: String -> Bool
parseBool = (`elem` ["true", "yes", "on", "1"]) . map toLower
data Warning = IgnoringStyle String
| IgnoringResources String
| IgnoringMetadata String
instance Show Warning where
show (IgnoringStyle name) = "WARNING: " ++
printf "Ignoring style file in %s\n" name
show (IgnoringResources name) = "WARNING: " ++
printf "Ignoring resource file in %s\n" name
show (IgnoringMetadata name) = "WARNING: " ++
printf "Ignoring metadata file in %s\n" name
| Soares/Bookbuilder | src/Target/Config.hs | mit | 4,675 | 6 | 15 | 977 | 1,432 | 749 | 683 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE Arrows #-}
module Main where
import Yage
import Yage.Lens hiding ((<.>))
import Yage.Math
import Yage.Wire hiding ((<>), at)
import Yage.Camera
import Yage.Scene
import Yage.HDR
import Yage.Texture
import Yage.Texture.TextureAtlas
import Yage.Formats.Font
import Yage.UI.GUI
import Yage.Transformation
import qualified Yage.Resources as Res
import qualified Yage.Material as Mat
import Yage.Pipeline.Deferred
import Yage.Examples.Shared
winSettings :: WindowConfig
winSettings = WindowConfig
{ windowSize = (1200, 800)
, windowHints =
[ WindowHint'ContextVersionMajor 4
, WindowHint'ContextVersionMinor 1
, WindowHint'OpenGLProfile OpenGLProfile'Core
, WindowHint'OpenGLForwardCompat True
, WindowHint'RefreshRate 60
--, WindowHint'Resizable False
--, WindowHint'Decorated False
]
}
appConf :: ApplicationConfig
appConf = defaultAppConfig{ logPriority = WARNING }
main :: IO ()
main = yageMain "yage-material" appConf winSettings mainWire yDeferredLighting (1/60)
-------------------------------------------------------------------------------
-- View Definition
type SceneEntity = GeoEntity
type SceneEnvironment = Environment Light SkyEntity
type MaterialScene = Scene HDRCamera SceneEntity SceneEnvironment GUI
mainWire :: (HasTime Double (YageTimedInputState t), Real t) => YageWire t () MaterialScene
mainWire = proc () -> do
cam <- hdrCameraHandle `overA` cameraControl -< camera & hdrCameraHandle.cameraOrientation .~ axisAngle (V3 1 0 0) (deg2rad $ -15)
sky <- skyDomeW -< cam^.hdrCameraHandle.cameraLocation
gui <- guiWire -< (meshFile, albedoFile, normalFile)
dummy <- (entityOrientation `overA` previewRotationByInput) . dummyEntityW modelRes albeoTex normalTex -< ()
returnA -< emptyScene cam gui
& sceneSky ?~ sky
& sceneEntities .~ fromList [ dummy ]
& sceneLights .~ fromList [ mainLight, specLight ]
where
texDir :: FilePath
texDir = "res"</>"tex"
bloomSettings = defaultBloomSettings
& bloomFactor .~ 0.7
& bloomPreDownsampling .~ 2
& bloomGaussPasses .~ 5
& bloomWidth .~ 2
& bloomThreshold .~ 0.5
camera = defaultHDRCamera ( mkCameraFps (deg2rad 75) (0.1,10000) )
& hdrExposure .~ 2
& hdrExposureBias .~ 0.0
& hdrWhitePoint .~ 11.2
& hdrBloomSettings .~ bloomSettings
meshFile = "res"</>"model"</>"meshpreview"<.>"ygm"
modelRes = meshRes $ loadYGM geoVertex $ (meshFile, mempty)
albedoFile = texDir </> "floor_d" <.> "png"
normalFile = texDir </> "floor_n" <.> "png"
albeoTex, normalTex :: YageResource Texture
albeoTex = mkTexture2D "Albedo" <$> imageRes albedoFile
normalTex = mkTexture2D "Normal" <$> imageRes normalFile
dummyEntityW :: YageResource (Mesh GeoVertex) -> YageResource Texture -> YageResource Texture -> YageWire t () GeoEntity
dummyEntityW meshRes albedoRes normalRes = proc () -> do
entity <- renderData <~~ constMeshW meshRes
>>> materials.albedoMaterial.Mat.matTexture <~~ constTextureW albedoRes
>>> materials.normalMaterial.Mat.matTexture <~~ constTextureW normalRes -< boxEntity :: GeoEntity
returnA -< entity & materials.albedoMaterial.Mat.stpFactor *~ 2.0
& materials.normalMaterial.Mat.stpFactor *~ 2.0
& entityPosition -~ V3 0 1 0
& entityScale //~ 200
mainLight = Light
{ _lightType = Pointlight (V3 15 1 15) 100
, _lightColor = V3 1.0 1.0 1.0
, _lightIntensity = 0.5
}
specLight = Light
{ _lightType = Pointlight (V3 2 1 2) 10
, _lightColor = V3 1.0 1.0 1.0
, _lightIntensity = 1
}
skyDomeW :: YageWire t (V3 Double) SkyEntity
skyDomeW = proc pos -> do
tex <- cubeTextureToTexture "SkyCube" . pure <$> constTextureW skyTex -< ()
returnA -< skydome & materials.skyEnvironmentMap
.Mat.matTexture .~ tex
& entityPosition .~ pos
& entityScale .~ 50
skyTex = mkTexture2D "SkyTexture" <$> (imageRes $ texDir</>"misc"</>"blueprint"</>"Seamless Blueprint Textures"</>"1"<.>"png")
font = fontRes $ "res"</>"font"</>"yft"</>"SourceCodePro-Regular1024.yft"
imgRes = mkTexture2D "LinearZone" <$> (imageRes $ "res"</>"tex"</>"misc"</>"Linear-ZonePlate.png")
guiWire :: (HasTime Double (YageTimedInputState t), Real t) => YageWire t (FilePath, FilePath, FilePath) GUI
guiWire = proc (meshFile, albedoFile, normalFile) -> do
fontTex <- constFontW font -< ()
imgTex <- constTextureW imgRes -< ()
t <- time -< ()
fps <- avgFps 60 -< ()
let fileText = emptyTextBuffer fontTex
& charColor .~ V4 0 0 0 1
& buffText .~ format "mesh: {}\nalbedo: {}\nnormal: {}"
( Shown meshFile, Shown albedoFile, Shown normalFile )
fileTrans :: Transformation Double
fileTrans = idTransformation & transPosition .~ V3 (15) (750) (1.0)
& transScale._xy .~ 1.1
timeText = emptyTextBuffer fontTex
& charColor .~ V4 0 0 0 1
& buffText .~ format "t: {}" ( Only $ fixed 2 t )
timeTrans = idTransformation & transPosition .~ V3 (15) (50) (-1.0)
& transScale._xy .~ 1.1
fpsText = emptyTextBuffer fontTex
& charColor .~ V4 0 0 0 1
& buffText .~ format "fps: {}" ( Only $ fixed 2 (fps :: Double) )
fpsTrans = idTransformation & transPosition .~ V3 (1030) (50) (-1.0)
& transScale._xy .~ 1.1
returnA -< emptyGUI & guiElements.at "FileInfo" ?~ GUIFont fileText fileTrans
& guiElements.at "Time" ?~ GUIFont timeText timeTrans
& guiElements.at "FPS" ?~ GUIFont fpsText fpsTrans
-- & guiElements.at "Image" ?~ guiImage imgTex 1 (V2 (10) (10)) (V2 300 780)
-- Camera Control Wires
camStartPos :: V3 Double
camStartPos = V3 0 0 1.5
mouseSensitivity :: V2 Double
mouseSensitivity = V2 0.1 0.1
wasdControlled :: Real t => YageWire t () (V3 Double)
wasdControlled = wasdMovement (V2 2 2)
mouseControlled :: Real t => YageWire t () (V2 Double)
mouseControlled = whileKeyDown Key'LeftControl . arr (mouseSensitivity *) . mouseVelocity <|> 0
cameraControl :: Real t => YageWire t Camera Camera
cameraControl = fpsCameraMovement camStartPos wasdControlled . fpsCameraRotation mouseControlled
-- Dummy Control Wires
-- dummyControl :: Real t => YageWire t Dummy Dummy
-- dummyControl = overA transOrientation dummyRotationByInput
previewRotationByInput :: (Real t) => YageWire t (Quaternion Double) (Quaternion Double)
previewRotationByInput =
let acc = 20
att = 0.87
in
smoothRotationByKey acc att ( yAxis ) Key'Right
. smoothRotationByKey acc att (-yAxis ) Key'Left
. smoothRotationByKey acc att ( xAxis ) Key'Up
. smoothRotationByKey acc att (-xAxis ) Key'Down
| MaxDaten/yage-examples | src/YageMaterialPreview.hs | mit | 8,101 | 9 | 19 | 2,544 | 1,960 | 1,020 | 940 | -1 | -1 |
module Language.Dash.Asm.DataAssembler (
encodeConstTable
-- These two are only exposed for debugging TODO move them to another module?
, atomizeConstTable
, AtomicConstant(..)
) where
import Control.Arrow ((&&&))
import Control.Monad.Except (ExceptT (..), runExceptT,
throwError)
import Control.Monad.Identity (Identity, runIdentity)
import Control.Monad.State.Strict hiding (state)
import Data.List.Split
import qualified Data.Map as Map
import qualified Data.Sequence as Seq
import Language.Dash.Error.Error (CompilationError (..))
import Language.Dash.IR.Data
import qualified Language.Dash.VM.DataEncoding as Enc
import Language.Dash.VM.Types
{-
Data Assembler
~~~~~~~~~~~~~~
Encodes data of type Constant for the virtual machine.
TODO describe the runtime representation of all data
TODO rename const table to constant pool?
-}
-- TODO explain the algorithm or simplify it (the latter is probably better)
type ConstAddressMap = ConstAddr -> VMWord
-- The ConstAddressMap is a conversion function from the virtual constant
-- addresses used in the Opcode ir to the real binary offsets for the vm
encodeConstTable :: ConstTable
-> Seq.Seq VMWord
-> Either CompilationError ([VMWord], ConstAddressMap)
encodeConstTable ctable funcMap =
(\ (atoms, mapping) -> (map encodeConstant atoms, (mapping Map.!) )) <$>
atomizeConstTable ctable funcMap
-- We receive data as an array of Data.Constant objects. The first step is
-- to split this representation into their atomic parts. This is what this
-- function does. The next step encodes those atomic parts into their
-- byte representation for the vm.
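-- An illustrative example of the intended result (constructor names as
-- defined in this module; `sid` stands for some SymId value and `fm` for
-- the function address map):
--
--   atomizeConstTable [CCompoundSymbol sid [CNumber 1, CNumber 2]] fm
--     ==> Right ( [ACCompoundSymbolHeader sid 2, ACNumber 1, ACNumber 2]
--               , Map.fromList [(mkConstAddr 0, 0)] )
--
-- i.e. a compound constant becomes a header atom followed by one atom per
-- argument, and the address map records the offset at which constant 0 starts.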
atomizeConstTable :: ConstTable
-> Seq.Seq VMWord
-> Either CompilationError ( [AtomicConstant]
, Map.Map ConstAddr VMWord)
atomizeConstTable ctable funcMap =
let stateOrError = runIdentity $ runExceptT $ execStateT encTable initialState in
(atomized &&& addrMap) <$> stateOrError
where
initialState = emptyConstAtomizationState ctable funcMap
encTable = whileJust atomizeConstant popWorkItem
whileJust :: (b -> ConstAtomization a)
-> ConstAtomization (Maybe b)
-> ConstAtomization ()
whileJust f source = do
next <- source
case next of
Nothing -> return ()
Just x -> do
_ <- f x
whileJust f source
atomizeConstant :: Constant -> ConstAtomization ()
atomizeConstant c = case c of
CNumber n -> addAtomized [ACNumber n]
CPlainSymbol sid -> addAtomized [ACPlainSymbol sid]
CCompoundSymbol sid args -> atomizeCompoundSymbol sid args
CMatchData args -> atomizeMatchData args
CString str -> atomizeString str
COpaqueSymbol sid own args -> atomizeOpaqueSymbol sid own args
CFunction addr -> atomizeFunction addr
CCompoundSymbolRef caddr -> atomizeCompoundSymbolRef caddr
x -> throwError $ InternalCompilerError $ "Unable to encode top-level constant " ++ show x
atomizeCompoundSymbolRef :: ConstAddr -> ConstAtomization ()
atomizeCompoundSymbolRef caddr = do
addr <- actualConstAddr caddr
addAtomized [ACCompoundSymbolRef addr]
atomizeFunction :: FuncAddr -> ConstAtomization ()
atomizeFunction addr = do
faddr <- actualFuncAddr addr
addAtomized [ACFunction faddr]
atomizeCompoundSymbol :: SymId -> [Constant] -> ConstAtomization ()
atomizeCompoundSymbol sid args = do
setReservedSpace (1 + length args) -- TODO this is duplicated logic (see below)
let symbolHeader = ACCompoundSymbolHeader sid (fromIntegral $ length args)
atomizedArgs <- mapM atomizeConstArg args
addAtomized $ symbolHeader : atomizedArgs
setReservedSpace 0
atomizeOpaqueSymbol :: SymId -> SymId -> [Constant] -> ConstAtomization ()
atomizeOpaqueSymbol sid owner args = do
setReservedSpace (2 + length args)
let symbolHeader = ACOpaqueSymbolHeader sid (fromIntegral $ length args)
atomizedArgs <- mapM atomizeConstArg args
addAtomized $ symbolHeader : ACPlainSymbol owner : atomizedArgs
setReservedSpace 0
atomizeMatchData :: [Constant] -> ConstAtomization ()
atomizeMatchData args = do
setReservedSpace (1 + length args)
let matchHeader = ACMatchHeader (fromIntegral $ length args)
atomizedArgs <- mapM atomizeConstArg args
addAtomized $ matchHeader : atomizedArgs
setReservedSpace 0
atomizeString :: String -> ConstAtomization ()
atomizeString str = do
let numChunks = numStringChunksForString str
let nullString = str ++ "\0"
-- fill rest of string with zeroes
let adjustedString = nullString ++
replicate (bytesPerVMWord - (length nullString `rem` bytesPerVMWord)) '\0'
let chunks = chunksOf bytesPerVMWord adjustedString
let encChunks = map (\ [c1, c2, c3, c4] -> ACStringChunk c1 c2 c3 c4) chunks
let header = ACStringHeader (length str) numChunks
addAtomized $ header : encChunks
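-- Worked example (comment only, following the definitions above and
-- numStringChunksForString below): the string "hi" needs one 4-byte chunk
-- after the terminating '\0' and padding, so atomizeString "hi" adds
--   [ACStringHeader 2 1, ACStringChunk 'h' 'i' '\0' '\0'].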
bytesPerVMWord :: Int
bytesPerVMWord = 4
numStringChunksForString :: String -> Int
numStringChunksForString str =
let len = length str + 1 -- add 1 for terminating \0
(numChunks, remainder) = len `divMod` bytesPerVMWord
adjust = if remainder /= 0 then 1 else 0
in
numChunks + adjust
atomizeConstArg :: Constant -> ConstAtomization AtomicConstant
atomizeConstArg c = case c of
CNumber n -> return $ ACNumber n
CPlainSymbol sid -> return $ ACPlainSymbol sid
CMatchVar n -> return $ ACMatchVar n
ds@(CCompoundSymbol _ _) -> do
addr <- nextFreeAddress
pushWorkItem ds
return $ ACCompoundSymbolRef addr
CFunction addr -> liftM ACFunction $ actualFuncAddr addr
CCompoundSymbolRef caddr -> do
addr <- actualConstAddr caddr
return $ ACCompoundSymbolRef addr
x -> throwError $ InternalCompilerError $ "Unable to encode constant as argument: "
++ show x
-- State
type ConstAtomizationT m a = StateT ConstAtomizationState (ExceptT CompilationError m) a
type ConstAtomization a = ConstAtomizationT Identity a
data ConstAtomizationState = ConstAtomizationState {
constants :: [Constant]
, workQueue :: [Constant]
, addrMap :: Map.Map ConstAddr VMWord
, atomized :: [AtomicConstant] -- should be a Sequence
, reservedSpace :: Int
, numAtomizedConsts :: Int
, functionMap :: Seq.Seq VMWord
}
emptyConstAtomizationState :: [Constant] -> Seq.Seq VMWord -> ConstAtomizationState
emptyConstAtomizationState ctable funcMap = ConstAtomizationState {
constants = ctable
, workQueue = []
, addrMap = Map.empty
, atomized = []
, reservedSpace = 0
, numAtomizedConsts = 0
, functionMap = funcMap
}
-- While encoding for example a compound symbol, the elements of that symbol
-- are pushed onto the workQueue and atomized next. If the workQueue is empty
-- we continue with the next constant from our original input. When we're
-- done we return Nothing.
-- TODO I'm quite sure that this can be written much more elegantly
popWorkItem :: ConstAtomization (Maybe Constant)
popWorkItem = do
state <- get
case (workQueue state, constants state) of
([], []) ->
return Nothing
([], cs) -> do
numAtomized <- gets numAtomizedConsts
let currentAddr = fromIntegral $ length $ atomized state
addAddrMapping (mkConstAddr numAtomized) currentAddr
state' <- get
put $ state' { numAtomizedConsts = numAtomized + 1, constants = tail cs }
return $ Just $ head cs
(ws, _) -> do
put (state { workQueue = tail ws })
return $ Just $ head ws
addAddrMapping :: ConstAddr -> VMWord -> ConstAtomization ()
addAddrMapping src dest = do
state <- get
let newMap = Map.insert src dest (addrMap state)
put $ state { addrMap = newMap }
pushWorkItem :: Constant -> ConstAtomization ()
pushWorkItem c = do
state <- get
let workQ = workQueue state
put $ state { workQueue = workQ ++ [c] }
nextFreeAddress :: ConstAtomization ConstAddr
nextFreeAddress = do
state <- get
let used = length $ atomized state
let reserved = reservedSpace state
let pendingItems = workQueue state
let pending = foldl (\acc c -> acc + spaceNeededByConstant c) 0 pendingItems
return $ mkConstAddr $ used + reserved + pending
spaceNeededByConstant :: Constant -> Int
spaceNeededByConstant c = case c of
CNumber _ -> 1
CPlainSymbol _ -> 1
CMatchVar _ -> 1
CCompoundSymbol _ args -> 1 + length args
COpaqueSymbol _ _ args -> 2 + length args
CMatchData args -> 1 + length args
CString str -> 1 + numStringChunksForString str
CFunction _ -> 1
CCompoundSymbolRef _ -> 1
addAtomized :: [AtomicConstant] -> ConstAtomization ()
addAtomized atoms = do
state <- get
put $ state { atomized = atomized state ++ atoms }
setReservedSpace :: Int -> ConstAtomization ()
setReservedSpace n = do
state <- get
put $ state { reservedSpace = n }
actualFuncAddr :: FuncAddr -> ConstAtomization Int
actualFuncAddr addr = do
funcMap <- gets functionMap
return $ fromIntegral $ funcMap `Seq.index` funcAddrToInt addr
actualConstAddr :: ConstAddr -> ConstAtomization ConstAddr
actualConstAddr caddr = do
state <- get
case Map.lookup caddr (addrMap state) of
Nothing -> throwError $ InternalCompilerError "Can't resolve compound symbol ref"
Just addr -> return $ mkConstAddr $ fromIntegral addr
-- Byte encoding for data
data AtomicConstant =
ACPlainSymbol SymId
| ACCompoundSymbolRef ConstAddr
| ACCompoundSymbolHeader SymId Int
| ACOpaqueSymbolHeader SymId Int
| ACNumber Int
| ACMatchHeader Int
| ACMatchVar Int
| ACStringHeader Int Int -- string length, num chunks
| ACStringChunk Char Char Char Char -- with ascii chars and VMWord as Word32 this would be 4 chars per string chunk
| ACFunction Int
deriving (Show, Eq)
encodeConstant :: AtomicConstant -> VMWord
encodeConstant c = case c of
ACPlainSymbol sid -> Enc.encodePlainSymbol sid
ACCompoundSymbolRef addr -> Enc.encodeCompoundSymbolRef addr
ACCompoundSymbolHeader sid n -> Enc.encodeCompoundSymbolHeader sid n
ACNumber n -> Enc.encodeNumber n
ACMatchHeader n -> Enc.encodeMatchHeader n
ACMatchVar n -> Enc.encodeMatchVar n
ACStringHeader len numChunks -> Enc.encodeStringHeader len numChunks
ACStringChunk b1 b2 b3 b4 -> Enc.encodeStringChunk b1 b2 b3 b4
ACOpaqueSymbolHeader sid n -> Enc.encodeOpaqueSymbolHeader sid n
ACFunction addr -> Enc.encodeFunctionRef addr
| arne-schroppe/dash | src/Language/Dash/Asm/DataAssembler.hs | mit | 10,880 | 0 | 16 | 2,617 | 2,743 | 1,375 | 1,368 | 221 | 10 |
module Compiler.Rum.Interpreter.Rummer where
import Control.Applicative (liftA2, empty)
import Control.Monad.State (get, gets, lift, liftIO, modify, MonadState)
import qualified Data.HashMap.Strict as HM (fromList, lookup)
import Data.IORef
import Compiler.Rum.Internal.AST
import Compiler.Rum.Internal.Rumlude (runRumlude)
import Compiler.Rum.Internal.Util
import Compiler.Rum.Interpreter.Rumlude (preludeLibrary)
interpret :: Program -> InterpretT
interpret [] = return Unit
interpret (st:stmts) = do
stRes <- interpretSt st
x <- gets isReturn
if x then return stRes else interpret stmts
interpretSt :: Statement -> InterpretT
interpretSt AssignmentVar{..} = eval value >>= \x ->
if isUp var
then do
refs <- gets refVarEnv
case HM.lookup var refs of
Just r -> liftIO $ writeIORef r (Val x) >> pure Unit
Nothing -> liftIO (newIORef$ Val x) >>= \rx -> modifyT (updateRefVars var rx)
else modifyT (updateVars var x)
interpretSt AssignmentArr{..} = do
x <- eval value
inds <- mapM eval (index arrC)
refs <- gets refVarEnv
let Just r = HM.lookup (arr arrC) refs
liftIO (setRefArrsCell inds x r) >> pure Unit
interpretSt Skip = return Unit
interpretSt IfElse{..} = eval ifCond >>= \x ->
if isFalse x then interpret falseAct else interpret trueAct
interpretSt st@WhileDo{..} = do
c <- eval whileCond
whenT (isTrue c) $ interpret act >> interpretSt st
interpretSt st@RepeatUntil{..} = do
() <$ interpret act
c <- eval repCond
whenT (isFalse c) $ interpretSt st
interpretSt For{..} = interpret start *> forDo
where
forDo = do
c <- eval expr
whenT (isTrue c) $ interpret body >> interpret update >> forDo
interpretSt Fun{..} = modifyT (updateFuns funName params $ \_ -> interpret funBody)
interpretSt Return{..} = modify (updateBool True) >> eval retExp
interpretSt (FunCallStmt f) = evalFunCall f
{-
StateT s (MaybeT IO) a = s -> IO (Maybe (a, s))
MaybeT IO a = IO (Maybe a)
pure :: a -> m a = a -> StateT s (MaybeT IO) a = a -> (s -> IO (Maybe (a, s)))
empty :: StateT s (MaybeT IO) a = s -> IO Nothing
-}
eval :: Expression -> InterpretT
eval (Const c) = pure c
eval (Var v) =
if isUp v
then do
refVar <- gets (findRefVar v)
case refVar of
Just x -> liftIO $ readIORef x >>= fromRefTypeToIO
Nothing -> evalVar v
else evalVar v
eval (ArrC ArrCell{..}) = do
inds <- mapM eval index
var <- gets (findRefVar arr)
case var of
Just refvar -> liftIO $ readIORef refvar >>= fromRefTypeToIO >>= \x ->
return $ getArrsCell x inds
Nothing -> gets (findVar arr) >>= \v -> case v of
Just v -> return $ getArrsCell v inds
Nothing -> empty
eval (ArrLit exps) = Arr <$> mapM eval exps
eval (Neg e) = do
Number x <- eval e
pure $ Number (negate x)
eval BinOper {..} = do
Number left <- eval l
Number right <- eval r
pure $ Number (binOp bop left right)
eval LogicOper{..} = do
Number left <- eval l
Number right <- eval r
pure $ Number (logicOp lop left right)
eval CompOper{..} = liftA2 (intCompare cop) (eval l) (eval r)
eval (FunCallExp f) = evalFunCall f
evalFunCall :: FunCall -> InterpretT
evalFunCall FunCall{fName = Variable "strset", args = [Var vS, i, c]} = do
test <- gets (findRefVar vS)
valStr <- case test of
Just v -> pure v
Nothing -> do
Just v <- gets (findVar vS)
liftIO $ newIORef (Val v)
Val s <- liftIO $ readIORef valStr
evI <- eval i
evC <- eval c
interpretSt $ AssignmentVar vS (Const $ runRumlude Strset [s, evI, evC])
evalFunCall FunCall{..} = do
env <- get
let funs = funEnv env
-- let globals = varEnv env
evalArgs <- mapM eval args
Just (names, fun) <- gets (findFun fName)
let (locs, refs) = temp names args evalArgs env
let locals = HM.fromList locs
let ref = HM.fromList refs
Interpret $ lift $ evalRunInterpret (fun evalArgs) (Env locals {-`HM.union` globals-} ref funs False)
temp :: [Variable] -> [Expression] -> [Type] -> Environment -> ([(Variable, Type)], [(Variable, IORef RefType)])
temp [] [] [] _ = (mempty, mempty)
temp (v:vs) (e:es) (t:ts) env = case e of
Var var -> let refVar = findRefVar var env in
case refVar of
Just x -> ((v, t):l, (v, x): r)
Nothing -> ((v, t):l, r)
_ -> ((v, t):l, r)
where
(l, r) = temp vs es ts env
evalVar :: Variable -> InterpretT
evalVar v = do
var <- gets (findVar v)
case var of
Just x -> pure x
Nothing -> empty
--------------
---- Util ----
--------------
whenT :: (Applicative f) => Bool -> f Type -> f Type
whenT cond s = if cond then s else pure Unit
modifyT :: MonadState s m => (s -> s) -> m Type
modifyT f = modify f >> pure Unit
-------------------
----- Rummer ------
-------------------
rumInterpreter :: [Statement] -> IO ()
rumInterpreter p = runIOInterpret (interpret p) (Env mempty mempty preludeLibrary False)
| vrom911/Compiler | src/Compiler/Rum/Interpreter/Rummer.hs | mit | 5,188 | 0 | 17 | 1,416 | 2,060 | 1,009 | 1,051 | -1 | -1 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
module Memoization (
Double()
) where
-- See: http://stackoverflow.com/a/2217374/704831
import Data.MemoTrie
import Data.Binary
import qualified Data.ByteString.Lazy as BS
mangle :: Double -> [Int]
mangle = map fromIntegral . BS.unpack . encode
unmangle :: [Int] -> Double
unmangle = decode . BS.pack . map fromIntegral
instance HasTrie Double where
data Double :->: a = DoubleTrie ([Int] :->: a)
trie f = DoubleTrie $ trie $ f . unmangle
untrie (DoubleTrie t) = untrie t . mangle
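-- A minimal usage sketch (assumes Data.MemoTrie's `memo`; `slowF` is a
-- hypothetical example function, not part of this module):
--
-- > import Data.MemoTrie (memo)
-- >
-- > slowF :: Double -> Double
-- > slowF x = sum [sin (x * fromIntegral k) | k <- [1 .. 100000 :: Int]]
-- >
-- > fastF :: Double -> Double
-- > fastF = memo slowF  -- repeated calls with the same Double reuse the trie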
| sharkdp/zehntausend | src/Memoization.hs | mit | 568 | 0 | 10 | 109 | 167 | 93 | 74 | 18 | 1 |
{- This module was generated from data in the Kate syntax
highlighting file curry.xml, version 0.3, by Björn Peemöller ([email protected]) -}
module Text.Highlighting.Kate.Syntax.Curry
(highlight, parseExpression, syntaxName, syntaxExtensions)
where
import Text.Highlighting.Kate.Types
import Text.Highlighting.Kate.Common
import Text.ParserCombinators.Parsec hiding (State)
import Control.Monad.State
import Data.Char (isSpace)
import qualified Data.Set as Set
-- | Full name of language.
syntaxName :: String
syntaxName = "Curry"
-- | Filename extensions for this language.
syntaxExtensions :: String
syntaxExtensions = "*.curry"
-- | Highlight source code using this syntax definition.
highlight :: String -> [SourceLine]
highlight input = evalState (mapM parseSourceLine $ lines input) startingState
parseSourceLine :: String -> State SyntaxState SourceLine
parseSourceLine = mkParseSourceLine (parseExpression Nothing)
-- | Parse an expression using appropriate local context.
parseExpression :: Maybe (String,String)
-> KateParser Token
parseExpression mbcontext = do
(lang,cont) <- maybe currentContext return mbcontext
result <- parseRules (lang,cont)
optional $ do eof
updateState $ \st -> st{ synStPrevChar = '\n' }
pEndLine
return result
startingState = SyntaxState {synStContexts = [("Curry","Normal")], synStLineNumber = 0, synStPrevChar = '\n', synStPrevNonspace = False, synStContinuation = False, synStCaseSensitive = True, synStKeywordCaseSensitive = True, synStCaptures = []}
pEndLine = do
updateState $ \st -> st{ synStPrevNonspace = False }
context <- currentContext
contexts <- synStContexts `fmap` getState
st <- getState
if length contexts >= 2
then case context of
_ | synStContinuation st -> updateState $ \st -> st{ synStContinuation = False }
("Curry","Normal") -> return ()
("Curry","Pragma") -> return ()
("Curry","Multiline Comment") -> return ()
("Curry","Currydoc") -> (popContext) >> pEndLine
("Curry","Comment") -> (popContext) >> pEndLine
("Curry","Import") -> (popContext) >> pEndLine
("Curry","Char") -> pushContext ("Curry","CharSyntaxError") >> return ()
("Curry","CharEscape") -> (popContext) >> pEndLine
("Curry","CharEnd") -> (popContext) >> pEndLine
("Curry","CharSyntaxError") -> return ()
("Curry","String") -> pushContext ("Curry","StringSyntaxError") >> return ()
("Curry","StringEscape") -> pushContext ("Curry","StringGap") >> return ()
("Curry","StringGap") -> return ()
("Curry","StringSyntaxError") -> return ()
("Curry","Infix") -> return ()
_ -> return ()
else return ()
withAttribute attr txt = do
when (null txt) $ fail "Parser matched no text"
updateState $ \st -> st { synStPrevChar = last txt
, synStPrevNonspace = synStPrevNonspace st || not (all isSpace txt) }
return (attr, txt)
list_keywords = Set.fromList $ words $ "case data do else external fcase free if in infix infixl infixr let module of then type where"
list_Prelude_Func = Set.fromList $ words $ "and all any appendFile best break browse browseList chr concat concatMap const curry div done doSolve drop dropWhile either elem ensureNotFree ensureSpine enumFrom enumFromThen enumFromTo enumFromThenTo error failed filter findall flip foldl foldl1 foldr foldr1 fst getChar getLine id if_then_else iterate head length lines lookup map mapIO mapIO_ max maybe min mod negate not notElem null once or ord otherwise print putChar putStr putStrLn readFile repeat replicate return reverse seq sequenceIO sequenceIO_ show snd solveAll span splitAt success tail take takeWhile try uncurry unknown unlines unpack until unwords unzip unzip3 writeFile words zip zip3 zipWith zipWith3"
list_Prelude_Type = Set.fromList $ words $ "Bool Char Either Float Int IO Maybe Ordering String Success"
list_Prelude_Cons = Set.fromList $ words $ "False True Left Right Just Nothing EQ LT GT"
regex_import'5cs'2b'28qualified'29'3f = compileRegex True "import\\s+(qualified)?"
regex_0'28o'7cO'29'5b0'2d7'5d'2b = compileRegex True "0(o|O)[0-7]+"
regex_'28'3a'3a'7c'3a'3d'7c'3a'3e'7c'5c'2d'3e'7c'3c'5c'2d'7c'5c'2e'5c'2e'29 = compileRegex True "(::|:=|:>|\\->|<\\-|\\.\\.)"
regex_'5cs'2a'28'5ba'2dz'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'7c'5c'28'5b'7e'21'40'23'5c'24'25'5c'5e'26'5c'2a'5c'2b'5c'2d'3d'3c'3e'5c'3f'5c'2e'2f'5c'7c'5c'5c'3a'5d'2b'5c'29'29'5cs'2a'28'2c'5cs'2a'28'5ba'2dz'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'7c'5c'28'5b'7e'21'40'23'5c'24'25'5c'5e'26'5c'2a'5c'2b'5c'2d'3d'3c'3e'5c'3f'5c'2e'2f'5c'7c'5c'5c'3a'5d'2b'5c'29'29'29'2a'5cs'2a'28'3f'3d'3a'3a'5b'5e'7e'21'40'23'5c'24'25'5c'5e'26'5c'2a'5c'2b'5c'2d'3d'3c'3e'5c'3f'5c'2e'2f'5c'7c'5c'5c'3a'5d'29 = compileRegex True "\\s*([a-z][a-zA-Z0-9_']*|\\([~!@#\\$%\\^&\\*\\+\\-=<>\\?\\./\\|\\\\:]+\\))\\s*(,\\s*([a-z][a-zA-Z0-9_']*|\\([~!@#\\$%\\^&\\*\\+\\-=<>\\?\\./\\|\\\\:]+\\)))*\\s*(?=::[^~!@#\\$%\\^&\\*\\+\\-=<>\\?\\./\\|\\\\:])"
regex_'28'5bA'2dZ'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'5c'2e'29'2a'5ba'2dz'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a = compileRegex True "([A-Z][a-zA-Z0-9_']*\\.)*[a-z][a-zA-Z0-9_']*"
regex_'28'5bA'2dZ'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'5c'2e'29'2a'5b'7e'21'40'23'5c'24'25'5c'5e'26'5c'2a'5c'2b'5c'2d'3d'3c'3e'5c'3f'5c'2e'2f'5c'7c'5c'5c'3a'5d'2b = compileRegex True "([A-Z][a-zA-Z0-9_']*\\.)*[~!@#\\$%\\^&\\*\\+\\-=<>\\?\\./\\|\\\\:]+"
regex_'28'5bA'2dZ'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'5c'2e'29'2a'5bA'2dZ'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a = compileRegex True "([A-Z][a-zA-Z0-9_']*\\.)*[A-Z][a-zA-Z0-9_']*"
regex_'5cS'2b = compileRegex True "\\S+"
regex_'5b'5e'27'5c'5c'5d = compileRegex True "[^'\\\\]"
regex_o'5b0'2d7'5d'2b = compileRegex True "o[0-7]+"
regex_'5b0'2d9'5d'2b = compileRegex True "[0-9]+"
regex_x'5b0'2d9a'2dfA'2dF'5d'2b = compileRegex True "x[0-9a-fA-F]+"
regex_'5c'5e'5bA'2dZ'40'5c'5b'5c'5c'5c'5d'5c'5e'5f'5d = compileRegex True "\\^[A-Z@\\[\\\\\\]\\^_]"
regex_NUL'7cSOH'7cSTX'7cETX'7cEOT'7cENQ'7cACK'7cBEL'7cBS'7cHT'7cLF'7cVT'7cFF'7cCR'7cSO'7cSI'7cDLE'7cDC1'7cDC2'7cDC3'7cDC4'7cNAK'7cSYN'7cETB'7cCAN'7cEM'7cSUB'7cESC'7cFS'7cGS'7cRS'7cUS'7cSP'7cDEL = compileRegex True "NUL|SOH|STX|ETX|EOT|ENQ|ACK|BEL|BS|HT|LF|VT|FF|CR|SO|SI|DLE|DC1|DC2|DC3|DC4|NAK|SYN|ETB|CAN|EM|SUB|ESC|FS|GS|RS|US|SP|DEL"
regex_'2e = compileRegex True "."
regex_'5b'5e'22'5c'5c'5d'2a = compileRegex True "[^\"\\\\]*"
parseRules ("Curry","Normal") =
(((pString False "{-#" >>= withAttribute OtherTok) >>~ pushContext ("Curry","Pragma"))
<|>
((pDetect2Chars False '{' '-' >>= withAttribute CommentTok) >>~ pushContext ("Curry","Multiline Comment"))
<|>
((pString False "---" >>= withAttribute CommentTok) >>~ pushContext ("Curry","Currydoc"))
<|>
((pDetect2Chars False '-' '-' >>= withAttribute CommentTok) >>~ pushContext ("Curry","Comment"))
<|>
((pKeyword " \n\t.():!+,-<=>%&*/;?[]^{|}~\\" list_keywords >>= withAttribute KeywordTok))
<|>
((pKeyword " \n\t.():!+,-<=>%&*/;?[]^{|}~\\" list_Prelude_Func >>= withAttribute FunctionTok))
<|>
((pKeyword " \n\t.():!+,-<=>%&*/;?[]^{|}~\\" list_Prelude_Type >>= withAttribute DataTypeTok))
<|>
((pKeyword " \n\t.():!+,-<=>%&*/;?[]^{|}~\\" list_Prelude_Cons >>= withAttribute KeywordTok))
<|>
((pRegExpr regex_import'5cs'2b'28qualified'29'3f >>= withAttribute KeywordTok) >>~ pushContext ("Curry","Import"))
<|>
((pFloat >>= withAttribute FloatTok))
<|>
((pRegExpr regex_0'28o'7cO'29'5b0'2d7'5d'2b >>= withAttribute BaseNTok))
<|>
((pHlCHex >>= withAttribute BaseNTok))
<|>
((pInt >>= withAttribute DecValTok))
<|>
((pDetectChar False '\'' >>= withAttribute CharTok) >>~ pushContext ("Curry","Char"))
<|>
((pDetectChar False '"' >>= withAttribute StringTok) >>~ pushContext ("Curry","String"))
<|>
((pRegExpr regex_'28'3a'3a'7c'3a'3d'7c'3a'3e'7c'5c'2d'3e'7c'3c'5c'2d'7c'5c'2e'5c'2e'29 >>= withAttribute OtherTok))
<|>
((pRegExpr regex_'5cs'2a'28'5ba'2dz'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'7c'5c'28'5b'7e'21'40'23'5c'24'25'5c'5e'26'5c'2a'5c'2b'5c'2d'3d'3c'3e'5c'3f'5c'2e'2f'5c'7c'5c'5c'3a'5d'2b'5c'29'29'5cs'2a'28'2c'5cs'2a'28'5ba'2dz'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'7c'5c'28'5b'7e'21'40'23'5c'24'25'5c'5e'26'5c'2a'5c'2b'5c'2d'3d'3c'3e'5c'3f'5c'2e'2f'5c'7c'5c'5c'3a'5d'2b'5c'29'29'29'2a'5cs'2a'28'3f'3d'3a'3a'5b'5e'7e'21'40'23'5c'24'25'5c'5e'26'5c'2a'5c'2b'5c'2d'3d'3c'3e'5c'3f'5c'2e'2f'5c'7c'5c'5c'3a'5d'29 >>= withAttribute OtherTok))
<|>
((pRegExpr regex_'28'5bA'2dZ'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'5c'2e'29'2a'5ba'2dz'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a >>= withAttribute NormalTok))
<|>
((pRegExpr regex_'28'5bA'2dZ'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'5c'2e'29'2a'5b'7e'21'40'23'5c'24'25'5c'5e'26'5c'2a'5c'2b'5c'2d'3d'3c'3e'5c'3f'5c'2e'2f'5c'7c'5c'5c'3a'5d'2b >>= withAttribute FunctionTok))
<|>
((pRegExpr regex_'28'5bA'2dZ'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'5c'2e'29'2a'5bA'2dZ'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a >>= withAttribute DataTypeTok))
<|>
((pDetectChar False '`' >>= withAttribute OtherTok) >>~ pushContext ("Curry","Infix"))
<|>
((pDetectChar False '(' >>= withAttribute NormalTok))
<|>
((pDetectChar False ')' >>= withAttribute NormalTok))
<|>
((pDetectChar False '[' >>= withAttribute NormalTok))
<|>
((pDetectChar False ']' >>= withAttribute NormalTok))
<|>
((pDetectChar False '{' >>= withAttribute NormalTok))
<|>
((pDetectChar False '}' >>= withAttribute NormalTok))
<|>
(currentContext >>= \x -> guard (x == ("Curry","Normal")) >> pDefault >>= withAttribute NormalTok))
parseRules ("Curry","Pragma") =
(((pString False "#-}" >>= withAttribute OtherTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("Curry","Pragma")) >> pDefault >>= withAttribute OtherTok))
parseRules ("Curry","Multiline Comment") =
(((pDetect2Chars False '-' '}' >>= withAttribute CommentTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("Curry","Multiline Comment")) >> pDefault >>= withAttribute CommentTok))
parseRules ("Curry","Currydoc") =
(currentContext >>= \x -> guard (x == ("Curry","Currydoc")) >> pDefault >>= withAttribute CommentTok)
parseRules ("Curry","Comment") =
(currentContext >>= \x -> guard (x == ("Curry","Comment")) >> pDefault >>= withAttribute CommentTok)
parseRules ("Curry","Import") =
(((pString False "{-#" >>= withAttribute OtherTok) >>~ pushContext ("Curry","Pragma"))
<|>
((pDetect2Chars False '{' '-' >>= withAttribute CommentTok) >>~ pushContext ("Curry","Multiline Comment"))
<|>
((pString False "---" >>= withAttribute CommentTok) >>~ pushContext ("Curry","Currydoc"))
<|>
((pDetect2Chars False '-' '-' >>= withAttribute CommentTok) >>~ pushContext ("Curry","Comment"))
<|>
((pRegExpr regex_'28'5bA'2dZ'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a'5c'2e'29'2a'5bA'2dZ'5d'5ba'2dzA'2dZ0'2d9'5f'27'5d'2a >>= withAttribute DataTypeTok))
<|>
((pDetect2Chars False 'a' 's' >>= withAttribute KeywordTok))
<|>
((pString False "hiding" >>= withAttribute KeywordTok))
<|>
((pDetectChar False '(' >>= withAttribute NormalTok) >>~ (popContext))
<|>
((pRegExpr regex_'5cS'2b >>= withAttribute ErrorTok))
<|>
(currentContext >>= \x -> guard (x == ("Curry","Import")) >> pDefault >>= withAttribute NormalTok))
parseRules ("Curry","Char") =
(((pDetectChar False '\'' >>= withAttribute ErrorTok) >>~ (popContext))
<|>
((pDetectChar False '\\' >>= withAttribute CharTok) >>~ pushContext ("Curry","CharEscape"))
<|>
((pRegExpr regex_'5b'5e'27'5c'5c'5d >>= withAttribute CharTok) >>~ pushContext ("Curry","CharEnd"))
<|>
(currentContext >>= \x -> guard (x == ("Curry","Char")) >> pDefault >>= withAttribute CharTok))
parseRules ("Curry","CharEscape") =
(((pAnyChar "abfnrtv\\\"'" >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_o'5b0'2d7'5d'2b >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_'5b0'2d9'5d'2b >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_x'5b0'2d9a'2dfA'2dF'5d'2b >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_'5c'5e'5bA'2dZ'40'5c'5b'5c'5c'5c'5d'5c'5e'5f'5d >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_NUL'7cSOH'7cSTX'7cETX'7cEOT'7cENQ'7cACK'7cBEL'7cBS'7cHT'7cLF'7cVT'7cFF'7cCR'7cSO'7cSI'7cDLE'7cDC1'7cDC2'7cDC3'7cDC4'7cNAK'7cSYN'7cETB'7cCAN'7cEM'7cSUB'7cESC'7cFS'7cGS'7cRS'7cUS'7cSP'7cDEL >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_'2e >>= withAttribute ErrorTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("Curry","CharEscape")) >> pDefault >>= withAttribute CharTok))
parseRules ("Curry","CharEnd") =
(((pDetectChar False '\'' >>= withAttribute CharTok) >>~ (popContext >> popContext))
<|>
((pRegExpr regex_'2e >>= withAttribute ErrorTok))
<|>
(currentContext >>= \x -> guard (x == ("Curry","CharEnd")) >> pDefault >>= withAttribute CharTok))
parseRules ("Curry","CharSyntaxError") =
(((pDetectChar False '\'' >>= withAttribute ErrorTok) >>~ (popContext >> popContext))
<|>
(currentContext >>= \x -> guard (x == ("Curry","CharSyntaxError")) >> pDefault >>= withAttribute ErrorTok))
parseRules ("Curry","String") =
(((pDetectChar False '"' >>= withAttribute StringTok) >>~ (popContext))
<|>
((pDetectChar False '\\' >>= withAttribute CharTok) >>~ pushContext ("Curry","StringEscape"))
<|>
((pRegExpr regex_'5b'5e'22'5c'5c'5d'2a >>= withAttribute StringTok))
<|>
(currentContext >>= \x -> guard (x == ("Curry","String")) >> pDefault >>= withAttribute StringTok))
parseRules ("Curry","StringEscape") =
(((pAnyChar "abfnrtv\\\"'&" >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_o'5b0'2d7'5d'2b >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_'5b0'2d9'5d'2b >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_x'5b0'2d9a'2dfA'2dF'5d'2b >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_'5c'5e'5bA'2dZ'40'5c'5b'5c'5c'5c'5d'5c'5e'5f'5d >>= withAttribute CharTok) >>~ (popContext))
<|>
((pRegExpr regex_NUL'7cSOH'7cSTX'7cETX'7cEOT'7cENQ'7cACK'7cBEL'7cBS'7cHT'7cLF'7cVT'7cFF'7cCR'7cSO'7cSI'7cDLE'7cDC1'7cDC2'7cDC3'7cDC4'7cNAK'7cSYN'7cETB'7cCAN'7cEM'7cSUB'7cESC'7cFS'7cGS'7cRS'7cUS'7cSP'7cDEL >>= withAttribute CharTok) >>~ (popContext))
<|>
((pDetectSpaces >>= withAttribute CharTok) >>~ pushContext ("Curry","StringGap"))
<|>
((pRegExpr regex_'2e >>= withAttribute ErrorTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("Curry","StringEscape")) >> pDefault >>= withAttribute StringTok))
parseRules ("Curry","StringGap") =
(((pDetectSpaces >>= withAttribute CharTok))
<|>
((pDetectChar False '\\' >>= withAttribute CharTok) >>~ (popContext >> popContext >> popContext))
<|>
((pDetectChar False '"' >>= withAttribute ErrorTok) >>~ (popContext >> popContext >> popContext))
<|>
((pRegExpr regex_'2e >>= withAttribute ErrorTok))
<|>
(currentContext >>= \x -> guard (x == ("Curry","StringGap")) >> pDefault >>= withAttribute StringTok))
parseRules ("Curry","StringSyntaxError") =
(((pDetectChar False '"' >>= withAttribute ErrorTok) >>~ (popContext >> popContext))
<|>
(currentContext >>= \x -> guard (x == ("Curry","StringSyntaxError")) >> pDefault >>= withAttribute ErrorTok))
parseRules ("Curry","Infix") =
(((pDetectChar False '`' >>= withAttribute OtherTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("Curry","Infix")) >> pDefault >>= withAttribute OtherTok))
parseRules x = parseRules ("Curry","Normal") <|> fail ("Unknown context" ++ show x)
|
ambiata/highlighting-kate
|
Text/Highlighting/Kate/Syntax/Curry.hs
|
gpl-2.0
| 15,701 | 0 | 37 | 2,087 | 3,776 | 1,997 | 1,779 | 243 | 18 |
import Maybe(x)
|
Helium4Haskell/helium
|
test/parser/ImportListError.hs
|
gpl-3.0
| 16 | 0 | 5 | 2 | 9 | 5 | 4 | 1 | 0 |
module Y2015.R1A.C where
import Data.List
import Data.Maybe
import Data.String.Utils
import Debug.Trace
solve :: Problem -> Solution
parse :: [String] -> [Problem]
data Solution = Solution [Int]
deriving (Eq)
instance Show Solution
where show (Solution x) = '\n' : unlines (map show x)
data Problem = Problem { trees :: [(Int, Int)]
} deriving (Eq, Show)
parse (nline:rest) =
let n = read nline
mapCoords l = let (x:y:[]) = map read $ words l in (x,y) :: (Int, Int)
in Problem (map mapCoords (take n rest)) : parse (drop n rest)
parse [] = []
--solve p | trace (show p) False = undefined
solve (Problem trees) = Solution $ replicate 5 0
|
joranvar/GoogleCodeJam
|
Y2015/R1A/C.hs
|
gpl-3.0
| 710 | 0 | 16 | 186 | 300 | 160 | 140 | 19 | 1 |
-- ~/.xmonad/xmonad.hs - full xmonad configuration
import XMonad
import qualified XMonad.Config as DefConfig (def)
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Layout
import XMonad.Layout.NoBorders ( noBorders, smartBorders )
import XMonad.Hooks.SetWMName
import XMonad.Util.Run(spawnPipe)
import XMonad.Util.EZConfig(additionalKeys)
import System.IO
main = do
xmproc <- spawnPipe "xmobar"
xmonad $ DefConfig.def
{ terminal = "urxvt"
, focusFollowsMouse = True
, clickJustFocuses = False
, borderWidth = 1
, modMask = mod4Mask
, workspaces = myworkspaces
, normalBorderColor = "#dddddd"
, focusedBorderColor = "#00dd00"
, manageHook = manageDocks <+> myManagers
<+> manageHook DefConfig.def
, layoutHook = avoidStruts
$ smartBorders
$ layoutHook DefConfig.def
--, layoutHook = avoidStruts $ layoutHook DefConfig.def
-- this must be done in this order, docksEventHook must be last
, handleEventHook = handleEventHook DefConfig.def <+> docksEventHook
, logHook = dynamicLogWithPP xmobarPP
{ ppOutput = hPutStrLn xmproc
, ppTitle = xmobarColor "lime" "" . shorten 20
, ppCurrent = xmobarColor "aquamarine" "" . wrap "[" "]"
, ppHiddenNoWindows = xmobarColor "slateblue" ""
, ppLayout = shorten 6
, ppVisible = wrap "(" ")"
, ppUrgent = xmobarColor "fuchsia" "gold"
}
, startupHook = setWMName "LG3D"
} `additionalKeys`
[ ((mod4Mask .|. shiftMask, xK_z) , spawn screenLock)
, ((controlMask, xK_q) , spawn gVim)
, ((controlMask, xK_Print) , spawn scrotWindow)
, ((0, xK_Print) , spawn scrotFullscreen)
, ((mod4Mask, xK_p) , spawn myDmenu)
, ((mod4Mask, xK_b) , sendMessage ToggleStruts)
]
--startup :: X ()
--startup = do
--setWMName "LG3D"
-- works better from .xinitrc
--spawn "stalonetray"
scrotWindow = "sleep 1; scrot -s ~/%Y-%m-%d-%T-screenshot.png"
scrotFullscreen = "scrot ~/%Y-%m-%d-%T-screenshot.png"
screenLock = "xscreensaver-command -lock"
gVim = "gvim"
-- change the Xmonad default font for dmenu
myDmenu = "dmenu_run -fn Dina:size=9"
myworkspaces = [ "code"
, "web"
, "media"
, "irc"
, "random"
, "mail"
, "docs"
, "music"
, "root"
]
myManagers = composeAll
[ className =? "Gimp" --> doShift "random"
, className =? "Firefox" --> doShift "web"
, (className =? "Firefox" <&&> resource =? "Dialog") --> doFloat
]
|
grochmal/rc
|
xmonad/xmonad.hs
|
gpl-3.0
| 2,995 | 0 | 15 | 1,057 | 559 | 329 | 230 | 61 | 1 |
{-# language TypeFamilies, ScopedTypeVariables, MultiParamTypeClasses #-}
{-# language FlexibleInstances, UndecidableInstances #-}
module Data.Array.Accelerate.Sparse.COOElem where
import Foreign.Storable (Storable(..))
import Foreign.Ptr
-- import qualified Data.Vector.Storable as VS
import qualified Data.Array.Accelerate as A
import Data.Array.Accelerate.Array.Sugar
import Data.Array.Accelerate.Array.Data
import Data.Array.Accelerate.Smart
import Data.Array.Accelerate.Product ( IsProduct(..), TupleIdx(..) )
import Data.Typeable
-- | Matrix element in COO form
newtype COOElem i e = CooE (i, i, e) deriving (Eq, Show)
getRow, getCol :: COOElem i e -> i
getRow (CooE (i, _, _)) = i
getCol (CooE (_, j, _)) = j
getElem :: COOElem i e -> e
getElem (CooE (_, _, e)) = e
-- | Lexicographic ordering of matrix elements, rows-first
instance (Ord i, Eq e) => Ord (COOElem i e) where
(CooE (i, j, _)) <= (CooE (i', j', _))
| i < i' = True
| i == i' && j <= j' = True
| otherwise = False
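-- Added illustration (not in the original module): with the row-major
-- ordering above, an element in row 0 compares below one in row 1,
-- regardless of the column indices.
cooOrdExample :: Bool
cooOrdExample = CooE (0 :: Int, 2, 'b') <= CooE (1, 0, 'a')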
-- | COOElem must be Storable if we want to sort vectors of these via vector-algorithms
instance (Integral i, Storable i, Storable e) => Storable (COOElem i e) where
sizeOf _ = sizeOf (undefined :: e) + 2 * sizeOf (undefined :: i)
alignment _ = max (alignment (undefined :: e)) (alignment (undefined :: i))
peek p = do
let szint = sizeOf (undefined :: i)
i <- peekByteOff p 0
j <- peekByteOff p szint
let p' = castPtr p
e <- peekByteOff p' szint
return $ CooE (i, j, e)
poke p (CooE (i,j,e)) = do
let szint = sizeOf (undefined :: i)
pokeByteOff p 0 i
pokeByteOff p szint j
let p' = castPtr p
pokeByteOff p' szint e
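-- Added sanity check (not in the original module): the declared layout is
-- two index fields followed by one value field, e.g. for Int indices and
-- Double values.
cooSizeExample :: Int
cooSizeExample = sizeOf (undefined :: COOElem Int Double)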
-- * `accelerate`-related instances
type instance EltRepr (COOElem i a) = EltRepr (i, i, a)
-- | Elt
instance (ArrayElt (EltRepr e), Typeable (EltRepr e), ArrayElt (EltRepr i), Typeable (EltRepr i), Show i, Show e, Typeable i, Typeable e, Elt i, Elt e) => A.Elt (COOElem i e) where
fromElt (CooE (i, j, x)) = fromElt (i, j, x)
toElt c = let (i, j, x) = toElt c in CooE (i, j, x)
eltType (_ :: COOElem i a) = eltType (undefined :: (i, i, a))
-- | IsProduct
instance (Elt ix, Elt e) => IsProduct Elt (COOElem ix e) where
type ProdRepr (COOElem ix e) = ((((), ix), ix), e)
fromProd _ (CooE (i, j, x)) = ((((), i), j), x)
toProd _ ((((),i), j), x) = CooE (i,j,x)
prod cst _ = prod cst (undefined :: (i,i,e))
-- | Lift
instance (A.Lift A.Exp e, Elt (A.Plain e), A.Lift A.Exp i, Elt (A.Plain i)) => A.Lift A.Exp (COOElem i e) where
type Plain (COOElem i e) = COOElem (A.Plain i) (A.Plain e)
lift (CooE (i,j,x)) = Exp . Tuple $ NilTup `SnocTup` A.lift i `SnocTup` A.lift j `SnocTup` A.lift x
-- | Unlift
instance (A.Lift A.Exp e, Elt (A.Plain e), A.Lift A.Exp i, Elt (A.Plain i), Elt i, Elt e) => A.Unlift A.Exp (COOElem (A.Exp i) (A.Exp e)) where
unlift c = let ii = Exp $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` c
jj = Exp $ SuccTupIdx ZeroTupIdx `Prj` c
xx = Exp $ ZeroTupIdx `Prj` c
in CooE (ii, jj, xx)
|
ocramz/sparse-linear-algebra
|
accelerate/src/Data/Array/Accelerate/Sparse/COOElem.hs
|
gpl-3.0
| 3,071 | 0 | 14 | 692 | 1,432 | 777 | 655 | -1 | -1 |
-----------------------------------------------------------------------------
--
-- Module : Data.DICOM.Object
-- Copyright : Copyright (c) DICOM Grid 2015
-- License : GPL-3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability :
--
-- | Types and smart constructors for DICOM objects and elements.
--
-----------------------------------------------------------------------------
{-# LANGUAGE PatternSynonyms #-}
module Data.DICOM.Object
( ElementContent(..)
, Element(..)
, SequenceItem(..)
, Sequence(..)
, Object(..)
, readObject
, readObjectFromFile
, writeObject
, writeObjectToFile
, element
, sq
, item
, object
, ae
, as
, cs
, da
, ds
, dt
, fl
, fd
, is
, lo
, lt
, ob
, ow
, pn
, sh
, sl
, ss
, st
, tm
, ui
, ul
, un
, us
, ut
) where
import Prelude hiding (LT)
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import Data.Bool (bool)
import Data.Int (Int64)
import Data.Monoid (Monoid(..), (<>))
import Data.Foldable (traverse_)
import Data.List (sortBy)
import Data.Function (on)
import Data.Time.Clock (UTCTime)
import Data.Time.Format (formatTime,defaultTimeLocale)
import Data.DICOM.VL
import Data.DICOM.VR
import Data.DICOM.Tag
import Control.Monad (unless)
import Control.Applicative
import Text.Printf (printf)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as BL
-- Magic constants
dicm :: B.ByteString
dicm = BC.pack "DICM"
-- Data types
data ElementContent
= BytesContent B.ByteString
| FragmentContent [B.ByteString]
| SequenceContent Sequence deriving Eq
instance Show ElementContent where
showsPrec p (BytesContent _) = showParen (p > 10) $ showString "BytesContent {..}"
showsPrec p (FragmentContent f) = showParen (p > 10) $ showString "FragmentContent { length = " . shows (length f) . showString " }"
showsPrec p (SequenceContent s) = showParen (p > 10) $ showString "SequenceContent " . showsPrec 11 s
data Element = Element
{ elementTag :: Tag
, elementVL :: VL
, elementVR :: VR
, elementContent :: ElementContent
} deriving (Show, Eq)
data SequenceItem = SequenceItem
{ sequenceItemLength :: Word32
, sequenceItemElements :: [Element]
} deriving (Show, Eq)
newtype Sequence = Sequence { runSequence :: [SequenceItem] } deriving (Show, Eq)
newtype Object = Object { runObject :: [Element] } deriving (Show, Eq)
-- Serialization
instance Binary Element where
get = do
_tag <- get
_vr <- get
_vl <- if isVLReserved _vr then
(do skip 2
vl . fromIntegral <$> getWord32le)
else vl . fromIntegral <$> getWord16le
content <- case _vr of
SQ -> SequenceContent <$> readSequence _vl
_ -> case _vl of
UndefinedValueLength ->
case _tag of
PixelData -> FragmentContent <$> readFragmentData
_ -> failWithOffset "Undefined VL not implemented"
_ -> do
bytes <- getByteString $ fromIntegral $ runVL _vl
return $ BytesContent bytes
return $ Element _tag _vl _vr content
put el = do
put $ elementTag el
put $ elementVR el
if isVLReserved (elementVR el) then
(do putWord16le 0
putWord32le . fromIntegral . runVL $ elementVL el)
else putWord16le . fromIntegral . runVL $ elementVL el
case elementContent el of
SequenceContent s -> writeSequence (elementVL el) s
BytesContent bs -> putByteString bs
FragmentContent _ -> fail "Fragment content is not supported for writing."
readSequence :: VL -> Get Sequence
readSequence UndefinedValueLength = do
els <- untilG (isSequenceDelimitationItem <$> get) get
SequenceDelimitationItem <- get
skip 4
return $ Sequence els
readSequence _vl = Sequence <$> untilByteCount (fromIntegral $ runVL _vl) get
writeSequence :: VL -> Sequence -> Put
writeSequence _vl s = do
traverse_ put (runSequence s)
case _vl of
UndefinedValueLength -> do
put SequenceDelimitationItem
putWord32le 0
_ -> return ()
readFragmentData :: Get [B.ByteString]
readFragmentData = do
els <- untilG (isSequenceDelimitationItem <$> get) $ do
t <- get
case t of
Item -> do
itemLength <- getWord32le
getByteString $ fromIntegral $ itemLength
_ -> failWithOffset "Expected Item tag"
SequenceDelimitationItem <- get
skip 4
return els
instance Binary SequenceItem where
get = do
t <- get
case t of
Item -> do
itemLength <- getWord32le
case vl (fromIntegral itemLength) of
UndefinedValueLength -> do
els <- untilG (isItemDelimitationItem <$> get) get
ItemDelimitationItem <- get
skip 4
return $ SequenceItem itemLength els
_ -> do
els <- untilByteCount (fromIntegral itemLength) get
return $ SequenceItem itemLength els
_ -> failWithOffset "Expected Item tag"
put si = do
put Item
putWord32le $ sequenceItemLength si
traverse_ put $ sequenceItemElements si
case vl (fromIntegral (sequenceItemLength si)) of
UndefinedValueLength -> do
put ItemDelimitationItem
putWord32le 0
_ -> return ()
isItemDelimitationItem :: Tag -> Bool
isItemDelimitationItem ItemDelimitationItem = True
isItemDelimitationItem _ = False
isSequenceDelimitationItem :: Tag -> Bool
isSequenceDelimitationItem SequenceDelimitationItem = True
isSequenceDelimitationItem _ = False
untilG :: Get Bool -> Get a -> Get [a]
untilG more a = lookAhead more >>= bool ((:) <$> a <*> untilG more a) (pure [])
untilByteCount :: Int64 -> Get a -> Get [a]
untilByteCount count a = do
start <- bytesRead
flip untilG a $ do
end <- bytesRead
return (end - start >= count)
isVLReserved :: VR -> Bool
isVLReserved OB = True
isVLReserved OW = True
isVLReserved OF = True
isVLReserved SQ = True
isVLReserved UT = True
isVLReserved UN = True
isVLReserved _ = False
instance Binary Object where
get = do
skip 128
header <- getByteString 4
unless (header == dicm) $ failWithOffset "Invalid DICOM header"
Object <$> untilG isEmpty get
put obj = do
putByteString $ B.replicate 128 0
putByteString dicm
let fileMetaInfo = takeWhile ((== TagGroup 0x0002) . tagGroup . elementTag) $ runObject obj
groupLength = sum $ map (BL.length . encode) fileMetaInfo
put $ tag (TagGroup 0x0002) (TagElement 0x0000)
put UL
putWord16le 4
putWord32le $ fromIntegral groupLength
traverse_ put (runObject obj)
failWithOffset :: String -> Get a
failWithOffset msg = do
offset <- bytesRead
fail $ "Error at offset " ++ printf "%08x" offset ++ ": " ++ msg
readObject :: BL.ByteString -> Either String Object
readObject bs = case decodeOrFail bs of
Left (_ , _, e) -> Left e
Right (rest, _, _) | not (BL.null rest) -> Left "Unconsumed input"
Right (_ , _, a) -> Right a
writeObject :: Object -> BL.ByteString
writeObject = encode
-- File IO
readObjectFromFile :: FilePath -> IO (Either String Object)
readObjectFromFile path = readObject <$> BL.readFile path
writeObjectToFile :: FilePath -> Object -> IO ()
writeObjectToFile path = BL.writeFile path . writeObject
-- Smart constructors for DICOM objects
-- TODO: some of these constructors could benefit from better types
instance Monoid Object where
mempty = Object []
mappend (Object es1) (Object es2) = Object (sortBy (compare `on` elementTag) $ es1 ++ es2)
element :: VR -> Tag -> B.ByteString -> Element
element vr tg content = Element tg (vl $ fromIntegral count) vr (BytesContent padded)
where
(count, padded) = case B.length content of
len | len `mod` 2 == 0 -> (len, content)
| otherwise -> (len + 1, content <> BC.pack [padChar])
padChar | vr == UI = '\0'
| isStringVR vr = ' '
| otherwise = '\0'
-- String value representations
ae :: Tag -> String -> Element
ae t = element AE t . BC.pack
as :: Tag -> String -> Element
as t = element AS t . BC.pack
cs :: Tag -> String -> Element
cs t = element CS t . BC.pack
ds :: Tag -> B.ByteString -> Element
ds = element DS
fl :: Tag -> B.ByteString -> Element
fl = element FL
fd :: Tag -> B.ByteString -> Element
fd = element FD
is :: Tag -> Int -> Element
is t = element IS t . BC.pack . show
lo :: Tag -> String -> Element
lo t = element LO t . BC.pack
lt :: Tag -> String -> Element
lt t = element LT t . BC.pack
pn :: Tag -> String -> Element
pn t = element PN t . BC.pack
sh :: Tag -> String -> Element
sh t = element SH t . BC.pack
sl :: Tag -> B.ByteString -> Element
sl = element SL
ss :: Tag -> B.ByteString -> Element
ss = element SS
st :: Tag -> B.ByteString -> Element
st = element ST
ui :: Tag -> String -> Element
ui t = element UI t . BC.pack
ul :: Tag -> B.ByteString -> Element
ul = element UL
un :: Tag -> B.ByteString -> Element
un = element UN
us :: Tag -> B.ByteString -> Element
us = element US
ut :: Tag -> B.ByteString -> Element
ut = element UT
-- Binary value representations
ob :: Tag -> B.ByteString -> Element
ob = element OB
ow :: Tag -> B.ByteString -> Element
ow = element OW
-- Date/time value representations
da :: Tag -> UTCTime -> Element
da t = element DA t . BC.pack . formatTime defaultTimeLocale "%Y%m%d"
dt :: Tag -> UTCTime -> Element
dt t = element DT t . BC.pack . formatTime defaultTimeLocale "%Y%m%d%H%M%S.000000&0000"
tm :: Tag -> UTCTime -> Element
tm t = element TM t . BC.pack . formatTime defaultTimeLocale "%H%M%S.000000"
sq :: Tag -> [SequenceItem] -> Element
sq tg items = Element tg UndefinedValueLength SQ (SequenceContent (Sequence items))
item :: [Element] -> SequenceItem
item = SequenceItem (fromIntegral . runVL $ UndefinedValueLength) . sortBy (compare `on` elementTag)
object :: [Element] -> Object
object = Object . sortBy (compare `on` elementTag)
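-- Usage sketch (added illustration; the tags below are arbitrary examples,
-- not a meaningful DICOM data set). 'tag', 'TagGroup' and 'TagElement'
-- come from Data.DICOM.Tag, imported above.
exampleObject :: Object
exampleObject = object
  [ pn (tag (TagGroup 0x0010) (TagElement 0x0010)) "DOE^JOHN"
  , cs (tag (TagGroup 0x0010) (TagElement 0x0040)) "M"
  ]
exampleBytes :: BL.ByteString
exampleBytes = writeObject exampleObject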
|
dicomgrid/dicom-haskell-library
|
src/Data/DICOM/Object.hs
|
gpl-3.0
| 10,089 | 0 | 21 | 2,428 | 3,283 | 1,679 | 1,604 | 282 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudPrivateCatalogProducer.Catalogs.Undelete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Undeletes a deleted Catalog and all resources under it.
--
-- /See:/ <https://cloud.google.com/private-catalog/ Cloud Private Catalog Producer API Reference> for @cloudprivatecatalogproducer.catalogs.undelete@.
module Network.Google.Resource.CloudPrivateCatalogProducer.Catalogs.Undelete
(
-- * REST Resource
CatalogsUndeleteResource
-- * Creating a Request
, catalogsUndelete
, CatalogsUndelete
-- * Request Lenses
, cuXgafv
, cuUploadProtocol
, cuAccessToken
, cuUploadType
, cuPayload
, cuName
, cuCallback
) where
import Network.Google.CloudPrivateCatalogProducer.Types
import Network.Google.Prelude
-- | A resource alias for @cloudprivatecatalogproducer.catalogs.undelete@ method which the
-- 'CatalogsUndelete' request conforms to.
type CatalogsUndeleteResource =
"v1beta1" :>
CaptureMode "name" "undelete" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GoogleCloudPrivatecatalogproducerV1beta1UndeleteCatalogRequest
:>
Post '[JSON]
GoogleCloudPrivatecatalogproducerV1beta1Catalog
-- | Undeletes a deleted Catalog and all resources under it.
--
-- /See:/ 'catalogsUndelete' smart constructor.
data CatalogsUndelete =
CatalogsUndelete'
{ _cuXgafv :: !(Maybe Xgafv)
, _cuUploadProtocol :: !(Maybe Text)
, _cuAccessToken :: !(Maybe Text)
, _cuUploadType :: !(Maybe Text)
, _cuPayload :: !GoogleCloudPrivatecatalogproducerV1beta1UndeleteCatalogRequest
, _cuName :: !Text
, _cuCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CatalogsUndelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cuXgafv'
--
-- * 'cuUploadProtocol'
--
-- * 'cuAccessToken'
--
-- * 'cuUploadType'
--
-- * 'cuPayload'
--
-- * 'cuName'
--
-- * 'cuCallback'
catalogsUndelete
:: GoogleCloudPrivatecatalogproducerV1beta1UndeleteCatalogRequest -- ^ 'cuPayload'
-> Text -- ^ 'cuName'
-> CatalogsUndelete
catalogsUndelete pCuPayload_ pCuName_ =
CatalogsUndelete'
{ _cuXgafv = Nothing
, _cuUploadProtocol = Nothing
, _cuAccessToken = Nothing
, _cuUploadType = Nothing
, _cuPayload = pCuPayload_
, _cuName = pCuName_
, _cuCallback = Nothing
}
-- | V1 error format.
cuXgafv :: Lens' CatalogsUndelete (Maybe Xgafv)
cuXgafv = lens _cuXgafv (\ s a -> s{_cuXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cuUploadProtocol :: Lens' CatalogsUndelete (Maybe Text)
cuUploadProtocol
= lens _cuUploadProtocol
(\ s a -> s{_cuUploadProtocol = a})
-- | OAuth access token.
cuAccessToken :: Lens' CatalogsUndelete (Maybe Text)
cuAccessToken
= lens _cuAccessToken
(\ s a -> s{_cuAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cuUploadType :: Lens' CatalogsUndelete (Maybe Text)
cuUploadType
= lens _cuUploadType (\ s a -> s{_cuUploadType = a})
-- | Multipart request metadata.
cuPayload :: Lens' CatalogsUndelete GoogleCloudPrivatecatalogproducerV1beta1UndeleteCatalogRequest
cuPayload
= lens _cuPayload (\ s a -> s{_cuPayload = a})
-- | The resource name of the catalog.
cuName :: Lens' CatalogsUndelete Text
cuName = lens _cuName (\ s a -> s{_cuName = a})
-- | JSONP
cuCallback :: Lens' CatalogsUndelete (Maybe Text)
cuCallback
= lens _cuCallback (\ s a -> s{_cuCallback = a})
instance GoogleRequest CatalogsUndelete where
type Rs CatalogsUndelete =
GoogleCloudPrivatecatalogproducerV1beta1Catalog
type Scopes CatalogsUndelete =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient CatalogsUndelete'{..}
= go _cuName _cuXgafv _cuUploadProtocol
_cuAccessToken
_cuUploadType
_cuCallback
(Just AltJSON)
_cuPayload
cloudPrivateCatalogProducerService
where go
= buildClient
(Proxy :: Proxy CatalogsUndeleteResource)
mempty
|
brendanhay/gogol
|
gogol-cloudprivatecatalogproducer/gen/Network/Google/Resource/CloudPrivateCatalogProducer/Catalogs/Undelete.hs
|
mpl-2.0
| 5,260 | 0 | 16 | 1,234 | 776 | 452 | 324 | 115 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Container.Projects.Zones.Clusters.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all clusters owned by a project in either the specified zone or
-- all zones.
--
-- /See:/ <https://cloud.google.com/container-engine/ Kubernetes Engine API Reference> for @container.projects.zones.clusters.list@.
module Network.Google.Resource.Container.Projects.Zones.Clusters.List
(
-- * REST Resource
ProjectsZonesClustersListResource
-- * Creating a Request
, projectsZonesClustersList
, ProjectsZonesClustersList
-- * Request Lenses
, proParent
, proXgafv
, proUploadProtocol
, proAccessToken
, proUploadType
, proZone
, proProjectId
, proCallback
) where
import Network.Google.Container.Types
import Network.Google.Prelude
-- | A resource alias for @container.projects.zones.clusters.list@ method which the
-- 'ProjectsZonesClustersList' request conforms to.
type ProjectsZonesClustersListResource =
"v1" :>
"projects" :>
Capture "projectId" Text :>
"zones" :>
Capture "zone" Text :>
"clusters" :>
QueryParam "parent" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListClustersResponse
-- | Lists all clusters owned by a project in either the specified zone or
-- all zones.
--
-- /See:/ 'projectsZonesClustersList' smart constructor.
data ProjectsZonesClustersList =
ProjectsZonesClustersList'
{ _proParent :: !(Maybe Text)
, _proXgafv :: !(Maybe Xgafv)
, _proUploadProtocol :: !(Maybe Text)
, _proAccessToken :: !(Maybe Text)
, _proUploadType :: !(Maybe Text)
, _proZone :: !Text
, _proProjectId :: !Text
, _proCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsZonesClustersList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'proParent'
--
-- * 'proXgafv'
--
-- * 'proUploadProtocol'
--
-- * 'proAccessToken'
--
-- * 'proUploadType'
--
-- * 'proZone'
--
-- * 'proProjectId'
--
-- * 'proCallback'
projectsZonesClustersList
:: Text -- ^ 'proZone'
-> Text -- ^ 'proProjectId'
-> ProjectsZonesClustersList
projectsZonesClustersList pProZone_ pProProjectId_ =
ProjectsZonesClustersList'
{ _proParent = Nothing
, _proXgafv = Nothing
, _proUploadProtocol = Nothing
, _proAccessToken = Nothing
, _proUploadType = Nothing
, _proZone = pProZone_
, _proProjectId = pProProjectId_
, _proCallback = Nothing
}
-- | The parent (project and location) where the clusters will be listed.
-- Specified in the format \`projects\/*\/locations\/*\`. Location \"-\"
-- matches all zones and all regions.
proParent :: Lens' ProjectsZonesClustersList (Maybe Text)
proParent
= lens _proParent (\ s a -> s{_proParent = a})
-- | V1 error format.
proXgafv :: Lens' ProjectsZonesClustersList (Maybe Xgafv)
proXgafv = lens _proXgafv (\ s a -> s{_proXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
proUploadProtocol :: Lens' ProjectsZonesClustersList (Maybe Text)
proUploadProtocol
= lens _proUploadProtocol
(\ s a -> s{_proUploadProtocol = a})
-- | OAuth access token.
proAccessToken :: Lens' ProjectsZonesClustersList (Maybe Text)
proAccessToken
= lens _proAccessToken
(\ s a -> s{_proAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
proUploadType :: Lens' ProjectsZonesClustersList (Maybe Text)
proUploadType
= lens _proUploadType
(\ s a -> s{_proUploadType = a})
-- | Deprecated. The name of the Google Compute Engine
-- [zone](https:\/\/cloud.google.com\/compute\/docs\/zones#available) in
-- which the cluster resides, or \"-\" for all zones. This field has been
-- deprecated and replaced by the parent field.
proZone :: Lens' ProjectsZonesClustersList Text
proZone = lens _proZone (\ s a -> s{_proZone = a})
-- | Deprecated. The Google Developers Console [project ID or project
-- number](https:\/\/support.google.com\/cloud\/answer\/6158840). This
-- field has been deprecated and replaced by the parent field.
proProjectId :: Lens' ProjectsZonesClustersList Text
proProjectId
= lens _proProjectId (\ s a -> s{_proProjectId = a})
-- | JSONP
proCallback :: Lens' ProjectsZonesClustersList (Maybe Text)
proCallback
= lens _proCallback (\ s a -> s{_proCallback = a})
instance GoogleRequest ProjectsZonesClustersList
where
type Rs ProjectsZonesClustersList =
ListClustersResponse
type Scopes ProjectsZonesClustersList =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsZonesClustersList'{..}
= go _proProjectId _proZone _proParent _proXgafv
_proUploadProtocol
_proAccessToken
_proUploadType
_proCallback
(Just AltJSON)
containerService
where go
= buildClient
(Proxy :: Proxy ProjectsZonesClustersListResource)
mempty
|
brendanhay/gogol
|
gogol-container/gen/Network/Google/Resource/Container/Projects/Zones/Clusters/List.hs
|
mpl-2.0
| 6,171 | 0 | 20 | 1,434 | 872 | 510 | 362 | 125 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Directory.Users.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- create user.
--
-- /See:/ <https://developers.google.com/admin-sdk/directory/ Admin Directory API Reference> for @directory.users.insert@.
module Network.Google.Resource.Directory.Users.Insert
(
-- * REST Resource
UsersInsertResource
-- * Creating a Request
, usersInsert
, UsersInsert
-- * Request Lenses
, uiPayload
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @directory.users.insert@ method which the
-- 'UsersInsert' request conforms to.
type UsersInsertResource =
"admin" :>
"directory" :>
"v1" :>
"users" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] User :> Post '[JSON] User
-- | create user.
--
-- /See:/ 'usersInsert' smart constructor.
newtype UsersInsert = UsersInsert'
{ _uiPayload :: User
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UsersInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uiPayload'
usersInsert
:: User -- ^ 'uiPayload'
-> UsersInsert
usersInsert pUiPayload_ =
UsersInsert'
{ _uiPayload = pUiPayload_
}
-- | Multipart request metadata.
uiPayload :: Lens' UsersInsert User
uiPayload
= lens _uiPayload (\ s a -> s{_uiPayload = a})
instance GoogleRequest UsersInsert where
type Rs UsersInsert = User
type Scopes UsersInsert =
'["https://www.googleapis.com/auth/admin.directory.user"]
requestClient UsersInsert'{..}
= go (Just AltJSON) _uiPayload directoryService
where go
= buildClient (Proxy :: Proxy UsersInsertResource)
mempty
|
rueshyna/gogol
|
gogol-admin-directory/gen/Network/Google/Resource/Directory/Users/Insert.hs
|
mpl-2.0
| 2,556 | 0 | 13 | 597 | 308 | 189 | 119 | 48 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdExchangeBuyer2.Accounts.Clients.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a client buyer with a given client account ID.
--
-- /See:/ <https://developers.google.com/authorized-buyers/apis/reference/rest/ Ad Exchange Buyer API II Reference> for @adexchangebuyer2.accounts.clients.get@.
module Network.Google.Resource.AdExchangeBuyer2.Accounts.Clients.Get
(
-- * REST Resource
AccountsClientsGetResource
-- * Creating a Request
, accountsClientsGet
, AccountsClientsGet
-- * Request Lenses
, acgcXgafv
, acgcUploadProtocol
, acgcAccessToken
, acgcUploadType
, acgcAccountId
, acgcClientAccountId
, acgcCallback
) where
import Network.Google.AdExchangeBuyer2.Types
import Network.Google.Prelude
-- | A resource alias for @adexchangebuyer2.accounts.clients.get@ method which the
-- 'AccountsClientsGet' request conforms to.
type AccountsClientsGetResource =
"v2beta1" :>
"accounts" :>
Capture "accountId" (Textual Int64) :>
"clients" :>
Capture "clientAccountId" (Textual Int64) :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Client
-- | Gets a client buyer with a given client account ID.
--
-- /See:/ 'accountsClientsGet' smart constructor.
data AccountsClientsGet =
AccountsClientsGet'
{ _acgcXgafv :: !(Maybe Xgafv)
, _acgcUploadProtocol :: !(Maybe Text)
, _acgcAccessToken :: !(Maybe Text)
, _acgcUploadType :: !(Maybe Text)
, _acgcAccountId :: !(Textual Int64)
, _acgcClientAccountId :: !(Textual Int64)
, _acgcCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsClientsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'acgcXgafv'
--
-- * 'acgcUploadProtocol'
--
-- * 'acgcAccessToken'
--
-- * 'acgcUploadType'
--
-- * 'acgcAccountId'
--
-- * 'acgcClientAccountId'
--
-- * 'acgcCallback'
accountsClientsGet
:: Int64 -- ^ 'acgcAccountId'
-> Int64 -- ^ 'acgcClientAccountId'
-> AccountsClientsGet
accountsClientsGet pAcgcAccountId_ pAcgcClientAccountId_ =
AccountsClientsGet'
{ _acgcXgafv = Nothing
, _acgcUploadProtocol = Nothing
, _acgcAccessToken = Nothing
, _acgcUploadType = Nothing
, _acgcAccountId = _Coerce # pAcgcAccountId_
, _acgcClientAccountId = _Coerce # pAcgcClientAccountId_
, _acgcCallback = Nothing
}
-- | V1 error format.
acgcXgafv :: Lens' AccountsClientsGet (Maybe Xgafv)
acgcXgafv
= lens _acgcXgafv (\ s a -> s{_acgcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
acgcUploadProtocol :: Lens' AccountsClientsGet (Maybe Text)
acgcUploadProtocol
= lens _acgcUploadProtocol
(\ s a -> s{_acgcUploadProtocol = a})
-- | OAuth access token.
acgcAccessToken :: Lens' AccountsClientsGet (Maybe Text)
acgcAccessToken
= lens _acgcAccessToken
(\ s a -> s{_acgcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
acgcUploadType :: Lens' AccountsClientsGet (Maybe Text)
acgcUploadType
= lens _acgcUploadType
(\ s a -> s{_acgcUploadType = a})
-- | Numerical account ID of the client\'s sponsor buyer. (required)
acgcAccountId :: Lens' AccountsClientsGet Int64
acgcAccountId
= lens _acgcAccountId
(\ s a -> s{_acgcAccountId = a})
. _Coerce
-- | Numerical account ID of the client buyer to retrieve. (required)
acgcClientAccountId :: Lens' AccountsClientsGet Int64
acgcClientAccountId
= lens _acgcClientAccountId
(\ s a -> s{_acgcClientAccountId = a})
. _Coerce
-- | JSONP
acgcCallback :: Lens' AccountsClientsGet (Maybe Text)
acgcCallback
= lens _acgcCallback (\ s a -> s{_acgcCallback = a})
instance GoogleRequest AccountsClientsGet where
type Rs AccountsClientsGet = Client
type Scopes AccountsClientsGet =
'["https://www.googleapis.com/auth/adexchange.buyer"]
requestClient AccountsClientsGet'{..}
= go _acgcAccountId _acgcClientAccountId _acgcXgafv
_acgcUploadProtocol
_acgcAccessToken
_acgcUploadType
_acgcCallback
(Just AltJSON)
adExchangeBuyer2Service
where go
= buildClient
(Proxy :: Proxy AccountsClientsGetResource)
mempty
|
brendanhay/gogol
|
gogol-adexchangebuyer2/gen/Network/Google/Resource/AdExchangeBuyer2/Accounts/Clients/Get.hs
|
mpl-2.0
| 5,403 | 0 | 18 | 1,236 | 818 | 473 | 345 | 119 | 1 |
-- Example of an international dialog box.
import Graphics.UI.Gtk
import Data.Char
import qualified Data.Text as T
import Control.Exception
import Control.Applicative
import Data.Text (Text)
main :: IO ()
main = do
initGUI
dia <- dialogNew
dialogAddButton dia stockYes ResponseYes
dialogAddButton dia stockNo ResponseNo
contain <- castToBox <$> dialogGetContentArea dia
theText <- labelNew (Nothing :: Maybe Text)
labelSetMarkup theText (T.pack arabic)
boxPackStart contain theText PackNatural 0
widgetShowAll dia
res <- dialogRun dia
case res of
ResponseNo -> yell
_ -> return ()
arabic :: String
arabic = markSpan [FontSize (SizePoint 36)] $
--"Is Haskell a "++markSpan [FontForeground "red"] "fantastic"++" language?"++
-- Do you find Haskell a fantastic language? (language has a grammatical
-- mistake in it)
map chr [0x647,0x644,32,0x62A,0x62C,0x62F,0x646,32]++
markSpan [FontForeground "red"]
(map chr [0x647,0x622,0x633,0x643,0x622,0x644])++
map chr [32,0x644,0x63A,0x62A,32,0x645,0x62F,0x647,0x634,0x62A,0x61F]
yell :: IO ()
yell = do
dia <- dialogNew
dialogAddButton dia stockOk ResponseOk
contain <- castToBox <$> dialogGetContentArea dia
msg <- labelNew (Just "This is not an option.")
boxPackStart contain msg PackNatural 0
widgetShow msg
dialogRun dia
return ()
|
juhp/gtk2hs
|
gtk/demo/unicode/Arabic.hs
|
lgpl-3.0
| 1,340 | 0 | 12 | 230 | 425 | 215 | 210 | 37 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
module CommonMain where
import Print
import Trie
import Data.List(intersperse)
import System.Environment(getArgs, getEnv)
import GeneralIO
import System.IO
import Dictionary
import Frontend
import Data.Char
import ErrM
-- import Monad
import Control.Exception (catch, IOException)
gfTypes :: Language a => a -> String
gfTypes l = "types." ++ name l ++ ".gf"
readDict :: Language a => a -> FilePath -> IO Dictionary
readDict l f =
do database <- parseDict l f
return $ unionDictionary (internDict l) database
readTrie :: Language a => a -> FilePath -> IO SATrie
readTrie l f = do d <- readDict l f
prInfo d
return $ trieDict d
uName :: Language a => a -> String
uName l = case name l of
[] -> []
(x:xs) -> toUpper x : xs
commonMain :: Language a => a -> IO ()
commonMain l = do
xx <- getArgs
lex <- catch (getEnv (env l)) (\(_::IOException) ->
do prErr $ "\n[" ++ (env l) ++ " is undefined, using \"./" ++ (dbaseName l) ++ "\".]\n"
return $ "./" ++ (dbaseName l))
case xx of
[] -> do prErr $ welcome l
t <- readTrie l lex
run (analysis t (composition l))
["-h"] -> help
["-s"] -> do prErr $ welcome l
putStrLn $ "\n[Synthesiser mode]\n"
putStrLn $ "Enter a " ++ (uName l) ++ " word in any form.\n"
putStrLn $ "If the word is not found, a [command] with [arguments].\n"
putStrLn $ "Type 'c' to list commands.\n"
putStrLn $ "Type 'q' to quit.\n"
theDictionary <- readDict l lex
trieDictL <- readTrie l lex
synthesiser l theDictionary trieDictL
["-i"] -> do prErr $ welcome l
putStrLn $ "\n[Inflection mode]\n"
putStrLn $ "Enter [command] [dictionary form].\n"
putStrLn $ "Type 'c' to list commands.\n"
putStrLn $ "Type 'q' to quit.\n"
infMode l
["-ib"] -> do prErr $ welcome l
imode l
_ ->
do theDictionary <- readDict l lex
case xx of
["-lex"] -> outputLex theDictionary
["-lex",file] -> do writeLex file theDictionary
prErr $ "Wrote full form lexicon: " ++ file
["-tables"] -> outputTables theDictionary
["-tables",file] -> do writeTables file theDictionary
prErr $ "Wrote tables: " ++ file
["-gf"] -> outputGF (gfTypes l) theDictionary
["-gf",file] -> do writeGF file (gfTypes l) theDictionary
prErr $ "Wrote GF source code: " ++ file
["-gfr"] -> outputGFRes (gfTypes l) theDictionary
["-gfr",file] -> do writeGFRes file (gfTypes l) theDictionary
prErr $ "Wrote GF resource: " ++ file
["-latex"] -> outputLatex theDictionary
["-latex",file] -> do writeLatex file theDictionary
prErr $ "Wrote LaTeX document: " ++ file
["-xml"] -> outputXML theDictionary
["-xml",file] -> do writeXML file theDictionary
prErr $ "Wrote XML source code: " ++ file
["-lexc"] -> outputLEXC theDictionary
["-lexc",file] -> do writeLEXC file theDictionary
prErr $ "Wrote LEXC source code: " ++ file
["-xfst"] -> outputXFST theDictionary
["-xfst",file] -> do writeXFST file theDictionary
prErr $ "Wrote XFST source code: " ++ file
["-sql"] -> outputSQL theDictionary
["-sql",file] -> do writeSQL file theDictionary
prErr $ "Wrote SQL source code: " ++ file
xs -> do prErr $ "Invalid parameter" ++ unwords xs
help
run :: (String -> [[String]]) -> IO ()
run f = interact $ unlines . analyze (f) . nWords
analyze :: (String -> [[String]]) -> [String] -> [String]
analyze _ [] = []
analyze f (s:ss)
= case (f s) of
[] -> ("[ <" ++ s ++ "> NONE]") : analyze f ss
xs -> ("[ <" ++ s ++ ">") : (prA xs ++ "]") : analyze f ss
where
prA xs = unlines [show n ++ ". " ++ s | (n,s) <- zip [1..] (map pr xs)]
pr [] = []
pr [x] = x
pr xs = "Composite: " ++ concat (intersperse " | " xs)
welcome :: Language a => a -> String
welcome l = unlines
[
"********************************************",
"* " ++ uName l ++ " Morphology" ++ (padding (uName l) 30) ++ "*",
"********************************************",
"* Functional Morphology v1.10 *",
"* (c) Markus Forsberg & Aarne Ranta 2004 *",
"* under GNU General Public License. *",
"********************************************",
""
]
where padding s n = replicate (max (n - length s) 0) ' '
prInfo :: Dictionary -> IO()
prInfo dict = prErr $ "Dictionary loaded: DF = " ++ show (size dict) ++ " and WF = " ++ show (sizeW dict) ++ ".\n"
help :: IO()
help = prErr help_text
help_text :: String
help_text = unlines ["",
" |---------------------------------------|",
" | Program parameters |",
" |---------------------------------------|",
" | -h | Display this message |",
" |---------------------------------------|",
" | <None> | Enter tagger mode |",
" |---------------------------------------|",
" | -s | Enter interactive |",
" | | synthesiser mode |",
" |---------------------------------------|",
" | -i | Enter inflection |",
" | | mode |",
" |---------------------------------------|",
" | -ib | Inflection batch |",
" | | mode |",
" |---------------------------------------|",
" | -lex [file] | Full form lexicon |",
" | -tables [file] | Tables |",
" | -gf [file] | GF top-level code |",
" | -gfr [file] | GF resource code |",
" | -latex [file] | LaTeX source code |",
" | -xml [file] | XML source code |",
" | -lexc [file] | LexC source code |",
" | -xfst [file] | XFST source code |",
" | -sql [file] | SQL source code |",
" |---------------------------------------|",
""
]
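-- Example invocations (added illustration; 'morph' stands for whichever
-- executable wraps commonMain for a concrete language):
--   morph                 tagger mode, reads words from standard input
--   morph -s              interactive synthesiser mode
--   morph -lex lex.txt    write a full form lexicon to lex.txt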
|
johnjcamilleri/maltese-functional-morphology
|
lib-1.1/CommonMain.hs
|
lgpl-3.0
| 6,389 | 132 | 16 | 2,142 | 1,806 | 912 | 894 | 144 | 24 |
-- Flatten a list
-- http://www.haskell.org/haskellwiki/99_questions/Solutions/7
-- http://stackoverflow.com/questions/5994051/is-there-a-function-to-flatten-a-nested-list-of-elements
-- http://alumni.media.mit.edu/~tpminka/PLE/haskell/haskell-sol.html
module Problem07 where
-- A plain [a] -> [b] signature cannot express arbitrary nesting, so the
-- solution linked above wraps the input in a dedicated nested-list type.
data NestedList a = Elem a | List [NestedList a]
flatten :: NestedList a -> [a]
flatten (Elem x) = [x]
flatten (List xs) = concatMap flatten xs
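-- Illustrative usage of the definition above (added example, not in the
-- original file):
flattenExample :: [Int]
flattenExample = flatten (List [Elem 1, List [Elem 2, Elem 3], Elem 4])
-- flattenExample == [1,2,3,4]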
|
jstolarek/sandbox
|
haskell/99.problems/Problem07.hs
|
unlicense
| 386 | 0 | 6 | 47 | 37 | 24 | 13 | 3 | 1 |
module FreePalace.Messages.PalaceProtocol.ObfuscateSpec (spec) where
import qualified Data.ByteString.Lazy as LazyByteString
import Test.Hspec
import FreePalace.Messages.PalaceProtocol.Obfuscate
spec :: Spec
spec = do
describe "obfuscationKeys" $ do
it "contains the right bytes" $ do
let expectedBytes = [
0x37, 0xC5, 0x60, 0x72, 0xCD, 0xA5, 0x0B, 0x06, 0xD1, 0xF9, 0xCB, 0x3C, 0x57, 0x0C,
0x0D, 0x1B, 0x79, 0x1E, 0x2D, 0x72, 0xB4, 0x04, 0xB3, 0x60, 0xB2, 0x80, 0xDD, 0x89,
0xA0, 0xED, 0xF2, 0x84, 0x7C, 0xFA, 0xC4, 0x99, 0x9F, 0x27, 0xED, 0xDE, 0x6C, 0xDE,
0x1E, 0xB8, 0x48, 0x34, 0x64, 0x29, 0x76, 0xA5, 0x01, 0x86, 0x21, 0x0B, 0xBF, 0xB0,
0x99, 0xED, 0x20, 0x2D, 0x75, 0x42, 0x4C, 0x6A, 0xC0, 0x75, 0x7E, 0x89, 0x33, 0x00,
0x65, 0x82, 0x03, 0x04, 0x93, 0x3D, 0x57, 0x4A, 0xAA, 0x2F, 0xA5, 0xD5, 0x74, 0xBD,
0xD7, 0x31, 0xDA, 0x83, 0x96, 0xE2, 0x18, 0x59, 0xEE, 0x20, 0x53, 0x24, 0x3D, 0x88,
0xAB, 0x77, 0x4A, 0x99, 0xB9, 0x04, 0xE1, 0xD3, 0x9D, 0xD0, 0x89, 0xBF, 0x89, 0x44,
0x32, 0x64, 0xC7, 0x2E, 0x93, 0x33, 0xD5, 0x6B, 0x29, 0x95, 0xBB, 0x3B, 0x82, 0x96,
0x3D, 0x60, 0x26, 0xD0, 0x84, 0x5B, 0x45, 0x5A, 0x8B, 0x4C, 0x5A, 0x3C, 0x5B, 0x8A,
0x58, 0x94, 0xB8, 0x9B, 0x13, 0xDB, 0x6B, 0xBE, 0x1A, 0x0A, 0x60, 0x73, 0x74, 0x55,
0xE7, 0x59, 0xB0, 0xBA, 0x44, 0x76, 0xAC, 0xE2, 0x85, 0x16, 0xC4, 0x10, 0x53, 0x97,
0x38, 0xC0, 0xEE, 0xA8, 0x06, 0x42, 0xA9, 0x1B, 0xBD, 0xD2, 0x97, 0xE2, 0xDD, 0x25,
0xDA, 0xA5, 0x44, 0xC7, 0x20, 0x51, 0x3A, 0xFB, 0x92, 0x94, 0x17, 0x71, 0x2F, 0x02,
0x55, 0x60, 0x96, 0xB5, 0x26, 0x4C, 0x57, 0xF7, 0x8B, 0x46, 0x78, 0x19, 0xE2, 0xB6,
0x60, 0x8F, 0xD0, 0xF7, 0x94, 0x69, 0x04, 0x2D, 0xC5, 0x41, 0xD2, 0x40, 0x83, 0x89,
0x67, 0xC7, 0xEE, 0xF5, 0x5D, 0xA7, 0x89, 0x31, 0xD7, 0x92, 0x52, 0x57, 0xD8, 0xA7,
0x10, 0xF6, 0xD3, 0x14, 0x8A, 0x12, 0x76, 0x7F, 0x1A, 0xE2, 0x17, 0xD5, 0xB1, 0xFF,
0x7E, 0x82, 0xD8, 0x32, 0xB3, 0x89, 0x1D, 0xB2, 0x44, 0x6A, 0x78, 0xAF, 0x0F, 0x23,
0xF9, 0x64, 0x7A, 0x3B, 0x7A, 0x13, 0x21, 0x50, 0x86, 0x80, 0x07, 0x47, 0x98, 0xBC,
0x84, 0x24, 0x6E, 0xE0, 0xAB, 0x84, 0xDE, 0xDC, 0x01, 0x20, 0xED, 0x03, 0x02, 0x99,
0xA3, 0x2F, 0x9E, 0x89, 0x67, 0x03, 0x69, 0xC5, 0xA5, 0x2E, 0x12, 0xB1, 0x54, 0x5F,
0xD3, 0x36, 0xF9, 0x80, 0xDC, 0x37, 0x5F, 0x23, 0x69, 0xB2, 0xCB, 0x39, 0x99, 0x7B,
0xE9, 0x59, 0x63, 0x28, 0x09, 0x05, 0xB6, 0x92, 0xAA, 0x45, 0x79, 0x54, 0xD4, 0xE3,
0x2D, 0x6B, 0x6C, 0x5A, 0xA7, 0x29, 0xE7, 0x6C, 0x66, 0x95, 0x40, 0xC9, 0x2D, 0xCE,
0x33, 0x39, 0x55, 0x66, 0xCA, 0x7E, 0xD1, 0x7A, 0xC0, 0x20, 0x40, 0x09, 0x4E, 0x29,
0xB3, 0xDE, 0x09, 0xCD, 0x99, 0xAE, 0xFB, 0x18, 0xAD, 0x73, 0xD5, 0x7E, 0xBA, 0x02,
0x5B, 0x8F, 0x5C, 0xA3, 0xF9, 0xA9, 0x2B, 0xF2, 0x63, 0xDE, 0x98, 0x2F, 0x26, 0x99,
0x28, 0xCA, 0xBE, 0xEF, 0x77, 0x72, 0xAE, 0xC8, 0x5D, 0x29, 0x72, 0x6C, 0x2A, 0x18,
0x3B, 0x8B, 0xA1, 0x15, 0xAC, 0x1E, 0x5D, 0x09, 0x48, 0x7D, 0xDD, 0x2C, 0xAB, 0x8A,
0x07, 0xE4, 0x4A, 0xA4, 0x6B, 0x96, 0x11, 0x88, 0xCD, 0x9C, 0x30, 0x15, 0xCF, 0x55,
0x12, 0xF1, 0x82, 0x4C, 0x04, 0x24, 0xEC, 0xA8, 0xAE, 0xD2, 0x1C, 0x2E, 0x1C, 0x12,
0x42, 0x9A, 0xE8, 0x20, 0x64, 0x5C, 0xBF, 0xDE, 0xE8, 0xF4, 0xE5, 0x4C, 0x77, 0xE4,
0x51, 0xB0, 0x26, 0xDA, 0xA7, 0x86, 0xF3, 0x10, 0xB4, 0x86, 0xE8, 0x0E, 0x79, 0x91,
0x4E, 0xDE, 0x75, 0x7B, 0xD5, 0xD1, 0xD7, 0xAA, 0x16, 0xBD, 0x5B, 0x03, 0xA5, 0x31,
0x38, 0x93, 0x33, 0x66, 0xF2, 0x0E, 0x6F, 0x10, 0xA8, 0xD8, 0x35, 0x95, 0x50, 0x2D,
0xA1, 0x3F, 0xFD, 0x06, 0x09, 0x90, 0x86, 0xCC
]
obfuscationKeys `shouldBe` expectedBytes
describe "illuminate" $ do
it "produces the right plain text for a string of length 1" $ do
let encodedText = [ 0x6F ]
plaintext = "X"
illuminate encodedText `shouldBe` plaintext
it "produces the right plain text for a string of length 2" $ do
let encodedText = [ 0x90, 0x04 ]
plaintext = "13"
illuminate encodedText `shouldBe` plaintext
it "produces the right plain text for a longer string" $ do
let encodedText = [
0x59, 0x18, 0x9B, 0x94, 0x15, 0xEE, 0x7B, 0x78, 0x8C, 0x66, 0x23, 0xFB, 0x82, 0x47,
0x3E, 0x08, 0xE5, 0xAA, 0x71, 0xAE, 0x2A, 0x1F, 0x3A, 0xA7, 0x17, 0xC0, 0x6E, 0x48,
0x19, 0x97, 0xED, 0xB6, 0xCB, 0x6D, 0xB2, 0x11, 0x02, 0x0C, 0x6C, 0x7D, 0x36, 0xC1,
0x0B, 0xDB, 0x19
]
plaintext = "The quick brown fox jumped over the lazy dog."
illuminate encodedText `shouldBe` plaintext
describe "illuminateRecursive" $ do
it "produces the right bytes for a string of length 2" $ do
let encodedText = [ 0x90, 0x04 ]
reversed = reverse encodedText
initialPartiallyObfuscatedByte = 0
illuminated = illuminateRecursive obfuscationKeys initialPartiallyObfuscatedByte [] reversed
illuminated `shouldBe` [ 0x31, 0x33 ]
describe "obfuscate" $ do
it "produces the right obfuscated bytes for a string of length 1" $ do
let plaintext = "X"
encodedText = LazyByteString.pack [0x6F]
obfuscate plaintext `shouldBe` encodedText
it "produces the right obfuscated bytes for a string of length 2" $ do
let plaintext = "13"
encodedText = LazyByteString.pack [ 0x90, 0x04 ]
obfuscate plaintext `shouldBe` encodedText
it "produces the right obfuscated bytes for a longer string" $ do
let plaintext = "The quick brown fox jumped over the lazy dog."
encodedText = LazyByteString.pack [
0x59, 0x18, 0x9B, 0x94, 0x15, 0xEE, 0x7B, 0x78, 0x8C, 0x66, 0x23, 0xFB, 0x82, 0x47,
0x3E, 0x08, 0xE5, 0xAA, 0x71, 0xAE, 0x2A, 0x1F, 0x3A, 0xA7, 0x17, 0xC0, 0x6E, 0x48,
0x19, 0x97, 0xED, 0xB6, 0xCB, 0x6D, 0xB2, 0x11, 0x02, 0x0C, 0x6C, 0x7D, 0x36, 0xC1,
0x0B, 0xDB, 0x19
]
obfuscate plaintext `shouldBe` encodedText
|
psfblair/freepalace
|
spec/FreePalace/Messages/PalaceProtocol/ObfuscateSpec.hs
|
apache-2.0
| 6,232 | 0 | 17 | 1,694 | 2,236 | 1,419 | 817 | 88 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
-- * Relating initial and final representations
module TTIF where
import TTF as F hiding (main) -- The final embedding
-- * Initial embedding of our language, in HOAS
-- * (superset of the running example in Xi, Chen and Chen (POPL2003))
-- In the latter paper, the tagless interpretation of the
-- language was the motivation for GADT.
-- The initial embedding is not extensible.
-- The Var operation is used only during evaluation.
-- Var corresponds to a CSP value in MetaOCaml or
-- HOASLift in Xi, Chen and Chen (POPL2003)
-- The parameter h is the `hypothetical environment':
-- h t is the type of an environment `cell' holding a value of the type t.
data IR h t where
INT :: Int -> IR h Int
BOOL :: Bool -> IR h Bool
Add :: IR h Int -> IR h Int -> IR h Int
Mul :: IR h Int -> IR h Int -> IR h Int
Leq :: IR h Int -> IR h Int -> IR h Bool
IF :: IR h Bool -> IR h t -> IR h t -> IR h t
Var :: h t -> IR h t
Lam :: (IR h t1 -> IR h t2) -> IR h (t1->t2)
App :: IR h (t1->t2) -> IR h t1 -> IR h t2
Fix :: (IR h t -> IR h t) -> IR h t
-- * Sample terms and their inferred types
-- Compared to the terms in the final embedding, the initial
-- embedding terms below look capitalized (which is what I did in Emacs)
ti1 = Add (INT 1) (INT 2)
-- ti1 :: IR h Int
-- * th1 :: (Symantics repr) => repr Int
ti2 = Lam (\x -> Add x x)
-- ti2 :: IR h (Int -> Int)
-- th2 :: (Symantics repr) => repr (Int -> Int)
ti3 = Lam (\x -> Add (App x (INT 1)) (INT 2))
-- ti3 :: IR h ((Int -> Int) -> Int)
-- th3 :: (Symantics repr) => repr ((Int -> Int) -> Int)
tipow = Lam (\x -> Fix (\self -> Lam (\n ->
IF (Leq n (INT 0)) (INT 1)
(Mul x (App self (Add n (INT (-1))))))))
-- tipow :: IR h (Int -> Int -> Int)
-- * tpow :: (Symantics repr, BoolSYM repr, MulSYM repr, FixSYM repr) =>
-- * repr (Int -> Int -> Int)
tipow7 = Lam (\x -> App (App tipow x) (INT 7))
tipow72 = App tipow7 (INT 2)
-- tipow72 :: IR h Int
-- tpow72 :: (Symantics repr, BoolSYM repr, MulSYM repr, FixSYM repr) =>
-- repr Int
-- * //
-- * Initial evaluator
-- It is total (modulo potential non-termination in fix)
-- No exceptions are to be raised, and no pattern-match failure
-- may occur. All pattern-matching is _syntactically_, patently complete.
-- We use the same R as in TTF.hs
evalI :: IR R t -> t
evalI (INT n) = n
evalI (BOOL n) = n
evalI (Add e1 e2) = evalI e1 + evalI e2
evalI (Mul e1 e2) = evalI e1 * evalI e2
evalI (Leq e1 e2) = evalI e1 <= evalI e2
evalI (IF be et ee) = if (evalI be) then evalI et else evalI ee
evalI (Var v) = unR v
evalI (Lam b) = \x -> evalI (b . Var . R $ x)
evalI (App e1 e2) = (evalI e1) (evalI e2)
evalI (Fix f) = evalI (f (Fix f))
ti1_eval = evalI ti1
-- 3
ti2_eval = evalI ti2
-- ti2_eval :: Int -> Int
-- ti2_eval is a function, can't be shown, but can be applied
ti2_eval' = ti2_eval 21
-- 42
ti3_eval = evalI ti3
-- ti3_eval :: (Int -> Int) -> Int
tipow72_eval = evalI tipow72
-- 128
-- * //
-- * Initial viewer (another evaluator)
-- The code is essentially the same as the final one:
-- unS is replaced by viewI'
viewI' :: IR S t -> VarCounter -> String
viewI' (INT x) = const $ show x
viewI' (BOOL x) = const $ show x
viewI' (Add e1 e2) = \h ->
"(" ++ viewI' e1 h ++ "+" ++ viewI' e2 h ++ ")"
viewI' (Mul e1 e2) = \h ->
"(" ++ viewI' e1 h ++ "*" ++ viewI' e2 h ++ ")"
viewI' (Leq e1 e2) = \h ->
"(" ++ viewI' e1 h ++ "<=" ++ viewI' e2 h ++ ")"
viewI' (IF be et ee) = \h ->
unwords["(if", viewI' be h, "then", viewI' et h,
"else", viewI' ee h,")"]
viewI' (Var x) = unS x
viewI' (Lam e) = \h ->
let x = "x" ++ show h
in "(\\" ++ x ++ " -> " ++
viewI' (e (Var . S $ const x)) (succ h) ++ ")"
viewI' (App e1 e2) = \h ->
"(" ++ viewI' e1 h ++ " " ++ viewI' e2 h ++ ")"
viewI' (Fix e) = \h ->
let self = "self" ++ show h
in "(fix " ++ self ++ "." ++
viewI' (e (Var . S $ const self)) (succ h) ++ ")"
viewI e = viewI' e 0
ti1_view = viewI ti1
-- "(1+2)"
ti2_view = viewI ti2
-- "(\\x0 -> (x0+x0))"
ti3_view = viewI ti3
-- "(\\x0 -> ((x0 1)+2))"
tipow_view = viewI tipow
-- "(\\x0 -> (fix self1.(\\x2 ->
-- (if (x2<=0) then 1 else (x0*(self1 (x2+-1))) ))))"
-- * This is all very similar to the final approach
-- * The same guarantees
-- No pattern-matching failure, only closed terms can be evaluated
-- The reason is that GADT IR encodes all and only well-typed terms
-- of the object language.
-- * IR is not extensible though
-- Because both initial and final embedding encode all and only
-- well-typed object terms, one may wonder if the two embeddings
-- may be related.
-- * //
-- * Relating Initial and Final Tagless representations
instance Symantics (IR h) where
int = INT
add = Add
lam = Lam
app = App
instance MulSYM (IR h) where
mul = Mul
instance BoolSYM (IR h) where
bool = BOOL
leq = Leq
if_ = IF
instance FixSYM (IR h) where
fix = Fix
-- * Looks like the identity, doesn't it?
-- * //
-- * From Final to Initial
f2i :: IR h t -> IR h t
f2i = id
-- It does look like the identity
-- Conversion of sample terms
ti2' = f2i F.th2
-- Now we can evaluate it using different initial interpreters
ti2'_eval = evalI ti2'
-- ti2'_eval :: Int -> Int
ti2'_eval' = ti2'_eval 21
-- 42
ti2'_view = viewI ti2'
--"(\\x0 -> (x0+x0))"
-- The same for the power example
tipow72' = f2i F.tpow72
tipow72'_eval = evalI tipow72'
-- 128
tipow72'_view = viewI tipow72'
-- "((\\x0 -> (((\\x1 ->
-- (fix self2.(\\x3 -> (if (x3<=0) then 1 else (x1*(self2 (x3+-1))) ))))
-- x0) 7)) 2)"
-- * //
-- * From Initial to Final
i2f :: (Symantics repr, BoolSYM repr, MulSYM repr, FixSYM repr) =>
IR repr t -> repr t
i2f (INT x) = int x
i2f (BOOL x) = bool x
i2f (Add e1 e2) = add (i2f e1) (i2f e2)
i2f (Mul e1 e2) = mul (i2f e1) (i2f e2)
i2f (Leq e1 e2) = leq (i2f e1) (i2f e2)
i2f (IF be et ee) = if_ (i2f be) (i2f et) (i2f ee)
i2f (Var v) = v -- polymorphic lift
i2f (Lam e) = lam(\x -> i2f (e (Var x)))
i2f (App e1 e2) = app (i2f e1) (i2f e2)
i2f (Fix e) = fix(\x -> i2f (e (Var x)))
-- Convert sample terms
tf2' = i2f ti2
-- tf2' :: (Symantics repr, BoolSYM repr, MulSYM repr, FixSYM repr) =>
-- repr (Int -> Int)
-- Now we can evaluate it using different final interpreters
tf2'_eval = F.eval tf2'
-- tf2'_eval :: Int -> Int
tf2'_eval' = tf2'_eval 21
-- 42
tf2'_view = F.view tf2'
-- "(\\x0 -> (x0+x0))"
-- The same for the power term
tfpow72' = i2f tipow72
tfpow72'_eval = F.eval tfpow72'
-- 128
tfpow72'_view = F.view tfpow72'
-- the same as tipow72'_view
-- * //
-- * Verifying Initial -> Final -> Initial
-- * ifi :: IR (IR h) t -> IR h t
ifi ir = f2i . i2f $ ir
tipow72'_eval'' = evalI . ifi $ tipow72
-- 128
tipow72'_view'' = viewI . ifi $ tipow72
-- * Verifying Final -> Initial -> Final
fif fr = i2f . f2i $ fr
-- fif :: (Symantics repr, BoolSYM repr, MulSYM repr, FixSYM repr) =>
-- IR repr t -> repr t
tfpow72'_eval'' = F.eval . fif $ F.tpow72
-- 128
tfpow72'_view'' = F.view . fif $ F.tpow72
-- * Relation between initial and final is a bijection
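-- A minimal editor-added sketch (not from the original file): converting an
-- initial term to the final representation and back leaves its printed form
-- unchanged, which illustrates the bijection claimed above.
bijectionCheck :: Bool
bijectionCheck = viewI (ifi tipow72) == viewI tipow72
-- True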
main = do
print ti1_eval
print ti2_eval'
print tipow72_eval
print ti1_view
print ti2_view
print ti3_view
print tipow_view
print ti2'_eval'
print tipow72'_eval
print ti2'_view
print tipow72'_view
print tf2'_eval'
print tfpow72'_eval
print tf2'_view
print tfpow72'_view
print tipow72'_eval''
print tipow72'_view''
print tfpow72'_eval''
print tfpow72'_view''
|
egaburov/funstuff
|
Haskell/tytag/codes3/TTIF.hs
|
apache-2.0
| 7,773 | 2 | 24 | 2,051 | 2,284 | 1,170 | 1,114 | 134 | 2 |
{-# LANGUAGE BangPatterns #-}
{-| External data loader.
This module holds the external data loading, and thus is the only one
depending (via the specialized Text\/Rapi\/Luxi modules) on the actual
libraries implementing the low-level protocols.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.ExtLoader
( loadExternalData
, commonSuffix
, maybeSaveData
, queryAllMonDDCs
, pMonDData
) where
import Control.Monad
import Control.Exception
import Data.Maybe (isJust, fromJust, catMaybes)
import Network.Curl
import System.FilePath
import System.IO
import System.Time (getClockTime)
import Text.Printf (hPrintf)
import qualified Text.JSON as J
import qualified Data.Map as Map
import qualified Data.List as L
import qualified Ganeti.Constants as C
import qualified Ganeti.DataCollectors.CPUload as CPUload
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Backend.Luxi as Luxi
import qualified Ganeti.HTools.Backend.Rapi as Rapi
import qualified Ganeti.HTools.Backend.Simu as Simu
import qualified Ganeti.HTools.Backend.Text as Text
import qualified Ganeti.HTools.Backend.IAlloc as IAlloc
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.HTools.Loader (mergeData, checkData, ClusterData(..)
, commonSuffix, clearDynU)
import Ganeti.BasicTypes
import Ganeti.Cpu.Types
import Ganeti.DataCollectors.Types
import Ganeti.HTools.Types
import Ganeti.HTools.CLI
import Ganeti.JSON
import Ganeti.Logging (logWarning)
import Ganeti.Utils (sepSplit, tryRead, exitIfBad, exitWhen)
-- | Error beautifier.
wrapIO :: IO (Result a) -> IO (Result a)
wrapIO = handle (\e -> return . Bad . show $ (e::IOException))
-- | Parses a user-supplied utilisation string.
parseUtilisation :: String -> Result (String, DynUtil)
parseUtilisation line =
case sepSplit ' ' line of
[name, cpu, mem, dsk, net] ->
do
rcpu <- tryRead name cpu
rmem <- tryRead name mem
rdsk <- tryRead name dsk
rnet <- tryRead name net
let du = DynUtil { cpuWeight = rcpu, memWeight = rmem
, dskWeight = rdsk, netWeight = rnet }
return (name, du)
_ -> Bad $ "Cannot parse line " ++ line
-- | External tool data loader from a variety of sources.
loadExternalData :: Options
-> IO ClusterData
loadExternalData opts = do
let mhost = optMaster opts
lsock = optLuxi opts
tfile = optDataFile opts
simdata = optNodeSim opts
iallocsrc = optIAllocSrc opts
setRapi = mhost /= ""
setLuxi = isJust lsock
setSim = (not . null) simdata
setFile = isJust tfile
setIAllocSrc = isJust iallocsrc
allSet = filter id [setRapi, setLuxi, setFile]
exTags = case optExTags opts of
Nothing -> []
Just etl -> map (++ ":") etl
selInsts = optSelInst opts
exInsts = optExInst opts
exitWhen (length allSet > 1) "Only one of the rapi, luxi, and data\
\ files options should be given."
util_contents <- maybe (return "") readFile (optDynuFile opts)
util_data <- exitIfBad "can't parse utilisation data" .
mapM parseUtilisation $ lines util_contents
input_data <-
case () of
_ | setRapi -> wrapIO $ Rapi.loadData mhost
| setLuxi -> wrapIO . Luxi.loadData $ fromJust lsock
| setSim -> Simu.loadData simdata
| setFile -> wrapIO . Text.loadData $ fromJust tfile
| setIAllocSrc -> wrapIO . IAlloc.loadData $ fromJust iallocsrc
| otherwise -> return $ Bad "No backend selected! Exiting."
now <- getClockTime
let ignoreDynU = optIgnoreDynu opts
eff_u = if ignoreDynU then [] else util_data
ldresult = input_data >>= (if ignoreDynU then clearDynU else return)
>>= mergeData eff_u exTags selInsts exInsts now
cdata <- exitIfBad "failed to load data, aborting" ldresult
cdata' <- if optMonD opts then queryAllMonDDCs cdata opts else return cdata
let (fix_msgs, nl) = checkData (cdNodes cdata') (cdInstances cdata')
unless (optVerbose opts == 0) $ maybeShowWarnings fix_msgs
return cdata' {cdNodes = nl}
-- | Function to save the cluster data to a file.
maybeSaveData :: Maybe FilePath -- ^ The file prefix to save to
-> String -- ^ The suffix (extension) to add
-> String -- ^ Informational message
-> ClusterData -- ^ The cluster data
-> IO ()
maybeSaveData Nothing _ _ _ = return ()
maybeSaveData (Just path) ext msg cdata = do
let adata = Text.serializeCluster cdata
out_path = path <.> ext
writeFile out_path adata
hPrintf stderr "The cluster state %s has been written to file '%s'\n"
msg out_path
-- | Type describing a data collector basic information.
data DataCollector = DataCollector
{ dName :: String -- ^ Name of the data collector
, dCategory :: Maybe DCCategory -- ^ The name of the category
}
-- | The actual data types for MonD's Data Collectors.
data Report = CPUavgloadReport CPUavgload
-- | The list of Data Collectors used by hail and hbal.
collectors :: Options -> [DataCollector]
collectors opts =
if optIgnoreDynu opts
then []
else [ DataCollector CPUload.dcName CPUload.dcCategory ]
-- | MonD data parsed from a mock file, representing (node name, list of
-- reports produced by MonD's data collectors).
type MonDData = (String, [DCReport])
-- | A map storing MonDs data.
type MapMonDData = Map.Map String [DCReport]
-- | Parse MonD data file contents.
pMonDData :: String -> Result [MonDData]
pMonDData input =
loadJSArray "Parsing MonD's answer" input >>=
mapM (pMonDN . J.fromJSObject)
-- | Parse a node's JSON record.
pMonDN :: JSRecord -> Result MonDData
pMonDN a = do
node <- tryFromObj "Parsing node's name" a "node"
reports <- tryFromObj "Parsing node's reports" a "reports"
return (node, reports)
-- | Query all MonDs for all Data Collectors.
queryAllMonDDCs :: ClusterData -> Options -> IO ClusterData
queryAllMonDDCs cdata opts = do
map_mDD <-
case optMonDFile opts of
Nothing -> return Nothing
Just fp -> do
monDData_contents <- readFile fp
monDData <- exitIfBad "can't parse MonD data"
. pMonDData $ monDData_contents
return . Just $ Map.fromList monDData
let (ClusterData _ nl il _ _) = cdata
(nl', il') <- foldM (queryAllMonDs map_mDD) (nl, il) (collectors opts)
return $ cdata {cdNodes = nl', cdInstances = il'}
-- | Query all MonDs for a single Data Collector.
queryAllMonDs :: Maybe MapMonDData -> (Node.List, Instance.List)
-> DataCollector -> IO (Node.List, Instance.List)
queryAllMonDs m (nl, il) dc = do
elems <- mapM (queryAMonD m dc) (Container.elems nl)
let elems' = catMaybes elems
if length elems == length elems'
then
let il' = foldl updateUtilData il elems'
nl' = zip (Container.keys nl) elems'
in return (Container.fromList nl', il')
else do
logWarning $ "Didn't receive an answer by all MonDs, " ++ dName dc
++ "'s data will be ignored."
return (nl,il)
-- | Query a specified MonD for a Data Collector.
fromCurl :: DataCollector -> Node.Node -> IO (Maybe DCReport)
fromCurl dc node = do
(code, !body) <- curlGetString (prepareUrl dc node) []
case code of
CurlOK ->
case J.decodeStrict body :: J.Result DCReport of
J.Ok r -> return $ Just r
J.Error _ -> return Nothing
_ -> do
logWarning $ "Failed to contact node's " ++ Node.name node
++ " MonD for DC " ++ dName dc
return Nothing
-- | Return the data from the correct combination of a Data Collector
-- and a DCReport.
mkReport :: DataCollector -> Maybe DCReport -> Maybe Report
mkReport dc dcr =
case dcr of
Nothing -> Nothing
Just dcr' ->
case () of
_ | CPUload.dcName == dName dc ->
case fromJVal (dcReportData dcr') :: Result CPUavgload of
Ok cav -> Just $ CPUavgloadReport cav
Bad _ -> Nothing
| otherwise -> Nothing
-- | Get data report for the specified Data Collector and Node from the map.
fromFile :: DataCollector -> Node.Node -> MapMonDData -> Maybe DCReport
fromFile dc node m =
let matchDCName dcr = dName dc == dcReportName dcr
in maybe Nothing (L.find matchDCName) $ Map.lookup (Node.name node) m
-- | Query a MonD for a single Data Collector.
queryAMonD :: Maybe MapMonDData -> DataCollector -> Node.Node
-> IO (Maybe Node.Node)
queryAMonD m dc node = do
dcReport <-
case m of
Nothing -> fromCurl dc node
Just m' -> return $ fromFile dc node m'
case mkReport dc dcReport of
Nothing -> return Nothing
Just report ->
case report of
CPUavgloadReport cav ->
let ct = cavCpuTotal cav
du = Node.utilLoad node
du' = du {cpuWeight = ct}
in return $ Just node {Node.utilLoad = du'}
-- | Update utilization data.
updateUtilData :: Instance.List -> Node.Node -> Instance.List
updateUtilData il node =
let ct = cpuWeight (Node.utilLoad node)
n_uCpu = Node.uCpu node
upd inst =
if Node.idx node == Instance.pNode inst
then
let i_vcpus = Instance.vcpus inst
i_util = ct / fromIntegral n_uCpu * fromIntegral i_vcpus
i_du = Instance.util inst
i_du' = i_du {cpuWeight = i_util}
in inst {Instance.util = i_du'}
else inst
in Container.map upd il
-- | Prepare url to query a single collector.
prepareUrl :: DataCollector -> Node.Node -> URLString
prepareUrl dc node =
Node.name node ++ ":" ++ show C.defaultMondPort ++ "/"
++ show C.mondLatestApiVersion ++ "/report/" ++
getDCCName (dCategory dc) ++ "/" ++ dName dc
-- | Get Category Name.
getDCCName :: Maybe DCCategory -> String
getDCCName dcc =
case dcc of
Nothing -> "default"
Just c -> getCategoryName c
|
apyrgio/snf-ganeti
|
src/Ganeti/HTools/ExtLoader.hs
|
bsd-2-clause
| 11,383 | 0 | 18 | 2,763 | 2,679 | 1,379 | 1,300 | 218 | 5 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RankNTypes #-}
module Scratch where
import Control.Spoon.Prim (throws)
import Data.Fixable
import Unsafe.Coerce
import Data.Default
import Data.Wrapped
class Resolvable a where
type Resolution a
resolve :: a -> Resolution a
instance Resolvable (Wrapped a b) where
type Resolution (Wrapped a b) = Wrapped a b
resolve w = w
instance Resolvable a => Resolvable (t -> a) where
type Resolution (t -> a) = Resolution a
resolve w = resolve $ w undefined
class FixResolvable a where
type FixResolution a
fixResolve :: a -> FixResolution a
instance FixResolvable (Wrapped a b) where
type FixResolution (Wrapped a b) = Wrapped a b
fixResolve w = w
instance FixResolvable a => FixResolvable (Fix t -> a) where
type FixResolution (Fix t -> a) = FixResolution a
fixResolve w = fixResolve $ w Unfixed
u x = if throws x then 1 else 0
t1 :: Int -> Wrapped (Int -> Int) Int
t1 x = defWrap $ u x
t2 :: Int -> Int -> Wrapped (Int -> Int -> Int) Int
t2 x y = defWrap $ u x + u y
t3 :: Int -> Int -> Int -> Wrapped (Int -> Int -> Int -> Int) Int
t3 x y z = defWrap $ u x + u y + u z
-- This is essentially scratch for a function to convert from infixr to infixl
--
-- E :: a -> a
-- \f g x -> f g x
-- id
-- E :: (a -> a1 -> a) -> a -> a1 -> a1 -> a
-- \f g x y -> f (f g x) y
-- (.) =<< (.)
-- E :: (b -> a -> b) -> b -> a -> a -> a -> b
-- \f g x y z -> f (f (f g x) y) z
-- ap ((.) . (.) . (.)) ((.) =<< (.))
-- E :: (b -> a -> b) -> b -> a -> a -> a -> a -> b
-- \f g x y z w -> f (f (f (f g x) y) z) w
-- ap ((.) . (.) . (.) . (.)) (ap ((.) . (.) . (.)) ((.) =<< (.)))
-- E :: (b -> a -> b) -> b -> a -> a -> a -> a -> a -> b
-- \f g x y z w q -> f (f (f (f (f g x) y) z) w) q
-- ap ((.) . (.) . (.) . (.) . (.)) (ap ((.) . (.) . (.) . (.)) (ap ((.) . (.) . (.)) ((.) =<< (.))))
-- g `f` x
-- (g `f` x) `f` y
-- ((g `f` x) `f` y) `f` z
-- (((g `f` x) `f` y) `f` z)
-- `f`
-- / \
-- `f` z
-- / \
-- `f` y
-- / \
-- g x
|
michaeljklein/CPlug
|
test/Scratch.hs
|
bsd-3-clause
| 2,289 | 0 | 12 | 616 | 510 | 277 | 233 | 40 | 2 |
{-# LANGUAGE RecordWildCards #-}
module Main where
import qualified Data.Map as M
import Control.Monad.State
import CHR2.AST.Untyped
import CHR2.Parser
import CHR2.Compile
import CHR2.Target.PostgreSQL
import GHC.Environment
makeSql :: String -> IO ()
makeSql n = do
e@Env{..} <- execStateT (do {parse fn; initEnv}) emptyEnv{_name = n}
publishScript e
where
fn = n ++ ".chr"
main :: IO ()
main = do
args <- getFullArgs
mapM_ makeSql args
|
awto/chr2sql
|
CHR2.hs
|
bsd-3-clause
| 457 | 0 | 12 | 84 | 159 | 87 | 72 | 18 | 1 |
{-| Both the GHC and GHCJS implementations of `Dhall.Import.Manager.Manager`
export a `Dhall.Import.Manager.Manager` type suitable for use within the
"Dhall.Import" module
For the GHC implementation the `Dhall.Import.Manager.Manager` type is a real
`Network.HTTP.Client.Manager` from the @http-client@ package. For the GHCJS
implementation the `Dhall.Import.Manager.Manager` type is a synonym for
@`Data.Void.Void`@ since GHCJS does not use a
`Network.HTTP.Client.Manager` for HTTP requests.
-}
module Dhall.Import.Manager
( -- * Manager
Manager
, defaultNewManager
) where
{-| The GHCJS implementation does not require a `Network.HTTP.Client.Manager`
The purpose of this synonym is so that "Dhall.Import.Types" can import a
`Dhall.Import.Manager.Manager` type from "Dhall.Import.HTTP" that does the
correct thing for both the GHC and GHCJS implementations
-}
type Manager = ()
defaultNewManager :: IO Manager
defaultNewManager = pure ()
|
Gabriel439/Haskell-Dhall-Library
|
dhall/ghcjs-src/Dhall/Import/Manager.hs
|
bsd-3-clause
| 1,001 | 0 | 6 | 182 | 44 | 27 | 17 | 7 | 1 |
{-|
Module : Pentago.Data.Matrix
Description : Basic square matrix/array operations
Basic square matrix/array operations
-}
module Pentago.Data.Matrix(
Symmetry
, horizontalSymmetry
, verticalSymmetry
, transposeSymmetry
, rotate90Symmetry
, rotate180Symmetry
, rotate270Symmetry
, boundSymmetry
, MatrixSymmetry
, BoundedMatrixSymmetry
, horizontalMatrixSymmetry
, verticalMatrixSymmetry
, rotate90Matrix
, rotate270Matrix
, rotate90BoundedMatrix
, rotate270BoundedMatrix
, subarray
, insertSubarray) where
import Data.Array.IArray
-- |Function type containing symmetry operations on array indexes.
type Symmetry i = (i, i, Bool)
-> (i, i)
-> (i, i)
-- s
horizontalSymmetry :: (Integral i) => Symmetry i
horizontalSymmetry (_, cY, True) = fmap (2 * cY + 1 -)
horizontalSymmetry (_, cY, _) = fmap (2 * cY -)
-- sr
transposeSymmetry :: (Integral i) => Symmetry i
transposeSymmetry (cX, cY, _) (x, y) = (cX + (y - cY), cY + (x - cX))
-- sr^2
verticalSymmetry :: (Integral i) => Symmetry i
verticalSymmetry (cX, _, True) (x, y) = (2 * cX + 1 - x, y)
verticalSymmetry (cX, _, _) (x, y) = (2 * cX - x, y)
-- r^2
rotate180Symmetry :: (Integral i) => Symmetry i
rotate180Symmetry center = horizontalSymmetry center . verticalSymmetry center
-- r
rotate90Symmetry :: (Integral i) => Symmetry i
rotate90Symmetry (cX, cY, False) (x, y) = (cX + (y - cY),
cY - (x - cX))
rotate90Symmetry (cX, cY, True) (x, y) = (cX + (y - cY),
cY - (x - cX) + 1)
-- r^3
rotate270Symmetry :: (Integral i) => Symmetry i
rotate270Symmetry (cX, cY, False) (x, y) =
(cX - (y - cY), cY + (x - cX))
rotate270Symmetry (cX, cY, True) (x, y) =
(cX - (y - cY) + 1, cY + (x - cX))
-- |Perform symmetry operation inside given bounds
boundSymmetry :: (Integral i, Ix i)
=> ((i, i), (i, i)) -- ^bounds for symmetry operation
-> Symmetry i
-> Symmetry i
boundSymmetry operationBounds symmetry center pos =
if inRange operationBounds pos
then symmetry center pos
else pos
-- |Group symmetry operations on square matrix
type MatrixSymmetry a i e = a (i, i) e -> a (i,i) e
type BoundedMatrixSymmetry a i e = ((i, i), (i, i)) -> a (i, i) e -> a (i,i) e
matrixSymmetry :: (Ix i, Integral i, IArray a e)
=> Symmetry i -> MatrixSymmetry a i e
matrixSymmetry symmetry matrix = ixmap (bounds matrix) (symmetry center) matrix
where
((begX, begY), (endX, endY)) = bounds matrix
center = (div (begX + endX) 2, div (begY + endY) 2, even $ endY - begY + 1)
boundedMatrixSymmetry :: (Ix i, Integral i, IArray a e)
=> Symmetry i -> BoundedMatrixSymmetry a i e
boundedMatrixSymmetry symmetry symmetryBounds matrix =
ixmap (bounds matrix) mySymmetry matrix
where
((begX, begY), (endX, endY)) = symmetryBounds
center = (div (begX + endX) 2, div (begY + endY) 2, odd $ endY - begY)
mySymmetry pos =
if inRange symmetryBounds pos
then symmetry center pos
else pos
-- |Perform OY symmetry on a matrix
horizontalMatrixSymmetry :: (Ix i, Integral i, IArray a e)
=> MatrixSymmetry a i e
horizontalMatrixSymmetry = matrixSymmetry horizontalSymmetry
-- |Perform OX symmetry on a matrix
verticalMatrixSymmetry :: (Ix i, Integral i, IArray a e)
=> MatrixSymmetry a i e
verticalMatrixSymmetry = matrixSymmetry verticalSymmetry
-- |Perform left rotation on a matrix
rotate90Matrix :: (Ix i, Integral i, IArray a e) => MatrixSymmetry a i e
rotate90Matrix = matrixSymmetry rotate270Symmetry
-- |Perform right rotation on a matrix
rotate270Matrix :: (Ix i, Integral i, IArray a e) => MatrixSymmetry a i e
rotate270Matrix = matrixSymmetry rotate90Symmetry
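-- A small editor-added sketch (not part of the original module): a left
-- rotation followed by a right rotation restores the original matrix; the
-- 2x2 array below is illustrative only.
rotationRoundTrip :: Bool
rotationRoundTrip = rotate270Matrix (rotate90Matrix m) == m
  where m = listArray ((0, 0), (1, 1)) "abcd" :: Array (Int, Int) Char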
rotate90BoundedMatrix :: (Ix i, Integral i, IArray a e)
=> BoundedMatrixSymmetry a i e
rotate90BoundedMatrix = boundedMatrixSymmetry rotate270Symmetry
rotate270BoundedMatrix :: (Ix i, Integral i, IArray a e) =>
BoundedMatrixSymmetry a i e
rotate270BoundedMatrix = boundedMatrixSymmetry rotate90Symmetry
-- |Get subarray bounded by indexes
subarray :: (Ix i, IArray a e) => (i, i) -> a i e -> a i e
subarray newBounds = ixmap newBounds id
-- |Insert subarray into array
insertSubarray :: (Ix i, IArray a e) => a i e -> a i e -> a i e
insertSubarray newSubarray mainArray = mainArray // assocs newSubarray
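-- Another small editor-added sketch (not part of the original module):
-- extracting a sub-block and inserting it back over the same indices leaves
-- the array unchanged.
subarrayRoundTrip :: Bool
subarrayRoundTrip = insertSubarray (subarray ((0, 0), (0, 1)) m) m == m
  where m = listArray ((0, 0), (1, 1)) "abcd" :: Array (Int, Int) Char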
|
gregorias/Pentago
|
src/Pentago/Data/Matrix.hs
|
bsd-3-clause
| 4,241 | 0 | 10 | 847 | 1,505 | 834 | 671 | 88 | 2 |
import Math.NumberTheory.Primes.Testing
import Data.List
import Debug.Trace
val :: Maybe t -> t
val (Just x) = x
spiralPrimes :: Num a => t -> a
spiralPrimes n = lengths !! (val . findIndex (\(a,b) -> a/b < 0.1) $ (drop 2 ratios))
where ratios = scanl (\(a,b) c -> (if isPrime c then a+1 else a, b+1)) (0,1) $ diagonals
diagonals = scanl (+) 1 ([2,4..] >>= replicate 4)
lengths = (1 : (zip (repeat 4) (iterate (2+) 3) >>= uncurry replicate))
main :: IO ()
main = print $ spiralPrimes 60
|
JacksonGariety/euler.hs
|
058.hs
|
bsd-3-clause
| 508 | 0 | 14 | 113 | 276 | 150 | 126 | 12 | 2 |
module Main where
import qualified Data.Map.Strict as Map
import Data.Char
import Control.Applicative
import Codec.Picture
import Graphics.Rasterific
import Graphics.Rasterific.Texture
data LSystem symbol = LSystem (Map.Map symbol [symbol])
deriving (Show)
getRules :: LSystem sym -> Map.Map sym [sym]
getRules (LSystem rules) = rules
nextLSys :: Ord sym => LSystem sym -> [sym] -> [sym]
nextLSys (LSystem rules) = concatMap getSym
where getSym x = if Map.member x rules
then (Map.!) rules x
else [x]
algaeSys = LSystem (Map.fromList [('A', "AB"), ('B', "A")])
nextAlgae :: String -> String
nextAlgae = nextLSys algaeSys
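-- A small editor-added sketch (not in the original file): iterating the
-- algae system from a single 'A' yields the classic Lindenmayer sequence.
algaeExample :: [String]
algaeExample = take 4 (iterate nextAlgae "A")
-- ["A","AB","ABA","ABAAB"]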
pythagorasTreeSys = LSystem (Map.fromList [ ('1', "11")
, ('0', "1[0]0")
, ('[', "[")
, (']', "]")])
x = "█"
nextPythagoras :: String -> String
nextPythagoras = nextLSys pythagorasTreeSys
candorDustSys = LSystem (Map.fromList [('█', "█ █"), (' ', " ")])
nextCandor :: String -> String
nextCandor = nextLSys candorDustSys
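-- A small editor-added sketch (not in the original file): each step maps a
-- block to "block gap block" and widens existing gaps, giving the
-- Cantor-dust pattern.
candorExample :: [String]
candorExample = take 3 (iterate nextCandor "█")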
data PlantDirections = X
| Forward
| TurnL
| TurnR
| Save
| Restore
deriving (Ord, Eq)
charToPlantDirection :: Char -> PlantDirections
charToPlantDirection 'X' = X
charToPlantDirection 'F' = Forward
charToPlantDirection '-' = TurnL
charToPlantDirection '+' = TurnR
charToPlantDirection '[' = Save
charToPlantDirection ']' = Restore
strToPlantDirections :: String -> [PlantDirections]
strToPlantDirections = map charToPlantDirection
plantDirectionToChar :: PlantDirections -> Char
plantDirectionToChar X = 'X'
plantDirectionToChar Forward = 'F'
plantDirectionToChar TurnL = '-'
plantDirectionToChar TurnR = '+'
plantDirectionToChar Save = '['
plantDirectionToChar Restore = ']'
plantDirectionsToStr :: [PlantDirections] -> String
plantDirectionsToStr = map plantDirectionToChar
instance Show PlantDirections where
show = pure . plantDirectionToChar
plantSys = LSystem (Map.fromList [ (X, strToPlantDirections "F-[[X]+X]+F[+FX]-X")
, (Forward, strToPlantDirections "FF") ])
nextPlantSys = nextLSys plantSys
main :: IO ()
main = putStrLn "hello world"
|
qwertzdarth/lsys
|
src/Main.hs
|
bsd-3-clause
| 2,364 | 0 | 10 | 630 | 634 | 350 | 284 | 61 | 2 |
module Domains.Euclidean where
--import Prelude hiding (return)
import Domains.Additive
-- Accessor-like functions for the rtn argument of divide
_quo :: t -> t -> t
_quo q r = q
_rem :: t -> t -> t
_rem q r = r
infixl 7 /! -- divide or fail
(/!) :: Euclidean a => a -> a -> a
(/!) = divideOrFail
class (Show a, Eq a, Additive a) => Euclidean a where
quo :: a -> a -> a
rem :: a -> a -> a
divide :: (a -> a -> b) -> a -> a -> b
gcd :: a -> a -> a
divideOrFail :: a -> a -> a
sign :: a -> a
quo = divide _quo
rem = divide _rem
divide rtn n d = rtn (quo n d) (Domains.Euclidean.rem n d)
divideOrFail n d = divide (\q r -> if r /= zero then error ("Fail: " ++ show n ++ " // by " ++ show d) else q) n d
instance Euclidean Int where
quo = Prelude.quot
rem = Prelude.rem
gcd = Prelude.gcd
sign = Prelude.signum
instance Euclidean Integer where
quo = Prelude.quot
rem = Prelude.rem
gcd = Prelude.gcd
sign = Prelude.signum
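-- A minimal editor-added sketch (not part of the original module): '/!'
-- performs exact division, calling 'error' when the remainder is non-zero.
exactDivisionExample :: Int
exactDivisionExample = 10 /! 2
-- 5; evaluating 10 /! 3 instead would fail with a "Fail: ..." error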
|
pmilne/algebra
|
src/Domains/Euclidean.hs
|
bsd-3-clause
| 1,212 | 0 | 16 | 498 | 400 | 215 | 185 | 30 | 1 |
{-# LANGUAGE LambdaCase #-}
module Ling.Fwd
( fwdP
, fwdProc
, fwdProc'
, expandFwd
) where
import Prelude hiding (pred)
import Ling.Norm
import Ling.Prelude
import Ling.Proc
import Ling.Session.Core
type MkFwd a = (Session -> Session) -> UsedNames -> a -> [Channel] -> Proc
fwdSplit :: ([Proc] -> Proc) -> [TraverseKind] -> MkFwd Sessions
fwdSplit fprocs ks redSession used (Sessions rss) cs
| null cs = ø
| null rss = toProc $ Order (zipWith3 splitK ks cs (repeat []))
| otherwise = Order pref `dotP` fprocs ps
-- These splits are independant, they are put in sequence because
-- splitting always commutes anyway.
where
cdss = zipWith subChanDecs (transpose (fwds (length cs) <$> rss)) cs
css = map _cdChan <$> cdss
ps = zipWith3 (\k -> fwdR k redSession used) ks rss (transpose css)
pref = zipWith3 splitK ks cs cdss
fwdIO :: MkFwd (RW, VarDec, Session)
fwdIO _ _ _ [] = ø
fwdIO redSession used (Write, arg, s) (c:d:es) = fwdIO redSession used (Read, arg, dual s) (d:c:es)
fwdIO redSession used (Read, arg, s) (c:ds) = recv `dotP` Prll sends `dotP` fwdP redSession used' s (c:ds)
where (x, used') = avoidUsed (arg^.argName) c used
vx = mkVar x
recv = Recv c (arg & argName .~ x)
sends = [ Send d Nothing vx | d <- ds ]
fwdIO _ _ _ _ = error "fwdIO: Not enough channels for this forwarder (or the session is not a sink)"
fwdArray :: TraverseKind -> MkFwd Sessions
fwdArray = \case
SeqK -> fwdSplit dotsP $ repeat SeqK
TenK -> fwdSplit mconcat $ TenK : repeat ParK
ParK -> fwdSplit mconcat $ ParK : TenK : repeat ParK
fwdR :: TraverseKind -> MkFwd RSession
fwdR k redSession used (s `Repl` r)
| litR1 `is` r = fwdP redSession used s
| otherwise = mkReplicate k r anonName . fwdP redSession used s
fwdP :: MkFwd Session
fwdP _ _ _ [] = ø
fwdP redSession used s0 cs
| endS `is` s0 = ø
| otherwise =
case redSession s0 of
Array k ss -> fwdArray k redSession used ss cs
IO p t s -> fwdIO redSession used (p, t, s) cs
TermS{} -> Act $ Ax s0 cs
fwdProc' :: (Session -> Session) -> Session -> [Channel] -> Proc
fwdProc' redSession s cs = fwdP redSession ø s cs
expandFwd :: Endom Proc
expandFwd = \case
Act (Ax s cs) -> fwdProc' id s cs
proc0 -> proc0
-- The session 'Fwd n session' is a par.
-- This function builds a process which first splits this par.
fwdProc :: Int -> Session -> Channel -> Proc
fwdProc n s c = splitK ParK c cs `dotP` fwdP id ø s (_cdChan <$> cs)
where
ss = oneS <$> fwds n s
cs = subChanDecs ss c
|
np/ling
|
Ling/Fwd.hs
|
bsd-3-clause
| 2,664 | 0 | 14 | 712 | 1,039 | 536 | 503 | 58 | 3 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 709
{-# LANGUAGE AutoDeriveTypeable #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.Trans.Except
-- Copyright : (C) 2013 Ross Paterson
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This monad transformer extends a monad with the ability throw exceptions.
--
-- A sequence of actions terminates normally, producing a value,
-- only if none of the actions in the sequence throws an exception.
-- If one throws an exception, the rest of the sequence is skipped and
-- the composite action exits with that exception.
--
-- If the value of the exception is not required, the variant in
-- "Control.Monad.Trans.Maybe" may be used instead.
-----------------------------------------------------------------------------
module Control.Monad.Trans.Except (
-- * The Except monad
Except,
except,
runExcept,
mapExcept,
withExcept,
-- * The ExceptT monad transformer
ExceptT(ExceptT),
runExceptT,
mapExceptT,
withExceptT,
-- * Exception operations
throwE,
catchE,
-- * Lifting other operations
liftCallCC,
liftListen,
liftPass,
) where
import Control.Monad.IO.Class
import Control.Monad.Signatures
import Control.Monad.Trans.Class
import Data.Functor.Classes
import Data.Functor.Identity
import Control.Applicative
import Control.Monad
import Control.Monad.Fix
import Data.Foldable (Foldable(foldMap))
import Data.Monoid
import Data.Traversable (Traversable(traverse))
-- | The parameterizable exception monad.
--
-- Computations are either exceptions or normal values.
--
-- The 'return' function returns a normal value, while @>>=@ exits on
-- the first exception. For a variant that continues after an error
-- and collects all the errors, see 'Control.Applicative.Lift.Errors'.
type Except e = ExceptT e Identity
-- | Constructor for computations in the exception monad.
-- (The inverse of 'runExcept').
except :: Either e a -> Except e a
except m = ExceptT (Identity m)
-- | Extractor for computations in the exception monad.
-- (The inverse of 'except').
runExcept :: Except e a -> Either e a
runExcept (ExceptT m) = runIdentity m
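-- A brief editor-added usage sketch (not part of the original module):
-- building a computation with 'except' and extracting it with 'runExcept'.
exceptExample :: Either String Int
exceptExample = runExcept (except (Right 3) >>= \ x -> return (x + 1))
-- Right 4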
-- | Map the unwrapped computation using the given function.
--
-- * @'runExcept' ('mapExcept' f m) = f ('runExcept' m)@
mapExcept :: (Either e a -> Either e' b)
-> Except e a
-> Except e' b
mapExcept f = mapExceptT (Identity . f . runIdentity)
-- | Transform any exceptions thrown by the computation using the given
-- function (a specialization of 'withExceptT').
withExcept :: (e -> e') -> Except e a -> Except e' a
withExcept = withExceptT
-- | A monad transformer that adds exceptions to other monads.
--
-- @ExceptT@ constructs a monad parameterized over two things:
--
-- * e - The exception type.
--
-- * m - The inner monad.
--
-- The 'return' function yields a computation that produces the given
-- value, while @>>=@ sequences two subcomputations, exiting on the
-- first exception.
newtype ExceptT e m a = ExceptT (m (Either e a))
instance (Eq e, Eq1 m, Eq a) => Eq (ExceptT e m a) where
ExceptT x == ExceptT y = eq1 x y
instance (Ord e, Ord1 m, Ord a) => Ord (ExceptT e m a) where
compare (ExceptT x) (ExceptT y) = compare1 x y
instance (Read e, Read1 m, Read a) => Read (ExceptT e m a) where
readsPrec = readsData $ readsUnary1 "ExceptT" ExceptT
instance (Show e, Show1 m, Show a) => Show (ExceptT e m a) where
showsPrec d (ExceptT m) = showsUnary1 "ExceptT" d m
instance (Eq e, Eq1 m) => Eq1 (ExceptT e m) where eq1 = (==)
instance (Ord e, Ord1 m) => Ord1 (ExceptT e m) where compare1 = compare
instance (Read e, Read1 m) => Read1 (ExceptT e m) where readsPrec1 = readsPrec
instance (Show e, Show1 m) => Show1 (ExceptT e m) where showsPrec1 = showsPrec
-- | The inverse of 'ExceptT'.
runExceptT :: ExceptT e m a -> m (Either e a)
runExceptT (ExceptT m) = m
-- | Map the unwrapped computation using the given function.
--
-- * @'runExceptT' ('mapExceptT' f m) = f ('runExceptT' m)@
mapExceptT :: (m (Either e a) -> n (Either e' b))
-> ExceptT e m a
-> ExceptT e' n b
mapExceptT f m = ExceptT $ f (runExceptT m)
-- | Transform any exceptions thrown by the computation using the
-- given function.
withExceptT :: (Functor m) => (e -> e') -> ExceptT e m a -> ExceptT e' m a
withExceptT f = mapExceptT $ fmap $ either (Left . f) Right
instance (Functor m) => Functor (ExceptT e m) where
fmap f = ExceptT . fmap (fmap f) . runExceptT
instance (Foldable f) => Foldable (ExceptT e f) where
foldMap f (ExceptT a) = foldMap (either (const mempty) f) a
instance (Traversable f) => Traversable (ExceptT e f) where
traverse f (ExceptT a) =
ExceptT <$> traverse (either (pure . Left) (fmap Right . f)) a
instance (Functor m, Monad m) => Applicative (ExceptT e m) where
pure a = ExceptT $ return (Right a)
ExceptT f <*> ExceptT v = ExceptT $ do
mf <- f
case mf of
Left e -> return (Left e)
Right k -> do
mv <- v
case mv of
Left e -> return (Left e)
Right x -> return (Right (k x))
instance (Functor m, Monad m, Monoid e) => Alternative (ExceptT e m) where
empty = mzero
(<|>) = mplus
instance (Monad m) => Monad (ExceptT e m) where
return a = ExceptT $ return (Right a)
m >>= k = ExceptT $ do
a <- runExceptT m
case a of
Left e -> return (Left e)
Right x -> runExceptT (k x)
fail = ExceptT . fail
instance (Monad m, Monoid e) => MonadPlus (ExceptT e m) where
mzero = ExceptT $ return (Left mempty)
ExceptT m `mplus` ExceptT n = ExceptT $ do
a <- m
case a of
Left e -> liftM (either (Left . mappend e) Right) n
Right x -> return (Right x)
instance (MonadFix m) => MonadFix (ExceptT e m) where
mfix f = ExceptT (mfix (runExceptT . f . either (const bomb) id))
where bomb = error "mfix (ExceptT): inner computation returned Left value"
instance MonadTrans (ExceptT e) where
lift = ExceptT . liftM Right
instance (MonadIO m) => MonadIO (ExceptT e m) where
liftIO = lift . liftIO
-- | Signal an exception value @e@.
--
-- * @'runExceptT' ('throwE' e) = 'return' ('Left' e)@
--
-- * @'throwE' e >>= m = 'throwE' e@
throwE :: (Monad m) => e -> ExceptT e m a
throwE = ExceptT . return . Left
-- | Handle an exception.
--
-- * @'catchE' h ('lift' m) = 'lift' m@
--
-- * @'catchE' h ('throwE' e) = h e@
catchE :: (Monad m) =>
ExceptT e m a -- ^ the inner computation
-> (e -> ExceptT e' m a) -- ^ a handler for exceptions in the inner
-- computation
-> ExceptT e' m a
m `catchE` h = ExceptT $ do
a <- runExceptT m
case a of
Left l -> runExceptT (h l)
Right r -> return (Right r)
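-- A brief editor-added usage sketch (not part of the original module): an
-- exception thrown with 'throwE' is recovered by the handler given to
-- 'catchE'.
catchExample :: Except String Int
catchExample = (throwE "boom" >> return 0) `catchE` (\ _ -> return 42)
-- runExcept catchExample == Right 42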
-- | Lift a @callCC@ operation to the new monad.
liftCallCC :: CallCC m (Either e a) (Either e b) -> CallCC (ExceptT e m) a b
liftCallCC callCC f = ExceptT $
callCC $ \ c ->
runExceptT (f (\ a -> ExceptT $ c (Right a)))
-- | Lift a @listen@ operation to the new monad.
liftListen :: (Monad m) => Listen w m (Either e a) -> Listen w (ExceptT e m) a
liftListen listen = mapExceptT $ \ m -> do
(a, w) <- listen m
return $! fmap (\ r -> (r, w)) a
-- | Lift a @pass@ operation to the new monad.
liftPass :: (Monad m) => Pass w m (Either e a) -> Pass w (ExceptT e m) a
liftPass pass = mapExceptT $ \ m -> pass $ do
a <- m
return $! case a of
Left l -> (Left l, id)
Right (r, f) -> (Right r, f)
|
DavidAlphaFox/ghc
|
libraries/transformers/Control/Monad/Trans/Except.hs
|
bsd-3-clause
| 7,768 | 0 | 21 | 1,883 | 2,291 | 1,201 | 1,090 | 130 | 2 |
{-# LANGUAGE BangPatterns, CPP, PatternGuards #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Trustworthy #-}
#endif
module Data.HashMap.Strict
(
HashMap
, toList
, fromListWith
) where
import Data.Bits ((.&.), (.|.))
import qualified Data.List as L
import Data.Hashable (Hashable)
import Prelude hiding (map)
import qualified Data.HashMap.Array as A
import qualified Data.HashMap.Base as HM
import Data.HashMap.Base (HashMap(..), Hash, Leaf(..)
, empty, bitsPerSubkey, sparseIndex, mask, two, index, hash, bitmapIndexedOrFull
, collision, update16, toList
)
import Data.HashMap.Unsafe (runST)
import Control.DeepSeq
insertWith :: (Eq k, Hashable k) => (v -> v -> v) -> k -> v -> HashMap k v
-> HashMap k v
insertWith f k0 v0 m0 = go h0 k0 v0 0 m0
where
h0 = hash k0
go !h !k x !_ Empty = leaf h k x
go h k x s (Leaf hy l@(L ky y))
| hy == h = if ky == k
then leaf h k (f x y)
else x `seq` (collision h l (L k x))
| otherwise = x `seq` runST (two s h k x hy ky y)
go h k x s (BitmapIndexed b ary)
| b .&. m == 0 =
let ary' = A.insert ary i $! leaf h k x
in bitmapIndexedOrFull (b .|. m) ary'
| otherwise =
let st = A.index ary i
st' = go h k x (s+bitsPerSubkey) st
ary' = A.update ary i $! st'
in BitmapIndexed b ary'
where m = mask h s
i = sparseIndex b m
go h k x s (Full ary) =
let st = A.index ary i
st' = go h k x (s+bitsPerSubkey) st
ary' = update16 ary i $! st'
in Full ary'
where i = index h s
go h k x s t@(Collision hy v)
| h == hy = Collision h (updateOrSnocWith f k x v)
| otherwise = go h k x s $ BitmapIndexed (mask hy s) (A.singleton t)
{-# INLINABLE insertWith #-}
-- | In-place update version of insertWith
unsafeInsertWith :: (Eq k, Hashable k, NFData k, NFData v) => (v -> v -> v) -> k -> v -> HashMap k v
-> HashMap k v
unsafeInsertWith f k0 v0 m0 = rnf m0 `seq` runST (go h0 k0 v0 0 m0)
where
h0 = hash k0
go !h !k x !_ Empty = return $! leaf h k x
go h k x s (Leaf hy l@(L ky y))
| hy == h = if ky == k
then return $! leaf h k (f x y)
else do
return $! collision h l (L k x)
| otherwise = two s h k x hy ky y
go h k x s t@(BitmapIndexed b ary)
| b .&. m == 0 = do
let ary' = A.insert ary i $! leaf h k x
return $! bitmapIndexedOrFull (b .|. m) ary'
| otherwise = do
st <- A.indexM ary i
st' <- rnf x `seq` go h k x (s+bitsPerSubkey) st
A.unsafeUpdateM ary i st'
return t
where m = mask h s
i = sparseIndex b m
go h k x s t@(Full ary) = do
st <- A.indexM ary i
st' <- go h k x (s+bitsPerSubkey) st
A.unsafeUpdateM ary i st'
return t
where i = index h s
go h k x s t@(Collision hy v)
| h == hy = return $! Collision h (updateOrSnocWith f k x v)
| otherwise = go h k x s $ BitmapIndexed (mask hy s) (A.singleton t)
{-# INLINABLE unsafeInsertWith #-}
fromListWith :: (NFData k, NFData v, Eq k, Hashable k) => (v -> v -> v) -> [(k, v)] -> HashMap k v
fromListWith f = L.foldl' (\ !m (!k, !v) -> unsafeInsertWith f k v m) empty
{-# INLINE fromListWith #-}
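-- A small editor-added sketch (not part of the original module): duplicate
-- keys are combined with the supplied function, here counting occurrences.
fromListWithExample :: [(String, Int)]
fromListWithExample = toList (fromListWith (+) [("a", 1), ("b", 1), ("a", 1)])
-- e.g. [("a",2),("b",1)] (element order is unspecified)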
updateOrSnocWith :: Eq k => (v -> v -> v) -> k -> v -> A.Array (Leaf k v)
-> A.Array (Leaf k v)
updateOrSnocWith f = updateOrSnocWithKey (const f)
{-# INLINABLE updateOrSnocWith #-}
updateOrSnocWithKey :: Eq k => (k -> v -> v -> v) -> k -> v -> A.Array (Leaf k v)
-> A.Array (Leaf k v)
updateOrSnocWithKey f k0 v0 ary0 = go k0 v0 ary0 0 (A.length ary0)
where
go !k v !ary !i !n
| i >= n = A.run $ do
-- Not found, append to the end.
mary <- A.new_ (n + 1)
A.copy ary 0 mary 0 n
let !l = v `seq` (L k v)
A.write mary n l
return mary
| otherwise = case A.index ary i of
(L kx y) | k == kx -> let !v' = f k v y in A.update ary i (L k v')
| otherwise -> go k v ary (i+1) n
{-# INLINABLE updateOrSnocWithKey #-}
leaf :: Hash -> k -> v -> HashMap k v
leaf h k !v = Leaf h (L k v)
{-# INLINE leaf #-}
|
pacak/cuddly-bassoon
|
unordered-containers-0.2.8.0/Data/HashMap/Strict.hs
|
bsd-3-clause
| 4,441 | 0 | 16 | 1,594 | 1,991 | 993 | 998 | 100 | 6 |
import Test.Hspec
import Test.QuickCheck
import Control.Exception
import Data.ByteString.Lazy.Char8
import qualified UnitTests as UT
import qualified IntegrationTests as IT
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Integration" IT.spec
describe "Unit" UT.spec
|
sphaso/firebase-haskell-client
|
test/Spec.hs
|
bsd-3-clause
| 302 | 0 | 8 | 57 | 83 | 47 | 36 | 12 | 1 |
{-# LANGUAGE DeriveGeneric, DeriveDataTypeable #-}
-- | Types used while planning how to build everything in a project.
--
-- Primarily this is the 'ElaboratedInstallPlan'.
--
module Distribution.Client.ProjectPlanning.Types (
SolverInstallPlan,
-- * Elaborated install plan types
ElaboratedInstallPlan,
ElaboratedConfiguredPackage(..),
ElaboratedPlanPackage,
ElaboratedSharedConfig(..),
ElaboratedReadyPackage,
BuildStyle(..),
CabalFileText,
-- * Types used in executing an install plan
--TODO: [code cleanup] these types should live with execution, not with
-- plan definition. Need to better separate InstallPlan definition.
GenericBuildResult(..),
BuildResult,
BuildSuccess(..),
BuildFailure(..),
DocsResult(..),
TestsResult(..),
-- * Build targets
PackageTarget(..),
ComponentTarget(..),
SubComponentTarget(..),
-- * Setup script
SetupScriptStyle(..),
) where
import Distribution.Client.PackageHash
import Distribution.Client.Types
hiding ( BuildResult, BuildSuccess(..), BuildFailure(..)
, DocsResult(..), TestsResult(..) )
import Distribution.Client.InstallPlan
( GenericInstallPlan, SolverInstallPlan, GenericPlanPackage )
import Distribution.Client.ComponentDeps (ComponentDeps)
import Distribution.Package
hiding (InstalledPackageId, installedPackageId)
import Distribution.System
import qualified Distribution.PackageDescription as Cabal
import Distribution.InstalledPackageInfo (InstalledPackageInfo)
import Distribution.Simple.Compiler
import Distribution.Simple.Program.Db
import Distribution.ModuleName (ModuleName)
import Distribution.Simple.LocalBuildInfo (ComponentName(..))
import qualified Distribution.Simple.InstallDirs as InstallDirs
import Distribution.Simple.InstallDirs (PathTemplate)
import Distribution.Version
import Data.Map (Map)
import Data.Set (Set)
import qualified Data.ByteString.Lazy as LBS
import Distribution.Compat.Binary
import GHC.Generics (Generic)
import Data.Typeable (Typeable)
import Control.Exception
-- | The combination of an elaborated install plan plus a
-- 'ElaboratedSharedConfig' contains all the details necessary to be able
-- to execute the plan without having to make further policy decisions.
--
-- It does not include dynamic elements such as resources (such as http
-- connections).
--
type ElaboratedInstallPlan
= GenericInstallPlan InstalledPackageInfo
ElaboratedConfiguredPackage
BuildSuccess BuildFailure
type ElaboratedPlanPackage
= GenericPlanPackage InstalledPackageInfo
ElaboratedConfiguredPackage
BuildSuccess BuildFailure
--TODO: [code cleanup] decide if we really need this, there's not much in it, and in principle
-- even platform and compiler could be different if we're building things
-- like a server + client with ghc + ghcjs
data ElaboratedSharedConfig
= ElaboratedSharedConfig {
pkgConfigPlatform :: Platform,
pkgConfigCompiler :: Compiler, --TODO: [code cleanup] replace with CompilerInfo
-- | The programs that the compiler configured (e.g. for GHC, the progs
-- ghc & ghc-pkg). Once constructed, only the 'configuredPrograms' are
-- used.
pkgConfigCompilerProgs :: ProgramDb
}
deriving (Show, Generic)
--TODO: [code cleanup] no Eq instance
instance Binary ElaboratedSharedConfig
data ElaboratedConfiguredPackage
= ElaboratedConfiguredPackage {
pkgInstalledId :: InstalledPackageId,
pkgSourceId :: PackageId,
-- | TODO: [code cleanup] we don't need this, just a few bits from it:
-- build type, spec version
pkgDescription :: Cabal.PackageDescription,
-- | A total flag assignment for the package
pkgFlagAssignment :: Cabal.FlagAssignment,
-- | The original default flag assignment, used only for reporting.
pkgFlagDefaults :: Cabal.FlagAssignment,
-- | The exact dependencies (on other plan packages)
--
pkgDependencies :: ComponentDeps [ConfiguredId],
-- | Which optional stanzas (ie testsuites, benchmarks) can be built.
-- This means the solver produced a plan that has them available.
-- This doesn't necessarily mean we build them by default.
pkgStanzasAvailable :: Set OptionalStanza,
-- | Which optional stanzas the user explicitly asked to enable or
-- to disable. This tells us which ones we build by default, and
-- helps with error messages when the user asks to build something
-- they explicitly disabled.
--
-- TODO: The 'Bool' here should be refined into an ADT with three
-- cases: NotRequested, ExplicitlyRequested and
-- ImplicitlyRequested. A stanza is explicitly requested if
-- the user asked, for this *specific* package, that the stanza
-- be enabled; it's implicitly requested if the user asked for
-- all global packages to have this stanza enabled. The
-- difference between an explicit and implicit request is
-- error reporting behavior: if a user asks for tests to be
-- enabled for a specific package that doesn't have any tests,
-- we should warn them about it, but we shouldn't complain
-- that a user enabled tests globally, and some local packages
-- just happen not to have any tests. (But perhaps we should
-- warn if ALL local packages don't have any tests.)
pkgStanzasRequested :: Map OptionalStanza Bool,
-- | Which optional stanzas (ie testsuites, benchmarks) will actually
-- be enabled during the package configure step.
pkgStanzasEnabled :: Set OptionalStanza,
-- | Where the package comes from, e.g. tarball, local dir etc. This
-- is not the same as where it may be unpacked to for the build.
pkgSourceLocation :: PackageLocation (Maybe FilePath),
-- | The hash of the source, e.g. the tarball. We don't have this for
-- local source dir packages.
pkgSourceHash :: Maybe PackageSourceHash,
--pkgSourceDir ? -- currently passed in later because they can use temp locations
--pkgBuildDir ? -- but could in principle still have it here, with optional instr to use temp loc
pkgBuildStyle :: BuildStyle,
pkgSetupPackageDBStack :: PackageDBStack,
pkgBuildPackageDBStack :: PackageDBStack,
pkgRegisterPackageDBStack :: PackageDBStack,
-- | The package contains a library and so must be registered
pkgRequiresRegistration :: Bool,
pkgDescriptionOverride :: Maybe CabalFileText,
pkgVanillaLib :: Bool,
pkgSharedLib :: Bool,
pkgDynExe :: Bool,
pkgGHCiLib :: Bool,
pkgProfLib :: Bool,
pkgProfExe :: Bool,
pkgProfLibDetail :: ProfDetailLevel,
pkgProfExeDetail :: ProfDetailLevel,
pkgCoverage :: Bool,
pkgOptimization :: OptimisationLevel,
pkgSplitObjs :: Bool,
pkgStripLibs :: Bool,
pkgStripExes :: Bool,
pkgDebugInfo :: DebugInfoLevel,
pkgProgramPaths :: Map String FilePath,
pkgProgramArgs :: Map String [String],
pkgProgramPathExtra :: [FilePath],
pkgConfigureScriptArgs :: [String],
pkgExtraLibDirs :: [FilePath],
pkgExtraFrameworkDirs :: [FilePath],
pkgExtraIncludeDirs :: [FilePath],
pkgProgPrefix :: Maybe PathTemplate,
pkgProgSuffix :: Maybe PathTemplate,
pkgInstallDirs :: InstallDirs.InstallDirs FilePath,
pkgHaddockHoogle :: Bool,
pkgHaddockHtml :: Bool,
pkgHaddockHtmlLocation :: Maybe String,
pkgHaddockExecutables :: Bool,
pkgHaddockTestSuites :: Bool,
pkgHaddockBenchmarks :: Bool,
pkgHaddockInternal :: Bool,
pkgHaddockCss :: Maybe FilePath,
pkgHaddockHscolour :: Bool,
pkgHaddockHscolourCss :: Maybe FilePath,
pkgHaddockContents :: Maybe PathTemplate,
-- Setup.hs related things:
-- | One of four modes for how we build and interact with the Setup.hs
-- script, based on whether it's a build-type Custom, with or without
-- explicit deps and the cabal spec version the .cabal file needs.
pkgSetupScriptStyle :: SetupScriptStyle,
-- | The version of the Cabal command line interface that we are using
-- for this package. This is typically the version of the Cabal lib
-- that the Setup.hs is built against.
pkgSetupScriptCliVersion :: Version,
-- Build time related:
pkgBuildTargets :: [ComponentTarget],
pkgReplTarget :: Maybe ComponentTarget,
pkgBuildHaddocks :: Bool
}
deriving (Eq, Show, Generic)
instance Binary ElaboratedConfiguredPackage
instance Package ElaboratedConfiguredPackage where
packageId = pkgSourceId
instance HasUnitId ElaboratedConfiguredPackage where
installedUnitId = pkgInstalledId
instance PackageFixedDeps ElaboratedConfiguredPackage where
depends = fmap (map installedPackageId) . pkgDependencies
-- | This is used in the install plan to indicate how the package will be
-- built.
--
data BuildStyle =
-- | The classic approach where the package is built, then the files
-- installed into some location and the result registered in a package db.
--
-- If the package came from a tarball then it's built in a temp dir and
-- the results discarded.
BuildAndInstall
-- | The package is built, but the files are not installed anywhere,
-- rather the build dir is kept and the package is registered inplace.
--
-- Such packages can still subsequently be installed.
--
-- Typically 'BuildAndInstall' packages will only depend on other
-- 'BuildAndInstall' style packages and not on 'BuildInplaceOnly' ones.
--
| BuildInplaceOnly
deriving (Eq, Show, Generic)
instance Binary BuildStyle
type CabalFileText = LBS.ByteString
type ElaboratedReadyPackage = GenericReadyPackage ElaboratedConfiguredPackage
--TODO: [code cleanup] this duplicates the InstalledPackageInfo quite a bit in an install plan
-- because the same ipkg is used by many packages. So the binary file will be big.
-- Could we keep just (ipkgid, deps) instead of the whole InstalledPackageInfo?
-- or transform to a shared form when serialising / deserialising
data GenericBuildResult ipkg iresult ifailure
= BuildFailure ifailure
| BuildSuccess (Maybe ipkg) iresult
deriving (Eq, Show, Generic)
instance (Binary ipkg, Binary iresult, Binary ifailure) =>
Binary (GenericBuildResult ipkg iresult ifailure)
type BuildResult = GenericBuildResult InstalledPackageInfo
BuildSuccess BuildFailure
data BuildSuccess = BuildOk DocsResult TestsResult
deriving (Eq, Show, Generic)
data DocsResult = DocsNotTried | DocsFailed | DocsOk
deriving (Eq, Show, Generic)
data TestsResult = TestsNotTried | TestsOk
deriving (Eq, Show, Generic)
data BuildFailure = PlanningFailed --TODO: [required eventually] not yet used
| DependentFailed PackageId
| DownloadFailed String --TODO: [required eventually] not yet used
| UnpackFailed String --TODO: [required eventually] not yet used
| ConfigureFailed String
| BuildFailed String
| TestsFailed String --TODO: [required eventually] not yet used
| InstallFailed String
deriving (Eq, Show, Typeable, Generic)
instance Exception BuildFailure
instance Binary BuildFailure
instance Binary BuildSuccess
instance Binary DocsResult
instance Binary TestsResult
---------------------------
-- Build targets
--
-- | The various targets within a package. This is more of a high level
-- specification than an elaborated prescription.
--
data PackageTarget =
-- | Build the default components in this package. This usually means
-- just the lib and exes, but it can also mean the testsuites and
-- benchmarks if the user explicitly requested them.
BuildDefaultComponents
-- | Build a specific component in this package.
| BuildSpecificComponent ComponentTarget
| ReplDefaultComponent
| ReplSpecificComponent ComponentTarget
| HaddockDefaultComponents
deriving (Eq, Show, Generic)
data ComponentTarget = ComponentTarget ComponentName SubComponentTarget
deriving (Eq, Show, Generic)
data SubComponentTarget = WholeComponent
| ModuleTarget ModuleName
| FileTarget FilePath
deriving (Eq, Show, Generic)
instance Binary PackageTarget
instance Binary ComponentTarget
instance Binary SubComponentTarget
---------------------------
-- Setup.hs script policy
--
-- | There are four major cases for Setup.hs handling:
--
-- 1. @build-type@ Custom with a @custom-setup@ section
-- 2. @build-type@ Custom without a @custom-setup@ section
-- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@
-- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@
--
-- It's also worth noting that packages specifying @cabal-version: >= 1.23@
-- or later that have @build-type@ Custom will always have a @custom-setup@
-- section. Therefore in case 2, the specified @cabal-version@ will always be
-- less than 1.23.
--
-- In cases 1 and 2 we obviously have to build an external Setup.hs script,
-- while in case 4 we can use the internal library API. In case 3 we also have
-- to build an external Setup.hs script because the package needs a later
-- Cabal lib version than we can support internally.
--
data SetupScriptStyle = SetupCustomExplicitDeps
| SetupCustomImplicitDeps
| SetupNonCustomExternalLib
| SetupNonCustomInternalLib
deriving (Eq, Show, Generic)
instance Binary SetupScriptStyle
|
gbaz/cabal
|
cabal-install/Distribution/Client/ProjectPlanning/Types.hs
|
bsd-3-clause
| 14,636 | 0 | 11 | 3,838 | 1,536 | 968 | 568 | 185 | 0 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.ARB.ES31Compatibility
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.ARB.ES31Compatibility (
-- * Extension Support
glGetARBES31Compatibility,
gl_ARB_ES3_1_compatibility,
-- * Enums
pattern GL_BACK,
-- * Functions
glMemoryBarrierByRegion
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
|
haskell-opengl/OpenGLRaw
|
src/Graphics/GL/ARB/ES31Compatibility.hs
|
bsd-3-clause
| 728 | 0 | 5 | 101 | 57 | 43 | 14 | 9 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Text.PrettyPrint.ListLike (
UString, AString
) where
import Control.DeepSeq ( NFData )
import qualified Data.ListLike as LL
import Data.ListLike.UTF8 ()
import Data.String (IsString)
import Data.Text as Strict (Text)
import Data.Text.Lazy as Lazy (Text)
import Data.Text.Lazy.Builder (Builder)
class (Eq string,
IsString string,
LL.ListLike string Char,
LL.ListLikeIO string Char,
LL.StringLike string,
NFData string) =>
UString string
instance UString String
instance UString Builder
instance UString LL.Chars
-- | AString is an instance of UString to be used for internal
-- operations when we need to disambiguate an expression.
-- type AString = String -- -- UnitLargeDoc takes 5 to 6 seconds
type AString = LL.Chars
instance UString Strict.Text
instance UString Lazy.Text
|
ddssff/pretty-listlike
|
src/Text/PrettyPrint/ListLike.hs
|
bsd-3-clause
| 1,048 | 0 | 7 | 189 | 203 | 119 | 84 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Create a new project directory populated with a basic working
-- project.
module Stack.New
( new
, NewOpts(..)
, defaultTemplateName
, templateNameArgument
, getTemplates
, TemplateName
, listTemplates)
where
import Stack.Prelude
import Control.Monad.Trans.Writer.Strict
import Data.Aeson
import Data.Aeson.Types
import qualified Data.ByteString as SB
import qualified Data.ByteString.Lazy as LB
import Data.Conduit
import qualified Data.HashMap.Strict as HM
import Data.List
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Encoding.Error as T (lenientDecode)
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy as LT
import Data.Time.Calendar
import Data.Time.Clock
import qualified Data.Yaml as Yaml
import Network.HTTP.Download
import Network.HTTP.Simple
import Path
import Path.IO
import Stack.Constants
import Stack.Constants.Config
import Stack.Types.Config
import Stack.Types.PackageName
import Stack.Types.TemplateName
import System.Process.Run
import Text.Hastache
import Text.Hastache.Context
import Text.Printf
import Text.ProjectTemplate
--------------------------------------------------------------------------------
-- Main project creation
-- | Options for creating a new project.
data NewOpts = NewOpts
{ newOptsProjectName :: PackageName
-- ^ Name of the project to create.
, newOptsCreateBare :: Bool
-- ^ Whether to create the project without a directory.
, newOptsTemplate :: Maybe TemplateName
-- ^ Name of the template to use.
, newOptsNonceParams :: Map Text Text
-- ^ Nonce parameters specified just for this invocation.
}
-- | Create a new project with the given options.
new :: HasConfig env => NewOpts -> Bool -> RIO env (Path Abs Dir)
new opts forceOverwrite = do
when (newOptsProjectName opts `elem` wiredInPackages) $
throwM $ Can'tUseWiredInName (newOptsProjectName opts)
pwd <- getCurrentDir
absDir <- if bare then return pwd
else do relDir <- parseRelDir (packageNameString project)
liftM (pwd </>) (return relDir)
exists <- doesDirExist absDir
configTemplate <- view $ configL.to configDefaultTemplate
let template = fromMaybe defaultTemplateName $ asum [ cliOptionTemplate
, configTemplate
]
if exists && not bare
then throwM (AlreadyExists absDir)
else do
templateText <- loadTemplate template (logUsing absDir template)
files <-
applyTemplate
project
template
(newOptsNonceParams opts)
absDir
templateText
when (not forceOverwrite && bare) $ checkForOverwrite (M.keys files)
writeTemplateFiles files
runTemplateInits absDir
return absDir
where
cliOptionTemplate = newOptsTemplate opts
project = newOptsProjectName opts
bare = newOptsCreateBare opts
logUsing absDir template templateFrom =
let loading = case templateFrom of
LocalTemp -> "Loading local"
RemoteTemp -> "Downloading"
in
logInfo
(loading <> " template \"" <> templateName template <>
"\" to create project \"" <>
packageNameText project <>
"\" in " <>
if bare then "the current directory"
else T.pack (toFilePath (dirname absDir)) <>
" ...")
data TemplateFrom = LocalTemp | RemoteTemp
-- | Download and read in a template's text content.
loadTemplate
:: forall env. HasConfig env
=> TemplateName
-> (TemplateFrom -> RIO env ())
-> RIO env Text
loadTemplate name logIt = do
templateDir <- view $ configL.to templatesDir
case templatePath name of
AbsPath absFile -> logIt LocalTemp >> loadLocalFile absFile
UrlPath s -> do
req <- parseRequest s
let rel = fromMaybe backupUrlRelPath (parseRelFile s)
downloadTemplate req (templateDir </> rel)
RelPath relFile ->
catch
(do f <- loadLocalFile relFile
logIt LocalTemp
return f)
(\(e :: NewException) ->
case relRequest relFile of
Just req -> downloadTemplate req
(templateDir </> relFile)
Nothing -> throwM e
)
where
loadLocalFile :: Path b File -> RIO env Text
loadLocalFile path = do
logDebug ("Opening local template: \"" <> T.pack (toFilePath path)
<> "\"")
exists <- doesFileExist path
if exists
then liftIO (fmap (T.decodeUtf8With T.lenientDecode) (SB.readFile (toFilePath path)))
else throwM (FailedToLoadTemplate name (toFilePath path))
relRequest :: MonadThrow n => Path Rel File -> n Request
relRequest rel = parseRequest (defaultTemplateUrl <> "/" <> toFilePath rel)
downloadTemplate :: Request -> Path Abs File -> RIO env Text
downloadTemplate req path = do
logIt RemoteTemp
_ <-
catch
(redownload req path)
(throwM . FailedToDownloadTemplate name)
loadLocalFile path
backupUrlRelPath = $(mkRelFile "downloaded.template.file.hsfiles")
-- | Apply and unpack a template into a directory.
applyTemplate
:: HasConfig env
=> PackageName
-> TemplateName
-> Map Text Text
-> Path Abs Dir
-> Text
-> RIO env (Map (Path Abs File) LB.ByteString)
applyTemplate project template nonceParams dir templateText = do
config <- view configL
currentYear <- do
now <- liftIO getCurrentTime
(year, _, _) <- return $ toGregorian . utctDay $ now
return $ T.pack . show $ year
let context = M.union (M.union nonceParams extraParams) configParams
where
nameAsVarId = T.replace "-" "_" $ packageNameText project
nameAsModule = T.filter (/= '-') $ T.toTitle $ packageNameText project
extraParams = M.fromList [ ("name", packageNameText project)
, ("name-as-varid", nameAsVarId)
, ("name-as-module", nameAsModule)
, ("year", currentYear) ]
configParams = configTemplateParams config
(applied,missingKeys) <-
runWriterT
(hastacheStr
defaultConfig { muEscapeFunc = id }
templateText
(mkStrContextM (contextFunction context)))
unless (S.null missingKeys)
(logInfo ("\n" <> T.pack (show (MissingParameters project template missingKeys (configUserConfigPath config))) <> "\n"))
files :: Map FilePath LB.ByteString <-
catch (execWriterT $
yield (T.encodeUtf8 (LT.toStrict applied)) $$
unpackTemplate receiveMem id
)
(\(e :: ProjectTemplateException) ->
throwM (InvalidTemplate template (show e)))
when (M.null files) $
throwM (InvalidTemplate template "Template does not contain any files")
let isPkgSpec f = ".cabal" `isSuffixOf` f || f == "package.yaml"
unless (any isPkgSpec . M.keys $ files) $
throwM (InvalidTemplate template "Template does not contain a .cabal \
\or package.yaml file")
liftM
M.fromList
(mapM
(\(fp,bytes) ->
do path <- parseRelFile fp
return (dir </> path, bytes))
(M.toList files))
where
    -- | Looks up a key in the context and returns a mustache value; as a
    -- side effect, it records the set of keys that were requested but
    -- not found.
contextFunction
:: Monad m
=> Map Text Text
-> String
-> WriterT (Set String) m (MuType (WriterT (Set String) m))
contextFunction context key =
case M.lookup (T.pack key) context of
Nothing -> do
tell (S.singleton key)
return MuNothing
Just value -> return (MuVariable value)
-- | Check if we're going to overwrite any existing files.
checkForOverwrite :: (MonadIO m, MonadThrow m) => [Path Abs File] -> m ()
checkForOverwrite files = do
overwrites <- filterM doesFileExist files
unless (null overwrites) $ throwM (AttemptedOverwrites overwrites)
-- | Write files to the new project directory.
writeTemplateFiles
:: MonadIO m
=> Map (Path Abs File) LB.ByteString -> m ()
writeTemplateFiles files =
forM_
(M.toList files)
(\(fp,bytes) ->
do ensureDir (parent fp)
liftIO (LB.writeFile (toFilePath fp) bytes))
-- | Run any initialization functions, such as Git.
runTemplateInits
:: HasConfig env
=> Path Abs Dir -> RIO env ()
runTemplateInits dir = do
menv <- getMinimalEnvOverride
config <- view configL
case configScmInit config of
Nothing -> return ()
Just Git ->
catch (callProcess $ Cmd (Just dir) "git" menv ["init"])
(\(_ :: ProcessExitedUnsuccessfully) ->
logInfo "git init failed to run, ignoring ...")
-- | Display the set of templates accompanied with description if available.
listTemplates :: HasLogFunc env => RIO env ()
listTemplates = do
templates <- getTemplates
templateInfo <- getTemplateInfo
if not . M.null $ templateInfo then do
let keySizes = map (T.length . templateName) $ S.toList templates
padWidth = show $ maximum keySizes
outputfmt = "%-" <> padWidth <> "s %s\n"
headerfmt = "%-" <> padWidth <> "s %s\n"
liftIO $ printf headerfmt ("Template"::String) ("Description"::String)
forM_ (S.toList templates) (\x -> do
let name = templateName x
desc = fromMaybe "" $ liftM (mappend "- ") (M.lookup name templateInfo >>= description)
liftIO $ printf outputfmt (T.unpack name) (T.unpack desc))
else mapM_ (liftIO . T.putStrLn . templateName) (S.toList templates)
-- | Get the set of templates.
getTemplates :: HasLogFunc env => RIO env (Set TemplateName)
getTemplates = do
req <- liftM setGithubHeaders (parseUrlThrow defaultTemplatesList)
resp <- catch (httpJSON req) (throwM . FailedToDownloadTemplates)
case getResponseStatusCode resp of
200 -> return $ unTemplateSet $ getResponseBody resp
code -> throwM (BadTemplatesResponse code)
getTemplateInfo :: HasLogFunc env => RIO env (Map Text TemplateInfo)
getTemplateInfo = do
req <- liftM setGithubHeaders (parseUrlThrow defaultTemplateInfoUrl)
resp <- catch (liftM Right $ httpLbs req) (\(ex :: HttpException) -> return . Left $ "Failed to download template info. The HTTP error was: " <> show ex)
case resp >>= is200 of
Left err -> do
logInfo $ T.pack err
return M.empty
Right resp' ->
case Yaml.decodeEither (LB.toStrict $ getResponseBody resp') :: Either String Object of
Left err ->
throwM $ BadTemplateInfo err
Right o ->
return (M.mapMaybe (Yaml.parseMaybe Yaml.parseJSON) (M.fromList . HM.toList $ o) :: Map Text TemplateInfo)
where
is200 resp =
case getResponseStatusCode resp of
200 -> return resp
code -> Left $ "Unexpected status code while retrieving templates info: " <> show code
newtype TemplateSet = TemplateSet { unTemplateSet :: Set TemplateName }
instance FromJSON TemplateSet where
parseJSON = fmap TemplateSet . parseTemplateSet
-- | Parse the set of templates from the JSON.
parseTemplateSet :: Value -> Parser (Set TemplateName)
parseTemplateSet a = do
xs <- parseJSON a
fmap S.fromList (mapMaybeM parseTemplate xs)
where
parseTemplate v = do
o <- parseJSON v
name <- o .: "name"
if ".hsfiles" `isSuffixOf` name
then case parseTemplateNameFromString name of
Left{} ->
fail ("Unable to parse template name from " <> name)
Right template -> return (Just template)
else return Nothing
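-- For illustration only (not in the original source): the JSON this parser
-- consumes is an array of objects carrying a "name" field, roughly
--   [ {"name": "new-template.hsfiles", ...}, {"name": "README.md", ...} ]
-- and only entries whose name ends in ".hsfiles" survive as template names.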
--------------------------------------------------------------------------------
-- Defaults
-- | The default template name you can use if you don't have one.
defaultTemplateName :: TemplateName
defaultTemplateName = $(mkTemplateName "new-template")
-- | Default web root URL to download from.
defaultTemplateUrl :: String
defaultTemplateUrl =
"https://raw.githubusercontent.com/commercialhaskell/stack-templates/master"
-- | Default web URL to get a yaml file containing template metadata.
defaultTemplateInfoUrl :: String
defaultTemplateInfoUrl =
"https://raw.githubusercontent.com/commercialhaskell/stack-templates/master/template-info.yaml"
-- | Default web URL to list the repo contents.
defaultTemplatesList :: String
defaultTemplatesList =
"https://api.github.com/repos/commercialhaskell/stack-templates/contents/"
--------------------------------------------------------------------------------
-- Exceptions
-- | Exception that might occur when making a new project.
data NewException
= FailedToLoadTemplate !TemplateName
!FilePath
| FailedToDownloadTemplate !TemplateName
!DownloadException
| FailedToDownloadTemplates !HttpException
| BadTemplatesResponse !Int
| AlreadyExists !(Path Abs Dir)
| MissingParameters !PackageName !TemplateName !(Set String) !(Path Abs File)
| InvalidTemplate !TemplateName !String
| AttemptedOverwrites [Path Abs File]
| FailedToDownloadTemplateInfo !HttpException
| BadTemplateInfo !String
| BadTemplateInfoResponse !Int
| Can'tUseWiredInName !PackageName
deriving (Typeable)
instance Exception NewException
instance Show NewException where
show (FailedToLoadTemplate name path) =
"Failed to load download template " <> T.unpack (templateName name) <>
" from " <>
path
show (FailedToDownloadTemplate name (RedownloadFailed _ _ resp)) =
case getResponseStatusCode resp of
404 ->
"That template doesn't exist. Run `stack templates' to see a list of available templates."
code ->
"Failed to download template " <> T.unpack (templateName name) <>
": unknown reason, status code was: " <>
show code
show (AlreadyExists path) =
"Directory " <> toFilePath path <> " already exists. Aborting."
show (FailedToDownloadTemplates ex) =
"Failed to download templates. The HTTP error was: " <> show ex
show (BadTemplatesResponse code) =
"Unexpected status code while retrieving templates list: " <> show code
show (MissingParameters name template missingKeys userConfigPath) =
intercalate
"\n"
[ "The following parameters were needed by the template but not provided: " <>
intercalate ", " (S.toList missingKeys)
, "You can provide them in " <>
toFilePath userConfigPath <>
", like this:"
, "templates:"
, " params:"
, intercalate
"\n"
(map
(\key ->
" " <> key <> ": value")
(S.toList missingKeys))
, "Or you can pass each one as parameters like this:"
, "stack new " <> packageNameString name <> " " <>
T.unpack (templateName template) <>
" " <>
unwords
(map
(\key ->
"-p \"" <> key <> ":value\"")
(S.toList missingKeys))]
show (InvalidTemplate name why) =
"The template \"" <> T.unpack (templateName name) <>
"\" is invalid and could not be used. " <>
"The error was: \"" <> why <> "\""
show (AttemptedOverwrites fps) =
"The template would create the following files, but they already exist:\n" <>
unlines (map ((" " ++) . toFilePath) fps) <>
"Use --force to ignore this, and overwite these files."
show (FailedToDownloadTemplateInfo ex) =
"Failed to download templates info. The HTTP error was: " <> show ex
show (BadTemplateInfo err) =
"Template info couldn't be parsed: " <> err
show (BadTemplateInfoResponse code) =
"Unexpected status code while retrieving templates info: " <> show code
show (Can'tUseWiredInName name) =
"The name \"" <> packageNameString name <> "\" is used by GHC wired-in packages, and so shouldn't be used as a package name"
|
MichielDerhaeg/stack
|
src/Stack/New.hs
|
bsd-3-clause
| 17,638 | 1 | 23 | 5,399 | 3,983 | 2,002 | 1,981 | 400 | 5 |
module Data.Classifier.NaiveBayes
( NaiveBayes
, fromClassifier
, remove
, test
, probabilities ) where
import Data.Binary
import Data.Classifier
import Data.Counter (Counter(..))
import Data.List
import Data.Map.Strict (Map)
import Data.Monoid
import Data.Ord
import Data.Ratio ((%))
import qualified Data.Counter as Counter
import qualified Data.Map.Strict as Map
data NaiveBayes a b =
NaiveBayes { _vocab :: Counter b
, _classInstances :: Counter a
, _totalWordsInClass :: Counter a
, _wordCounts :: Map a (Counter b) }
deriving (Show, Read, Eq)
instance (Ord a, Ord b) => Monoid (NaiveBayes a b) where
mempty = NaiveBayes mempty mempty mempty mempty
NaiveBayes v1 ci1 t1 wc1 `mappend` NaiveBayes v2 ci2 t2 wc2 =
NaiveBayes (v1 <> v2) (ci1 <> ci2) (t1 <> t2) (Map.unionWith (<>) wc1 wc2)
instance (Binary a, Binary b) => Binary (NaiveBayes a b) where
get = NaiveBayes <$> get <*> get <*> get <*> get
put (NaiveBayes v c t w) = do
put v
put c
put t
put w
fromClassifier :: (Ord a, Ord b) => Classifier a b -> NaiveBayes a b
fromClassifier (Classifier m) = NaiveBayes v is t cs
where v = Map.foldr (mappend . mconcat) mempty m
is = Counter.fromMap $ fmap (fromIntegral . length) m
t = Counter.fromMap $ fmap Counter.total cs
cs = fmap mconcat m
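-- | Subtract a 'Classifier' from the model: its counts are negated and then
-- combined with the existing model via 'mappend'.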
remove :: (Ord a, Ord b) => Classifier a b -> NaiveBayes a b -> NaiveBayes a b
remove (Classifier m) nb = mappend nb $ fromClassifier $ Classifier $ fmap (fmap (Counter.unsafeFromMap . fmap negate . Counter.toMap)) m
probabilities :: (Ord a, Ord b) => NaiveBayes a b -> Counter b -> Map a Rational
probabilities (NaiveBayes (Counter.toMap -> v) (Counter.toMap -> c) t w) (Counter.toMap -> m) =
Map.intersectionWith (*) priors' $ fmap (Map.foldr (*) 1) rationals
where totalUniqueWords = Map.foldr (+) 0 $ fmap (const 1) v
totalInstances = Counter.total $ Counter.fromMap c
priors' = fmap (% totalInstances) c
rationals = Map.intersectionWith (\ l (Counter.toMap -> r) ->
Map.mergeWithKey (\ _ l' r' -> Just $ ((l' + 1) % l) ^ r') (const mempty) (fmap ((1 % l)^)) r m) divisors w
divisors = fmap (+ totalUniqueWords) (Counter.toMap t)
test :: (Ord a, Ord b) => NaiveBayes a b -> Counter b -> Maybe a
test cls cnt =
case sortBy (comparing (Down . snd)) $ Map.toList $ probabilities cls cnt of
[] -> Nothing
(x, _):_ -> Just x
|
intolerable/naive-bayes
|
src/Data/Classifier/NaiveBayes.hs
|
bsd-3-clause
| 2,441 | 0 | 18 | 566 | 1,040 | 547 | 493 | -1 | -1 |
module Exercises105 where
-- 1. a) has an error; b and c both return the same result
-- 2. foldl (flip (*)) 1 [1,2,3,4,5]
{-
(((((1 * 1) * 2) * 3) * 4) * 5)
((((1 * 2) * 3) * 4) * 5)
(((2 * 3) * 4) * 5)
((6 * 4) * 5)
(24 * 5)
120
-}
-- 3. One difference between foldl and foldr is: c) foldr, but not foldl, associates to the right
-- 4. Folds are catamorphisms, which means they are generally used to:
-- a) reduce structure
{-
5.
a) foldr (++) "" ["woot", "WOOT", "woot"]
b) foldr max "" ["fear", "is", "the", "little", "death"]
c) foldr (&&) True [False, True]
d) No, it will always return True
e) foldr ((++) . show) "" [1..5]
f) foldr (flip const) 'a' [1..5]
g) foldr (flip const) 0 "tacos"
h) foldl const 0 "burritos"
i) foldl const 'z' [1..5]
-}
|
pdmurray/haskell-book-ex
|
src/ch10/Exercises10.5.hs
|
bsd-3-clause
| 761 | 0 | 2 | 166 | 11 | 10 | 1 | 1 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
import Control.DeepSeq
import Criterion.Main
import qualified Data.Algorithm.Diff as DA
import Data.Functor.Identity
import Data.Snakes
import qualified Data.Snakes.DiffLike as DL
import Data.Vector ( Vector )
import qualified Data.Vector as V
import GHC.Generics
import System.Random
#if __GLASGOW_HASKELL__ < 710
import Data.Functor ((<$>))
#endif
deriving instance Generic (DA.Diff a)
instance (NFData a) => NFData (DL.Diff a)
instance (NFData a) => NFData (DA.Diff a)
instance (NFData a) => NFData (Snake a)
instance (NFData a) => NFData (SnakeHead a)
instance (Monad m) => Stream (Vector a) m a where
uncons v | null v = return Nothing
| otherwise = return $ Just (V.unsafeHead v, V.unsafeTail v)
{-# INLINE uncons #-}
randomList :: (Random a) => Int -> IO [a]
randomList n = take n . randoms <$> newStdGen
similarLists :: (Random a) => Int -> Double -> IO ([a], [a])
similarLists n k = do
ls <- randomList n
rs <- randomList n
ks <- randomRs (0, 1) <$> newStdGen
return $ unzip $ map (\(l, r, k') -> (l, if k < k' then l else r)) $ zip3 ls rs ks
randomVector :: (Random a) => Int -> IO (Vector a)
randomVector n = V.fromList <$> randomList n
snakeInt :: (Stream s m t, Eq t) => Maybe Int -> s -> s -> m (Maybe (Snake Int))
snakeInt = snake
{-# INLINE snakeInt #-}
vectorBench :: Benchmark
vectorBench = bgroup "vectorBench"
[ env (randomVector $ 2^10) $ \ (v :: Vector Char) ->
bench "2^10 Core" $ (nf $ runIdentity . uncurry (snakeInt Nothing)) (v, v)
, env (randomVector $ 2^20) $ \ (v :: Vector Char) ->
bench "2^20 Core" $ (nf $ runIdentity . uncurry (snakeInt Nothing)) (v, v)
]
diffLikeBench :: Benchmark
diffLikeBench = bgroup "Snakes vs Diff"
[ env (randomList $ 2^10) $ \ (s :: String) ->
bgroup "Same streams 2^10"
[ bench "Snakes-Core" $ (nf $ runIdentity . uncurry (snakeInt Nothing)) (s, s)
, bench "Snakes-Diff" $ (nf $ uncurry DL.getDiff) (s, s)
, bench "Diff" $ (nf $ uncurry DA.getDiff) (s, s)
]
, env (randomList $ 2^20) $ \ (s :: String) ->
bgroup "Same streams 2^20"
[ bench "Snakes-Core" $ (nf $ runIdentity . uncurry (snakeInt Nothing)) (s, s)
, bench "Snakes" $ (nf $ uncurry DL.getDiff) (s, s)
, bench "Diff" $ (nf $ uncurry DA.getDiff) (s, s)
]
, env (randomList $ 2^10) $ \ (s1 :: String) ->
env (randomList $ 2^10) $ \ (s2 :: String) ->
bgroup "Random streams 2^10"
[ bench "Snakes-Core (weak head)" $ (whnf $ runIdentity . uncurry (snakeInt Nothing)) (s1, s2)
, bench "Snakes" $ (nf $ uncurry DL.getDiff) (s1, s2)
, bench "Diff" $ (nf $ uncurry DA.getDiff) (s1, s2)
]
, env (similarLists (2^10) 0.1) $ \ ~(s1 :: String, s2) ->
bgroup "Random streams 2^10 (10% diff)"
[ bench "Snakes-Core (weak head)" $ (whnf $ runIdentity . uncurry (snakeInt Nothing)) (s1, s2)
, bench "Snakes" $ (nf $ uncurry DL.getDiff) (s1, s2)
, bench "Diff" $ (nf $ uncurry DA.getDiff) (s1, s2)
]
, env (similarLists (2^16) 0.01) $ \ ~(s1 :: String, s2) ->
bgroup "Random streams 2^16 (1% diff)"
[ bench "Snakes-Core (weak head)" $ (whnf $ runIdentity . uncurry (snakeInt Nothing)) (s1, s2)
, bench "Snakes" $ (nf $ uncurry DL.getDiff) (s1, s2)
, bench "Diff" $ (nf $ uncurry DA.getDiff) (s1, s2)
]
]
-- Our benchmark harness.
main :: IO ()
main = defaultMain
[ diffLikeBench
, vectorBench
]
|
schernichkin/snakes
|
bench/Main.hs
|
bsd-3-clause
| 3,730 | 0 | 19 | 895 | 1,511 | 794 | 717 | 78 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module CrossCodegen where
{-
A special cross-compilation mode for hsc2hs, which generates a .hs
file without needing to run the executables that the C compiler
outputs.
Instead, it uses the output of compilations only -- specifically,
whether compilation fails. This is the same trick that autoconf uses
when cross compiling; if you want to know if sizeof(int) <= 4, then try
compiling:
> int x() {
> static int ary[1 - 2*(sizeof(int) <= 4)];
> }
and see if it fails. If you want to know sizeof(int), then
repeatedly apply this kind of test with differing values, using
binary search.
-}
import Prelude hiding (concatMap)
import System.IO (hPutStr, openFile, IOMode(..), hClose)
import System.Directory (removeFile)
import Data.Char (toLower,toUpper,isSpace)
import Control.Exception (assert, onException)
import Control.Monad (when, liftM, forM, ap)
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative (Applicative(..))
#endif
import Data.Foldable (concatMap)
import Data.Maybe (fromMaybe)
import qualified Data.Sequence as S
import Data.Sequence ((|>),ViewL(..))
import System.Exit ( ExitCode(..) )
import System.Process
import C
import Common
import Flags
import HSCParser
-- A monad over IO for performing tests; keeps the commandline flags
-- and a state counter for unique filename generation.
-- equivalent to ErrorT String (StateT Int (ReaderT TestMonadEnv IO))
newtype TestMonad a = TestMonad { runTest :: TestMonadEnv -> Int -> IO (Either String a, Int) }
instance Functor TestMonad where
fmap = liftM
instance Applicative TestMonad where
pure = return
(<*>) = ap
instance Monad TestMonad where
return a = TestMonad (\_ c -> return $ (Right a, c))
x >>= fn = TestMonad (\e c -> (runTest x e c) >>=
(\(a,c') -> either (\err -> return (Left err, c'))
(\result -> runTest (fn result) e c')
a))
data TestMonadEnv = TestMonadEnv {
testIsVerbose_ :: Bool,
testLogNestCount_ :: Int,
testKeepFiles_ :: Bool,
testGetBaseName_ :: FilePath,
testGetFlags_ :: [Flag],
testGetConfig_ :: Config,
testGetCompiler_ :: FilePath
}
testAsk :: TestMonad TestMonadEnv
testAsk = TestMonad (\e c -> return (Right e, c))
testIsVerbose :: TestMonad Bool
testIsVerbose = testIsVerbose_ `fmap` testAsk
testGetCompiler :: TestMonad FilePath
testGetCompiler = testGetCompiler_ `fmap` testAsk
testKeepFiles :: TestMonad Bool
testKeepFiles = testKeepFiles_ `fmap` testAsk
testGetFlags :: TestMonad [Flag]
testGetFlags = testGetFlags_ `fmap` testAsk
testGetConfig :: TestMonad Config
testGetConfig = testGetConfig_ `fmap` testAsk
testGetBaseName :: TestMonad FilePath
testGetBaseName = testGetBaseName_ `fmap` testAsk
testIncCount :: TestMonad Int
testIncCount = TestMonad (\_ c -> let next=succ c
in next `seq` return (Right c, next))
testFail' :: String -> TestMonad a
testFail' s = TestMonad (\_ c -> return (Left s, c))
testFail :: SourcePos -> String -> TestMonad a
testFail (SourcePos file line) s = testFail' (file ++ ":" ++ show line ++ " " ++ s)
-- liftIO for TestMonad
liftTestIO :: IO a -> TestMonad a
liftTestIO x = TestMonad (\_ c -> x >>= \r -> return (Right r, c))
-- finally for TestMonad
testFinally :: TestMonad a -> TestMonad b -> TestMonad a
testFinally action cleanup = do r <- action `testOnException` cleanup
_ <- cleanup
return r
-- onException for TestMonad. This rolls back the state on an
-- IO exception, which isn't great but shouldn't matter for now
-- since only the test count is stored there.
testOnException :: TestMonad a -> TestMonad b -> TestMonad a
testOnException action cleanup = TestMonad (\e c -> runTest action e c
`onException` runTest cleanup e c >>= \(actionResult,c') ->
case actionResult of
Left _ -> do (_,c'') <- runTest cleanup e c'
return (actionResult,c'')
Right _ -> return (actionResult,c'))
-- prints the string to stdout if verbose mode is enabled.
-- Maintains a nesting count and pads with spaces so that:
-- testLog "a" $
-- testLog "b" $ return ()
-- will print
-- a
-- b
testLog :: String -> TestMonad a -> TestMonad a
testLog s a = TestMonad (\e c -> do let verbose = testIsVerbose_ e
nestCount = testLogNestCount_ e
when verbose $ putStrLn $ (concat $ replicate nestCount " ") ++ s
runTest a (e { testLogNestCount_ = nestCount+1 }) c)
testLog' :: String -> TestMonad ()
testLog' s = testLog s (return ())
testLogAtPos :: SourcePos -> String -> TestMonad a -> TestMonad a
testLogAtPos (SourcePos file line) s a = testLog (file ++ ":" ++ show line ++ " " ++ s) a
-- Given a list of file suffixes, will generate a list of filenames
-- which are all unique and have the given suffixes. On exit from this
-- action, all those files will be removed (unless keepFiles is active)
makeTest :: [String] -> ([String] -> TestMonad a) -> TestMonad a
makeTest fileSuffixes fn = do
c <- testIncCount
fileBase <- testGetBaseName
keepFiles <- testKeepFiles
let files = zipWith (++) (repeat (fileBase ++ show c)) fileSuffixes
testFinally (fn files)
(when (not keepFiles)
(mapM_ removeOrIgnore files))
where
removeOrIgnore f = liftTestIO (catchIO (removeFile f) (const $ return ()))
-- Convert from lists to tuples (to avoid "incomplete pattern" warnings in the callers)
makeTest2 :: (String,String) -> ((String,String) -> TestMonad a) -> TestMonad a
makeTest2 (a,b) fn = makeTest [a,b] helper
where helper [a',b'] = fn (a',b')
helper _ = error "makeTest: internal error"
makeTest3 :: (String,String,String) -> ((String,String,String) -> TestMonad a) -> TestMonad a
makeTest3 (a,b,c) fn = makeTest [a,b,c] helper
where helper [a',b',c'] = fn (a',b',c')
helper _ = error "makeTest: internal error"
-- A Zipper over lists. Unlike ListZipper, this separates at the type level
-- a list which may have a currently focused item (Zipper a) from
-- a list which _definitely_ has a focused item (ZCursor a), so
-- that zNext can be total.
data Zipper a = End { zEnd :: S.Seq a }
| Zipper (ZCursor a)
data ZCursor a = ZCursor { zCursor :: a,
zAbove :: S.Seq a, -- elements prior to the cursor
-- in regular order (not reversed!)
zBelow :: S.Seq a -- elements after the cursor
}
zipFromList :: [a] -> Zipper a
zipFromList [] = End S.empty
zipFromList (l:ls) = Zipper (ZCursor l S.empty (S.fromList ls))
zNext :: ZCursor a -> Zipper a
zNext (ZCursor c above below) =
case S.viewl below of
S.EmptyL -> End (above |> c)
c' :< below' -> Zipper (ZCursor c' (above |> c) below')
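-- | A tiny illustration (not part of the original module): walk a 'Zipper'
-- from the cursor to the end with 'zNext', collecting the visited elements.
zipperToListSketch :: Zipper a -> [a]
zipperToListSketch (End _)    = []
zipperToListSketch (Zipper z) = zCursor z : zipperToListSketch (zNext z)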
-- Generates the .hs file from the .hsc file, by looping over each
-- Special element and calling outputSpecial to find out what it needs.
diagnose :: String -> (String -> TestMonad ()) -> [Token] -> TestMonad ()
diagnose inputFilename output input = do
checkValidity input
output ("{-# LINE 1 \"" ++ inputFilename ++ "\" #-}\n")
loop (zipFromList input)
where
loop (End _) = return ()
loop (Zipper z@ZCursor {zCursor=Special _ key _}) =
case key of
_ | key `elem` ["if","ifdef","ifndef","elif","else"] -> do
condHolds <- checkConditional z
if condHolds
then loop (zNext z)
else loop =<< (either testFail' return (skipFalseConditional (zNext z)))
"endif" -> loop (zNext z)
_ -> do
outputSpecial output z
loop (zNext z)
loop (Zipper z@ZCursor {zCursor=Text pos txt}) = do
outputText output pos txt
loop (zNext z)
outputSpecial :: (String -> TestMonad ()) -> ZCursor Token -> TestMonad ()
outputSpecial output (z@ZCursor {zCursor=Special pos@(SourcePos file line) key value}) =
case key of
"const" -> outputConst value show
"offset" -> outputConst ("offsetof(" ++ value ++ ")") (\i -> "(" ++ show i ++ ")")
"size" -> outputConst ("sizeof(" ++ value ++ ")") (\i -> "(" ++ show i ++ ")")
"peek" -> outputConst ("offsetof(" ++ value ++ ")")
(\i -> "(\\hsc_ptr -> peekByteOff hsc_ptr " ++ show i ++ ")")
"poke" -> outputConst ("offsetof(" ++ value ++ ")")
(\i -> "(\\hsc_ptr -> pokeByteOff hsc_ptr " ++ show i ++ ")")
"ptr" -> outputConst ("offsetof(" ++ value ++ ")")
(\i -> "(\\hsc_ptr -> hsc_ptr `plusPtr` " ++ show i ++ ")")
"type" -> computeType z >>= output
"enum" -> computeEnum z >>= output
"error" -> testFail pos ("#error " ++ value)
"warning" -> liftTestIO $ putStrLn (file ++ ":" ++ show line ++ " warning: " ++ value)
"include" -> return ()
"define" -> return ()
"undef" -> return ()
_ -> testFail pos ("directive " ++ key ++ " cannot be handled in cross-compilation mode")
where outputConst value' formatter = computeConst z value' >>= (output . formatter)
outputSpecial _ _ = error "outputSpecial's argument isn't a Special"
outputText :: (String -> TestMonad ()) -> SourcePos -> String -> TestMonad ()
outputText output (SourcePos file line) txt =
case break (=='\n') txt of
(noNewlines, []) -> output noNewlines
(firstLine, _:restOfLines) ->
output (firstLine ++ "\n" ++
"{-# LINE " ++ show (line+1) ++ " \"" ++ file ++ "\" #-}\n" ++
restOfLines)
-- Bleh, messy. For each test we're compiling, we have a specific line of
-- code that may cause compiler errors -- that's the test we want to perform.
-- However, we *really* don't want any other kinds of compiler errors sneaking
-- in (which might be e.g. due to the user's syntax errors) or we'll make the
-- wrong conclusions on our tests.
--
-- So before we compile any of the tests, take a pass over the whole file and
-- generate a .c file which should fail if there are any syntax errors in what
-- the user gaves us. Hopefully, then the only reason our later compilations
-- might fail is the particular reason we want.
--
-- Another approach would be to try to parse the stdout of GCC and diagnose
-- whether the error is the one we want. That's tricky because of localization
-- etc. etc., though it would be less nerve-wracking. FYI it's not the approach
-- that autoconf went with.
checkValidity :: [Token] -> TestMonad ()
checkValidity input = do
config <- testGetConfig
flags <- testGetFlags
let test = outTemplateHeaderCProg (cTemplate config) ++
concatMap outFlagHeaderCProg flags ++
concatMap (uncurry outValidityCheck) (zip input [0..])
testLog ("checking for compilation errors") $ do
success <- makeTest2 (".c",".o") $ \(cFile,oFile) -> do
liftTestIO $ writeBinaryFile cFile test
compiler <- testGetCompiler
runCompiler compiler
(["-c",cFile,"-o",oFile]++[f | CompFlag f <- flags])
Nothing
when (not success) $ testFail' "compilation failed"
testLog' "compilation is error-free"
outValidityCheck :: Token -> Int -> String
outValidityCheck s@(Special pos key value) uniq =
case key of
"const" -> checkValidConst value
"offset" -> checkValidConst ("offsetof(" ++ value ++ ")")
"size" -> checkValidConst ("sizeof(" ++ value ++ ")")
"peek" -> checkValidConst ("offsetof(" ++ value ++ ")")
"poke" -> checkValidConst ("offsetof(" ++ value ++ ")")
"ptr" -> checkValidConst ("offsetof(" ++ value ++ ")")
"type" -> checkValidType
"enum" -> checkValidEnum
_ -> outHeaderCProg' s
where
checkValidConst value' = "void _hsc2hs_test" ++ show uniq ++ "()\n{\n" ++ validConstTest value' ++ "}\n";
checkValidType = "void _hsc2hs_test" ++ show uniq ++ "()\n{\n" ++ outCLine pos ++ " (void)(" ++ value ++ ")1;\n}\n";
checkValidEnum =
case parseEnum value of
Nothing -> ""
Just (_,_,enums) ->
"void _hsc2hs_test" ++ show uniq ++ "()\n{\n" ++
concatMap (\(_,cName) -> validConstTest cName) enums ++
"}\n"
-- we want this to fail if the value is syntactically invalid or isn't a constant
validConstTest value' = outCLine pos ++ " {\n static int test_array[(" ++ value' ++ ") > 0 ? 2 : 1];\n (void)test_array;\n }\n";
outValidityCheck (Text _ _) _ = ""
-- Skips over some #if or other conditional that we found to be false.
-- I.e. the argument should be a zipper whose cursor is one past the #if,
-- and returns a zipper whose cursor points at the next item which
-- could possibly be compiled.
skipFalseConditional :: Zipper Token -> Either String (Zipper Token)
skipFalseConditional (End _) = Left "unterminated endif"
skipFalseConditional (Zipper z@(ZCursor {zCursor=Special _ key _})) =
case key of
"if" -> either Left skipFalseConditional $ skipFullConditional 0 (zNext z)
"ifdef" -> either Left skipFalseConditional $ skipFullConditional 0 (zNext z)
"ifndef" -> either Left skipFalseConditional $ skipFullConditional 0 (zNext z)
"elif" -> Right $ Zipper z
"else" -> Right $ Zipper z
"endif" -> Right $ zNext z
_ -> skipFalseConditional (zNext z)
skipFalseConditional (Zipper z) = skipFalseConditional (zNext z)
-- Skips over an #if all the way to the #endif
skipFullConditional :: Int -> Zipper Token -> Either String (Zipper Token)
skipFullConditional _ (End _) = Left "unterminated endif"
skipFullConditional nest (Zipper z@(ZCursor {zCursor=Special _ key _})) =
case key of
"if" -> skipFullConditional (nest+1) (zNext z)
"ifdef" -> skipFullConditional (nest+1) (zNext z)
"ifndef" -> skipFullConditional (nest+1) (zNext z)
"endif" | nest > 0 -> skipFullConditional (nest-1) (zNext z)
"endif" | otherwise -> Right $ zNext z
_ -> skipFullConditional nest (zNext z)
skipFullConditional nest (Zipper z) = skipFullConditional nest (zNext z)
data IntegerConstant = Signed Integer |
Unsigned Integer deriving (Show)
-- Prints a syntactically valid integer in C
cShowInteger :: IntegerConstant -> String
cShowInteger (Signed x) | x < 0 = "(" ++ show (x+1) ++ "-1)"
-- Trick to avoid overflowing large integer constants
-- http://www.hardtoc.com/archives/119
cShowInteger (Signed x) = show x
cShowInteger (Unsigned x) = show x ++ "u"
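-- Illustrative examples (not in the original source):
--   cShowInteger (Signed (-2147483648)) == "(-2147483647-1)"
--   cShowInteger (Unsigned 5) == "5u"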
data IntegerComparison = GreaterOrEqual IntegerConstant |
LessOrEqual IntegerConstant
instance Show IntegerComparison where
showsPrec _ (GreaterOrEqual c) = showString "`GreaterOrEqual` " . shows c
showsPrec _ (LessOrEqual c) = showString "`LessOrEqual` " . shows c
cShowCmpTest :: IntegerComparison -> String
cShowCmpTest (GreaterOrEqual x) = ">=" ++ cShowInteger x
cShowCmpTest (LessOrEqual x) = "<=" ++ cShowInteger x
-- The cursor should point at #{const SOME_VALUE} or something like that.
-- Determines the value of SOME_VALUE using binary search; this
-- is a trick which is cribbed from autoconf's AC_COMPUTE_INT.
computeConst :: ZCursor Token -> String -> TestMonad Integer
computeConst zOrig@(ZCursor (Special pos _ _) _ _) value = do
testLogAtPos pos ("computing " ++ value) $ do
nonNegative <- compareConst z (GreaterOrEqual (Signed 0))
integral <- checkValueIsIntegral z nonNegative
when (not integral) $ testFail pos $ value ++ " is not an integer"
(lower,upper) <- bracketBounds z nonNegative
int <- binarySearch z nonNegative lower upper
testLog' $ "result: " ++ show int
return int
where -- replace the Special's value with the provided value; e.g. the special
-- is #{size SOMETHING} and we might replace value with "sizeof(SOMETHING)".
z = zOrig {zCursor=specialSetValue value (zCursor zOrig)}
specialSetValue v (Special p k _) = Special p k v
specialSetValue _ _ = error "computeConst argument isn't a Special"
computeConst _ _ = error "computeConst argument isn't a Special"
-- Binary search, once we've bracketed the integer.
binarySearch :: ZCursor Token -> Bool -> Integer -> Integer -> TestMonad Integer
binarySearch _ _ l u | l == u = return l
binarySearch z nonNegative l u = do
let mid :: Integer
mid = (l+u+1) `div` 2
inTopHalf <- compareConst z (GreaterOrEqual $ (if nonNegative then Unsigned else Signed) mid)
let (l',u') = if inTopHalf then (mid,u) else (l,(mid-1))
assert (l < mid && mid <= u && -- l < mid <= u
l <= l' && l' <= u' && u' <= u && -- l <= l' <= u' <= u
u'-l' < u-l) -- |u' - l'| < |u - l|
(binarySearch z nonNegative l' u')
-- Establishes bounds on the unknown integer. By searching increasingly
-- large powers of 2, it'll bracket an integer x by lower & upper
-- such that lower <= x <= upper.
--
-- Assumes 2's complement integers.
bracketBounds :: ZCursor Token -> Bool -> TestMonad (Integer, Integer)
bracketBounds z nonNegative = do
let -- test against integers 2**x-1 when positive, and 2**x when negative,
-- to avoid generating constants that'd overflow the machine's integers.
-- I.e. suppose we're searching for #{const INT_MAX} (e.g. 2^32-1).
-- If we're comparing against all 2**x-1, we'll stop our search
-- before we ever overflow int.
powersOfTwo = iterate (\a -> 2*a) 1
positiveBounds = map pred powersOfTwo
negativeBounds = map negate powersOfTwo
-- Test each element of the bounds list until we find one that exceeds
-- the integer.
loop cmp inner (maybeOuter:bounds') = do
outerBounded <- compareConst z (cmp maybeOuter)
if outerBounded
then return (inner,maybeOuter)
else loop cmp maybeOuter bounds'
loop _ _ _ = error "bracketBounds: infinite list exhausted"
if nonNegative
then do (inner,outer) <- loop (LessOrEqual . Unsigned) (-1) positiveBounds
return (inner+1,outer)
else do (inner,outer) <- loop (GreaterOrEqual . Signed) 0 negativeBounds
return (outer,inner-1)
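-- Worked example (illustrative, not in the original source): for a
-- non-negative constant whose value is 42, 'bracketBounds' compares it against
-- 0, 1, 3, 7, 15, 31, 63 (i.e. 2^k - 1) and returns the bracket (32, 63);
-- 'binarySearch' then probes the midpoints 48, 40, 44, 42, 43 with further
-- compile tests until the bounds meet at 42.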
-- For #{enum} codegen; mimics template-hsc.h's hsc_haskellize
haskellize :: String -> String
haskellize [] = []
haskellize (firstLetter:next) = toLower firstLetter : loop False next
where loop _ [] = []
loop _ ('_':as) = loop True as
loop upper (a:as) = (if upper then toUpper a else toLower a) : loop False as
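-- Illustrative example (not in the original source):
--   haskellize "FOO_BAR" == "fooBar"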
-- For #{enum} codegen; in normal hsc2hs, any whitespace in the enum types &
-- constructors will be mangled by the C preprocessor. This mimics the same
-- mangling.
stringify :: String -> String
-- Spec: stringify = unwords . words
stringify = go False . dropWhile isSpace
where
go _haveSpace [] = []
go haveSpace (x:xs)
| isSpace x = go True xs
| otherwise = if haveSpace
then ' ' : x : go False xs
else x : go False xs
computeEnum :: ZCursor Token -> TestMonad String
computeEnum z@(ZCursor (Special _ _ enumText) _ _) =
case parseEnum enumText of
Nothing -> return ""
Just (enumType,constructor,enums) ->
concatM enums $ \(maybeHsName, cName) -> do
constValue <- computeConst z cName
let hsName = fromMaybe (haskellize cName) maybeHsName
return $
hsName ++ " :: " ++ stringify enumType ++ "\n" ++
hsName ++ " = " ++ stringify constructor ++ " " ++ showsPrec 11 constValue "\n"
where concatM l = liftM concat . forM l
computeEnum _ = error "computeEnum argument isn't a Special"
-- Implementation of #{type}, using computeConst
computeType :: ZCursor Token -> TestMonad String
computeType z@(ZCursor (Special pos _ value) _ _) = do
testLogAtPos pos ("computing type of " ++ value) $ do
integral <- testLog ("checking if type " ++ value ++ " is an integer") $ do
success <- runCompileBooleanTest z $ "(" ++ value ++ ")(int)(" ++ value ++ ")1.4 == (" ++ value ++ ")1.4"
testLog' $ "result: " ++ (if success then "integer" else "floating")
return success
typeRet <- if integral
then do
signed <- testLog ("checking if type " ++ value ++ " is signed") $ do
success <- runCompileBooleanTest z $ "(" ++ value ++ ")(-1) < (" ++ value ++ ")0"
testLog' $ "result: " ++ (if success then "signed" else "unsigned")
return success
size <- computeConst z ("sizeof(" ++ value ++ ")")
return $ (if signed then "Int" else "Word") ++ (show (size * 8))
else do
let checkSize test = testLog ("checking if " ++ test) $ do
success <- runCompileBooleanTest z test
testLog' $ "result: " ++ show success
return success
ldouble <- checkSize ("sizeof(" ++ value ++ ") > sizeof(double)")
if ldouble
then return "LDouble"
else do
double <- checkSize ("sizeof(" ++ value ++ ") == sizeof(double)")
if double
then return "Double"
else return "Float"
testLog' $ "result: " ++ typeRet
return typeRet
computeType _ = error "computeType argument isn't a Special"
outHeaderCProg' :: Token -> String
outHeaderCProg' (Special pos key value) = outHeaderCProg (pos,key,value)
outHeaderCProg' _ = ""
-- Checks if an #if/#ifdef etc. etc. is true by inserting a #error
-- and seeing if the compile fails.
checkConditional :: ZCursor Token -> TestMonad Bool
checkConditional (ZCursor s@(Special pos key value) above below) = do
config <- testGetConfig
flags <- testGetFlags
let test = outTemplateHeaderCProg (cTemplate config) ++
(concatMap outFlagHeaderCProg flags) ++
(concatMap outHeaderCProg' above) ++
outHeaderCProg' s ++ "#error T\n" ++
(concatMap outHeaderCProg' below)
testLogAtPos pos ("checking #" ++ key ++ " " ++ value) $ do
condTrue <- not `fmap` runCompileTest test
testLog' $ "result: " ++ show condTrue
return condTrue
checkConditional _ = error "checkConditional argument isn't a Special"
-- Make sure the value we're trying to binary search isn't floating point.
checkValueIsIntegral :: ZCursor Token -> Bool -> TestMonad Bool
checkValueIsIntegral z@(ZCursor (Special _ _ value) _ _) nonNegative = do
let intType = if nonNegative then "unsigned long long" else "long long"
testLog ("checking if " ++ value ++ " is an integer") $ do
success <- runCompileBooleanTest z $ "(" ++ intType ++ ")(" ++ value ++ ") == (" ++ value ++ ")"
testLog' $ "result: " ++ (if success then "integer" else "floating")
return success
checkValueIsIntegral _ _ = error "checkConditional argument isn't a Special"
compareConst :: ZCursor Token -> IntegerComparison -> TestMonad Bool
compareConst z@(ZCursor (Special _ _ value) _ _) cmpTest = do
testLog ("checking " ++ value ++ " " ++ show cmpTest) $ do
success <- runCompileBooleanTest z $ "(" ++ value ++ ") " ++ cShowCmpTest cmpTest
testLog' $ "result: " ++ show success
return success
compareConst _ _ = error "compareConst argument isn't a Special"
-- Given a compile-time constant with boolean type, this extracts the
-- value of the constant by compiling a .c file only.
--
-- The trick comes from autoconf: use the fact that the compiler must
-- perform constant arithmetic for computation of array dimensions, and
-- will generate an error if the array has negative size.
runCompileBooleanTest :: ZCursor Token -> String -> TestMonad Bool
runCompileBooleanTest (ZCursor s above below) booleanTest = do
config <- testGetConfig
flags <- testGetFlags
let test = -- all the surrounding code
outTemplateHeaderCProg (cTemplate config) ++
(concatMap outFlagHeaderCProg flags) ++
(concatMap outHeaderCProg' above) ++
outHeaderCProg' s ++
-- the test
"void _hsc2hs_test() {\n" ++
" static int test_array[1 - 2 * !(" ++ booleanTest ++ ")];\n" ++
" test_array[0] = 0;\n" ++
"}\n" ++
(concatMap outHeaderCProg' below)
runCompileTest test
runCompileTest :: String -> TestMonad Bool
runCompileTest testStr = do
makeTest3 (".c", ".o",".txt") $ \(cFile,oFile,stdout) -> do
liftTestIO $ writeBinaryFile cFile testStr
flags <- testGetFlags
compiler <- testGetCompiler
runCompiler compiler
(["-c",cFile,"-o",oFile]++[f | CompFlag f <- flags])
(Just stdout)
runCompiler :: FilePath -> [String] -> Maybe FilePath -> TestMonad Bool
runCompiler prog args mStdoutFile = do
let cmdLine =
#if MIN_VERSION_process(1,1,0)
showCommandForUser prog args
#else
unwords (prog : args)
#endif
testLog ("executing: " ++ cmdLine) $ liftTestIO $ do
mHOut <- case mStdoutFile of
Nothing -> return Nothing
Just stdoutFile -> liftM Just $ openFile stdoutFile WriteMode
process <- runProcess prog args Nothing Nothing Nothing mHOut mHOut
case mHOut of
Just hOut -> hClose hOut
Nothing -> return ()
exitStatus <- waitForProcess process
return $ case exitStatus of
ExitSuccess -> True
ExitFailure _ -> False
-- The main driver for cross-compilation mode
outputCross :: Config -> String -> String -> String -> String -> [Token] -> IO ()
outputCross config outName outDir outBase inName toks =
runTestMonad $ do
file <- liftTestIO $ openFile outName WriteMode
(diagnose inName (liftTestIO . hPutStr file) toks
`testFinally` (liftTestIO $ hClose file))
`testOnException` (liftTestIO $ removeFile outName) -- cleanup on errors
where
tmenv = TestMonadEnv (cVerbose config) 0 (cKeepFiles config) (outDir++outBase++"_hsc_test") (cFlags config) config (cCompiler config)
runTestMonad x = runTest x tmenv 0 >>= (handleError . fst)
handleError (Left e) = die (e++"\n")
handleError (Right ()) = return ()
|
DavidAlphaFox/ghc
|
utils/hsc2hs/CrossCodegen.hs
|
bsd-3-clause
| 27,155 | 0 | 24 | 7,399 | 7,207 | 3,665 | 3,542 | 423 | 14 |
{-# LANGUAGE GADTs, RankNTypes, ConstraintKinds, TypeOperators, FlexibleInstances #-}
module Numeric.Trainee.Params (
Parametric,
Params(..), NoParams(..),
onParams, liftParams,
onParams2, liftParams2,
castParams
) where
import Prelude.Unicode
import Control.DeepSeq
import Data.Function (fix)
import Data.List (intercalate, intersperse)
import Data.Maybe (fromMaybe)
import Data.Typeable
import qualified Data.Vector as V
-- | Constraints to smth, that can be parameters
type Parametric w = (Show w, Num w, Fractional w, Floating w, NFData w, Typeable w)
-- | Parameters holder; we use it in @Learnee@ so that we don't have to pass the params type
-- around, since combining many @Learnee@s would produce a huge and unreadable params type
data Params where
Params ∷ Parametric w ⇒ w → Params
-- | Implicitly converts to any params in `onParams2`, used as target for `fromIntegral` and `fromRational` implementation
AnyParam ∷ Double → Params
deriving (Typeable)
instance Show Params where
show (Params ws) = show ws
show (AnyParam r) = "any[" ++ show r ++ "]"
instance Num Params where
(+) = liftParams2 (+)
(*) = liftParams2 (*)
abs = liftParams abs
signum = liftParams signum
fromInteger = AnyParam ∘ fromInteger
negate = liftParams negate
instance Fractional Params where
fromRational = AnyParam ∘ fromRational
recip = liftParams recip
instance Floating Params where
pi = AnyParam pi
exp = liftParams exp
log = liftParams log
sqrt = liftParams sqrt
(**) = liftParams2 (**)
logBase = liftParams2 logBase
sin = liftParams sin
cos = liftParams cos
tan = liftParams tan
asin = liftParams asin
acos = liftParams acos
atan = liftParams atan
sinh = liftParams sinh
cosh = liftParams cosh
tanh = liftParams tanh
asinh = liftParams asinh
acosh = liftParams acosh
atanh = liftParams atanh
instance NFData Params where
rnf (Params ws) = rnf ws
rnf (AnyParam r) = rnf r
instance {-# OVERLAPPING #-} Show (Params, Params) where
show (l, r) = intercalate "\n" $ intersperse (replicate 10 '-') $
filter (not ∘ null) [show l, show r]
instance Num (Params, Params) where
(l, r) + (l', r') = (l + l', r + r')
(l, r) * (l', r') = (l * l', r * r')
abs (l, r) = (abs l, abs r)
signum (l, r) = (signum l, signum r)
fromInteger i = (fromInteger i, fromInteger i)
negate (l, r) = (negate l, negate r)
instance Fractional (Params, Params) where
fromRational r = (fromRational r, fromRational r)
recip (l, r) = (recip l, recip r)
instance Floating (Params, Params) where
pi = (pi, pi)
exp (l, r) = (exp l, exp r)
log (l, r) = (log l, log r)
sqrt (l, r) = (sqrt l, sqrt r)
(l, r) ** (x, y) = (l ** x, r ** y)
logBase (l, r) (x, y) = (logBase l x, logBase r y)
sin (l, r) = (sin l, sin r)
cos (l, r) = (cos l, cos r)
tan (l, r) = (tan l, tan r)
asin (l, r) = (asin l, asin r)
acos (l, r) = (acos l, acos r)
atan (l, r) = (atan l, atan r)
sinh (l, r) = (sinh l, sinh r)
cosh (l, r) = (cosh l, cosh r)
tanh (l, r) = (tanh l, tanh r)
asinh (l, r) = (asinh l, asinh r)
acosh (l, r) = (acosh l, acosh r)
atanh (l, r) = (atanh l, atanh r)
instance {-# OVERLAPPING #-} Show (V.Vector Params) where
show ps = intercalate "\n" $ intersperse (replicate 10 '-') $
filter (not ∘ null) ∘ map show ∘ V.toList $ ps
instance Num (V.Vector Params) where
ls + rs = uncurry (V.zipWith (+)) $ unifyVecs ls rs
ls * rs = uncurry (V.zipWith (*)) $ unifyVecs ls rs
abs = V.map abs
signum = V.map signum
fromInteger = V.singleton ∘ fromInteger
negate = V.map negate
unifyVecs ∷ V.Vector Params → V.Vector Params → (V.Vector Params, V.Vector Params)
unifyVecs x y
| V.length x ≡ V.length y = (x, y)
| anyParamVec x ∨ anyParamVec y = (extend x, extend y)
| otherwise = error $ concat [
"unifyVecs: params vectors length mismatch: ",
show $ V.length x,
" and ",
show $ V.length y]
where
anyParamVec v = V.length v ≡ 1 ∧ case V.head v of
AnyParam _ → True
_ → False
l = max (V.length x) (V.length y)
extend v
| anyParamVec v = V.replicate l (V.head v)
| otherwise = v
instance Fractional (V.Vector Params) where
fromRational = V.singleton ∘ fromRational
recip = V.map recip
instance Floating (V.Vector Params) where
pi = V.singleton pi
exp = V.map exp
log = V.map log
sqrt = V.map sqrt
xs ** as = uncurry (V.zipWith (**)) $ unifyVecs xs as
logBase xs as = uncurry (V.zipWith logBase) $ unifyVecs xs as
sin = V.map sin
cos = V.map cos
tan = V.map tan
asin = V.map asin
acos = V.map acos
atan = V.map atan
sinh = V.map sinh
cosh = V.map cosh
tanh = V.map tanh
asinh = V.map asinh
acosh = V.map acosh
atanh = V.map atanh
-- | Empty params
data NoParams = NoParams deriving (Eq, Ord, Read, Enum, Bounded)
instance NFData NoParams where
rnf NoParams = ()
instance Show NoParams where
show NoParams = ""
instance Num NoParams where
_ + _ = NoParams
_ * _ = NoParams
abs _ = NoParams
signum _ = NoParams
fromInteger _ = NoParams
negate _ = NoParams
instance Fractional NoParams where
fromRational _ = NoParams
recip _ = NoParams
instance Floating NoParams where
pi = NoParams
exp = const NoParams
log = const NoParams
sqrt = const NoParams
_ ** _ = NoParams
logBase _ _ = NoParams
sin = const NoParams
cos = const NoParams
tan = const NoParams
asin = const NoParams
acos = const NoParams
atan = const NoParams
sinh = const NoParams
cosh = const NoParams
tanh = const NoParams
asinh = const NoParams
acosh = const NoParams
atanh = const NoParams
onParams ∷ (forall w . Parametric w ⇒ w → a) → Params → a
onParams fn (AnyParam r) = fn r
onParams fn (Params ws) = fn ws
liftParams ∷ (forall w . Parametric w ⇒ w → w) → Params → Params
liftParams fn (AnyParam r) = AnyParam $ fn r
liftParams fn p = onParams (Params ∘ fn) p
onParams2 ∷ (forall w . Parametric w ⇒ w → w → a) → Params → Params → a
onParams2 fn (AnyParam lr) (AnyParam rr) = fn lr rr
onParams2 fn (AnyParam lr) (Params rws) = fn (fromDouble lr) rws
onParams2 fn (Params lws) (AnyParam rr) = fn lws (fromDouble rr)
onParams2 fn (Params lws) (Params rws) = case eqT' lws rws of
Just Refl → fn lws rws
_ → error $ "params type mismatch: '" ++ typeName lws ++ "' and '" ++ typeName rws ++ "'"
where
eqT' ∷ (Typeable u, Typeable v) ⇒ u → v → Maybe (u :~: v)
eqT' _ _ = eqT
liftParams2 ∷ (forall w . Parametric w ⇒ w → w → w) → Params → Params → Params
liftParams2 fn (AnyParam lr) (AnyParam rr) = AnyParam $ fn (fromDouble lr) (fromDouble rr)
liftParams2 fn l r = onParams2 ((Params ∘) ∘ fn) l r
-- | Force cast to specified type
castParams ∷ Parametric a ⇒ Params → a
castParams (AnyParam r) = fromDouble r
castParams (Params p) = fromMaybe (castError p) ∘ cast $ p where
castError ∷ (Parametric u, Parametric v) ⇒ u → v
castError x = fix $ \y → error $ "castParams: type mismatch, expected '" ++ typeName y ++ "', got '" ++ typeName x ++ "'"
-- Utils
typeName ∷ Typeable a ⇒ a → String
typeName = show ∘ typeRep ∘ proxy'
proxy' ∷ a → Proxy a
proxy' _ = Proxy
fromDouble ∷ Parametric w ⇒ Double → w
fromDouble = fromRational ∘ toRational
|
mvoidex/trainee
|
src/Numeric/Trainee/Params.hs
|
bsd-3-clause
| 7,154 | 24 | 13 | 1,498 | 3,137 | 1,621 | 1,516 | -1 | -1 |
module Main where
import Game.GoreAndAsh
import Logger.API
-- | Application monad that is used for implementation of game API
type AppMonad = LoggerT Spider GMSpider
-- The application should be generic in the host monad that is used
app :: LoggerMonad t m => m ()
app = do
msgE <- inputMessage
outputMessage $ fmap (\msg -> "You said: " ++ msg) msgE
main :: IO ()
main = runGM $ runLoggerT (app :: AppMonad ())
|
Teaspot-Studio/gore-and-ash
|
examples/Logger.hs
|
bsd-3-clause
| 420 | 0 | 11 | 84 | 115 | 61 | 54 | 10 | 1 |
{-# LANGUAGE DefaultSignatures,
TypeOperators,
ScopedTypeVariables,
DefaultSignatures,
FlexibleContexts,
FlexibleInstances,
OverloadedStrings,
TupleSections,
MagicHash,
CPP,
JavaScriptFFI,
ForeignFunctionInterface,
UnliftedFFITypes,
BangPatterns
#-}
module GHCJS.Marshal ( FromJSVal(..)
, ToJSVal(..)
, toJSVal_aeson
, toJSVal_pure
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans.Maybe (MaybeT(..), runMaybeT)
import qualified Data.Aeson as AE
import Data.Attoparsec.Number (Number(..))
import Data.Bits ((.&.))
import Data.Char (chr, ord)
import qualified Data.HashMap.Strict as H
import Data.Int (Int8, Int16, Int32)
import qualified Data.JSString as JSS
import qualified Data.JSString.Text as JSS
import Data.Maybe
import Data.Scientific (Scientific, scientific, fromFloatDigits)
import Data.Text (Text)
import qualified Data.Vector as V
import Data.Word (Word8, Word16, Word32, Word)
import Data.Primitive.ByteArray
import Unsafe.Coerce (unsafeCoerce)
import GHC.Int
import GHC.Word
import GHC.Types
import GHC.Float
import GHC.Prim
import GHC.Generics
import GHCJS.Types
import GHCJS.Foreign.Internal
import GHCJS.Marshal.Pure
import qualified JavaScript.Array as A
import qualified JavaScript.Array.Internal as AI
import qualified JavaScript.Object as O
import qualified JavaScript.Object.Internal as OI
import GHCJS.Marshal.Internal
instance FromJSVal JSVal where
fromJSValUnchecked x = return x
{-# INLINE fromJSValUnchecked #-}
fromJSVal = return . Just
{-# INLINE fromJSVal #-}
instance FromJSVal () where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
-- {-# INLINE fromJSVal #-}
instance FromJSVal a => FromJSVal [a] where
fromJSVal = fromJSValListOf
{-# INLINE fromJSVal #-}
instance FromJSVal a => FromJSVal (Maybe a) where
fromJSValUnchecked x | isUndefined x || isNull x = return Nothing
| otherwise = fromJSVal x
{-# INLINE fromJSValUnchecked #-}
fromJSVal x | isUndefined x || isNull x = return (Just Nothing)
| otherwise = fmap (fmap Just) fromJSVal x
{-# INLINE fromJSVal #-}
instance FromJSVal JSString where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Text where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Char where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
fromJSValUncheckedListOf = fromJSValUnchecked_pure
{-# INLINE fromJSValListOf #-}
fromJSValListOf = fromJSVal_pure
{-# INLINE fromJSValUncheckedListOf #-}
instance FromJSVal Bool where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Int where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Int8 where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Int16 where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Int32 where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Word where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Word8 where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Word16 where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Word32 where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Float where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal Double where
fromJSValUnchecked = fromJSValUnchecked_pure
{-# INLINE fromJSValUnchecked #-}
fromJSVal = fromJSVal_pure
{-# INLINE fromJSVal #-}
instance FromJSVal AE.Value where
fromJSVal r = case jsonTypeOf r of
JSONNull -> return (Just AE.Null)
JSONInteger -> liftM (AE.Number . flip scientific 0 . (toInteger :: Int -> Integer))
<$> fromJSVal r
JSONFloat -> liftM (AE.Number . (fromFloatDigits :: Double -> Scientific))
<$> fromJSVal r
JSONBool -> liftM AE.Bool <$> fromJSVal r
JSONString -> liftM AE.String <$> fromJSVal r
JSONArray -> liftM (AE.Array . V.fromList) <$> fromJSVal r
JSONObject -> do
props <- OI.listProps (OI.Object r)
runMaybeT $ do
propVals <- forM props $ \p -> do
v <- MaybeT (fromJSVal =<< OI.getProp p (OI.Object r))
return (JSS.textFromJSString p, v)
return (AE.Object (H.fromList propVals))
{-# INLINE fromJSVal #-}
instance (FromJSVal a, FromJSVal b) => FromJSVal (a,b) where
fromJSVal r = runMaybeT $ (,) <$> jf r 0 <*> jf r 1
{-# INLINE fromJSVal #-}
instance (FromJSVal a, FromJSVal b, FromJSVal c) => FromJSVal (a,b,c) where
fromJSVal r = runMaybeT $ (,,) <$> jf r 0 <*> jf r 1 <*> jf r 2
{-# INLINE fromJSVal #-}
instance (FromJSVal a, FromJSVal b, FromJSVal c, FromJSVal d) => FromJSVal (a,b,c,d) where
fromJSVal r = runMaybeT $ (,,,) <$> jf r 0 <*> jf r 1 <*> jf r 2 <*> jf r 3
{-# INLINE fromJSVal #-}
instance (FromJSVal a, FromJSVal b, FromJSVal c, FromJSVal d, FromJSVal e) => FromJSVal (a,b,c,d,e) where
fromJSVal r = runMaybeT $ (,,,,) <$> jf r 0 <*> jf r 1 <*> jf r 2 <*> jf r 3 <*> jf r 4
{-# INLINE fromJSVal #-}
instance (FromJSVal a, FromJSVal b, FromJSVal c, FromJSVal d, FromJSVal e, FromJSVal f) => FromJSVal (a,b,c,d,e,f) where
fromJSVal r = runMaybeT $ (,,,,,) <$> jf r 0 <*> jf r 1 <*> jf r 2 <*> jf r 3 <*> jf r 4 <*> jf r 5
{-# INLINE fromJSVal #-}
instance (FromJSVal a, FromJSVal b, FromJSVal c, FromJSVal d, FromJSVal e, FromJSVal f, FromJSVal g) => FromJSVal (a,b,c,d,e,f,g) where
fromJSVal r = runMaybeT $ (,,,,,,) <$> jf r 0 <*> jf r 1 <*> jf r 2 <*> jf r 3 <*> jf r 4 <*> jf r 5 <*> jf r 6
{-# INLINE fromJSVal #-}
instance (FromJSVal a, FromJSVal b, FromJSVal c, FromJSVal d, FromJSVal e, FromJSVal f, FromJSVal g, FromJSVal h) => FromJSVal (a,b,c,d,e,f,g,h) where
fromJSVal r = runMaybeT $ (,,,,,,,) <$> jf r 0 <*> jf r 1 <*> jf r 2 <*> jf r 3 <*> jf r 4 <*> jf r 5 <*> jf r 6 <*> jf r 7
{-# INLINE fromJSVal #-}
jf :: FromJSVal a => JSVal -> Int -> MaybeT IO a
jf r n = MaybeT $ do
r' <- AI.read n (AI.SomeJSArray r)
  if isUndefined r' -- check the element just read; out-of-range reads yield undefined
then return Nothing
else fromJSVal r'
instance ToJSVal JSVal where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal AE.Value where
toJSVal = toJSVal_aeson
{-# INLINE toJSVal #-}
instance ToJSVal JSString where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Text where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Char where
toJSVal = return . pToJSVal
{-# INLINE toJSVal #-}
toJSValListOf = return . pToJSVal
{-# INLINE toJSValListOf #-}
instance ToJSVal Bool where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Int where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Int8 where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Int16 where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Int32 where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Word where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Word8 where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Word16 where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Word32 where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Float where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal Double where
toJSVal = toJSVal_pure
{-# INLINE toJSVal #-}
instance ToJSVal a => ToJSVal [a] where
toJSVal = toJSValListOf
{-# INLINE toJSVal #-}
instance ToJSVal a => ToJSVal (Maybe a) where
toJSVal Nothing = return jsNull
toJSVal (Just a) = toJSVal a
{-# INLINE toJSVal #-}
instance (ToJSVal a, ToJSVal b) => ToJSVal (a,b) where
toJSVal (a,b) = join $ arr2 <$> toJSVal a <*> toJSVal b
{-# INLINE toJSVal #-}
instance (ToJSVal a, ToJSVal b, ToJSVal c) => ToJSVal (a,b,c) where
toJSVal (a,b,c) = join $ arr3 <$> toJSVal a <*> toJSVal b <*> toJSVal c
{-# INLINE toJSVal #-}
instance (ToJSVal a, ToJSVal b, ToJSVal c, ToJSVal d) => ToJSVal (a,b,c,d) where
toJSVal (a,b,c,d) = join $ arr4 <$> toJSVal a <*> toJSVal b <*> toJSVal c <*> toJSVal d
{-# INLINE toJSVal #-}
instance (ToJSVal a, ToJSVal b, ToJSVal c, ToJSVal d, ToJSVal e) => ToJSVal (a,b,c,d,e) where
toJSVal (a,b,c,d,e) = join $ arr5 <$> toJSVal a <*> toJSVal b <*> toJSVal c <*> toJSVal d <*> toJSVal e
{-# INLINE toJSVal #-}
instance (ToJSVal a, ToJSVal b, ToJSVal c, ToJSVal d, ToJSVal e, ToJSVal f) => ToJSVal (a,b,c,d,e,f) where
toJSVal (a,b,c,d,e,f) = join $ arr6 <$> toJSVal a <*> toJSVal b <*> toJSVal c <*> toJSVal d <*> toJSVal e <*> toJSVal f
{-# INLINE toJSVal #-}
instance (ToJSVal a, ToJSVal b, ToJSVal c, ToJSVal d, ToJSVal e, ToJSVal f, ToJSVal g) => ToJSVal (a,b,c,d,e,f,g) where
toJSVal (a,b,c,d,e,f,g) = join $ arr7 <$> toJSVal a <*> toJSVal b <*> toJSVal c <*> toJSVal d <*> toJSVal e <*> toJSVal f <*> toJSVal g
{-# INLINE toJSVal #-}
foreign import javascript unsafe "[$1]" arr1 :: JSVal -> IO JSVal
foreign import javascript unsafe "[$1,$2]" arr2 :: JSVal -> JSVal -> IO JSVal
foreign import javascript unsafe "[$1,$2,$3]" arr3 :: JSVal -> JSVal -> JSVal -> IO JSVal
foreign import javascript unsafe "[$1,$2,$3,$4]" arr4 :: JSVal -> JSVal -> JSVal -> JSVal -> IO JSVal
foreign import javascript unsafe "[$1,$2,$3,$4,$5]" arr5 :: JSVal -> JSVal -> JSVal -> JSVal -> JSVal -> IO JSVal
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6]" arr6 :: JSVal -> JSVal -> JSVal -> JSVal -> JSVal -> JSVal -> IO JSVal
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7]" arr7 :: JSVal -> JSVal -> JSVal -> JSVal -> JSVal -> JSVal -> JSVal -> IO JSVal
toJSVal_aeson :: AE.ToJSON a => a -> IO JSVal
toJSVal_aeson x = cv (AE.toJSON x)
where
cv = convertValue
convertValue :: AE.Value -> IO JSVal
convertValue AE.Null = return jsNull
convertValue (AE.String t) = return (pToJSVal t)
convertValue (AE.Array a) = (\(AI.SomeJSArray x) -> x) <$>
(AI.fromListIO =<< mapM convertValue (V.toList a))
convertValue (AE.Number n) = toJSVal (realToFrac n :: Double)
convertValue (AE.Bool b) = return (toJSBool b)
convertValue (AE.Object o) = do
obj@(OI.Object obj') <- OI.create
mapM_ (\(k,v) -> convertValue v >>= \v' -> OI.setProp (JSS.textToJSString k) v' obj) (H.toList o)
return obj'
|
ghcjs/ghcjs-base
|
GHCJS/Marshal.hs
|
mit
| 12,526 | 39 | 26 | 3,188 | 3,541 | 1,898 | 1,643 | 284 | 6 |
{-|
@since 2.9.0
Module: Database.Persist.Sql.Raw.QQ
Description: QuasiQuoters for performing raw SQL queries
This module exports convenient QuasiQuoters to perform raw SQL queries.
All QuasiQuoters follow the same pattern and are analogous to the similarly named
functions exported from 'Database.Persist.Sql.Raw'. Neither the quoted
function's behaviour, nor its return value is altered during the translation
and all documentation provided with it holds.
The QuasiQuoters in this module perform a simple substitution on the query text
that allows value substitutions, table name substitutions, as well as column name
substitutions.
-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Database.Persist.Sql.Raw.QQ (
-- * Sql QuasiQuoters
queryQQ
, queryResQQ
, sqlQQ
, executeQQ
, executeCountQQ
, ToRow(..)
) where
import Prelude
import Control.Arrow (first, second)
import Control.Monad.Reader (ask)
import Data.List.NonEmpty (NonEmpty(..), (<|))
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.List as List
import Data.Text (Text, pack, unpack, intercalate)
import Data.Maybe (fromMaybe, Maybe(..))
import Data.Monoid (mempty, (<>))
import qualified Language.Haskell.TH as TH
import Language.Haskell.TH.Quote
import Language.Haskell.Meta.Parse
import Database.Persist.Class (toPersistValue)
import Database.Persist
import Database.Persist.Sql
class ToRow a where
toRow :: a -> NonEmpty PersistValue
instance PersistField a => ToRow (Single a) where
toRow (Single a) = toPersistValue a :| []
instance (PersistField a, PersistField b) => ToRow (a, b) where
toRow (a, b) = toPersistValue a <| toRow (Single b)
instance (PersistField a, PersistField b, PersistField c) => ToRow (a, b, c) where
toRow (a, b, c) = toPersistValue a <| toRow (b, c)
instance (PersistField a, PersistField b, PersistField c, PersistField d) => ToRow (a, b, c, d) where
toRow (a, b, c, d) = toPersistValue a <| toRow (b, c, d)
instance (PersistField a, PersistField b, PersistField c, PersistField d, PersistField e) => ToRow (a, b, c, d, e) where
toRow (a, b, c, d, e) = toPersistValue a <| toRow (b, c, d, e)
data Token
= Literal String
| Value String
| Values String
| Rows String
| TableName String
| ColumnName String
deriving Show
parseHaskell :: (String -> Token) -> String -> String -> [Token]
parseHaskell cons = go
where
go a [] = [Literal (reverse a)]
go a ('\\':x:xs) = go (x:a) xs
go a ['\\'] = go ('\\':a) []
go a ('}':xs) = cons (reverse a) : parseStr [] xs
go a (x:xs) = go (x:a) xs
parseStr :: String -> String -> [Token]
parseStr a [] = [Literal (reverse a)]
parseStr a ('\\':x:xs) = parseStr (x:a) xs
parseStr a ['\\'] = parseStr ('\\':a) []
parseStr a ('#':'{':xs) = Literal (reverse a) : parseHaskell Value [] xs
parseStr a ('%':'{':xs) = Literal (reverse a) : parseHaskell Values [] xs
parseStr a ('*':'{':xs) = Literal (reverse a) : parseHaskell Rows [] xs
parseStr a ('^':'{':xs) = Literal (reverse a) : parseHaskell TableName [] xs
parseStr a ('@':'{':xs) = Literal (reverse a) : parseHaskell ColumnName [] xs
parseStr a (x:xs) = parseStr (x:a) xs
interpolateValues :: PersistField a => NonEmpty a -> (Text, [[PersistValue]]) -> (Text, [[PersistValue]])
interpolateValues xs =
first (mkPlaceholders values <>) .
second (NonEmpty.toList values :)
where
values = NonEmpty.map toPersistValue xs
interpolateRows :: ToRow a => NonEmpty a -> (Text, [[PersistValue]]) -> (Text, [[PersistValue]])
interpolateRows xs =
first (placeholders <>)
. second (values :)
where
rows :: NonEmpty (NonEmpty PersistValue)
rows = NonEmpty.map toRow xs
n = NonEmpty.length rows
placeholders = n `timesCommaSeparated` mkPlaceholders (NonEmpty.head rows)
values = List.concatMap NonEmpty.toList $ NonEmpty.toList rows
mkPlaceholders :: NonEmpty a -> Text
mkPlaceholders values = "(" <> n `timesCommaSeparated` "?" <> ")"
where
n = NonEmpty.length values
timesCommaSeparated :: Int -> Text -> Text
timesCommaSeparated n = intercalate "," . replicate n
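-- A hypothetical worked example of the two helpers above (inputs chosen for
-- illustration): 'mkPlaceholders' builds the parenthesised parameter group used
-- by the @%{..}@ and @*{..}@ substitutions, and 'timesCommaSeparated' repeats a
-- fragment once per row:
--
-- > mkPlaceholders ("a" :| ["b", "c"])  ==  "(?,?,?)"
-- > 2 `timesCommaSeparated` "(?,?)"     ==  "(?,?),(?,?)"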
makeExpr :: TH.ExpQ -> [Token] -> TH.ExpQ
makeExpr fun toks = do
TH.infixE
(Just [| uncurry $(fun) . second concat |])
([| (=<<) |])
(Just $ go toks)
where
go :: [Token] -> TH.ExpQ
go [] = [| return (mempty, []) |]
go (Literal a:xs) =
TH.appE
[| fmap $ first (pack a <>) |]
(go xs)
go (Value a:xs) =
TH.appE
[| fmap $ first ("?" <>) . second ([toPersistValue $(reifyExp a)] :) |]
(go xs)
go (Values a:xs) =
TH.appE
[| fmap $ interpolateValues $(reifyExp a) |]
(go xs)
go (Rows a:xs) =
TH.appE
[| fmap $ interpolateRows $(reifyExp a) |]
(go xs)
go (ColumnName a:xs) = do
colN <- TH.newName "field"
TH.infixE
(Just [| getFieldName $(reifyExp a) |])
[| (>>=) |]
(Just $ TH.lamE [ TH.varP colN ] $
TH.appE
[| fmap $ first ($(TH.varE colN) <>) |]
(go xs))
go (TableName a:xs) = do
typeN <- TH.lookupTypeName a >>= \case
Just t -> return t
Nothing -> fail $ "Type not in scope: " ++ show a
tableN <- TH.newName "table"
TH.infixE
(Just $
TH.appE
[| getTableName |]
(TH.sigE
[| error "record" |] $
(TH.conT typeN)))
[| (>>=) |]
(Just $ TH.lamE [ TH.varP tableN ] $
TH.appE
[| fmap $ first ($(TH.varE tableN) <>) |]
(go xs))
reifyExp :: String -> TH.Q TH.Exp
reifyExp s =
case parseExp s of
Left e -> TH.reportError e >> [| mempty |]
Right v -> return v
makeQQ :: TH.Q TH.Exp -> QuasiQuoter
makeQQ x = QuasiQuoter
(makeExpr x . parseStr [])
(error "Cannot use qc as a pattern")
(error "Cannot use qc as a type")
(error "Cannot use qc as a dec")
-- | QuasiQuoter for performing raw SQL queries, analogous to
-- 'Database.Persist.Sql.Raw.rawSql'
--
-- This and the following are convenient QuasiQuoters to perform raw SQL
-- queries. They each follow the same pattern and are analogous to
-- the similarly named @raw@ functions. Neither the quoted function's
-- behaviour, nor its return value is altered during the translation and
-- all documentation provided with it holds.
--
-- These QuasiQuoters perform a simple substitution on the query text, that
-- allows value substitutions, table name substitutions as well as column name
-- substitutions.
--
-- Here is a small example:
--
-- Given the following simple model:
--
-- @
-- Category
-- rgt Int default=0
-- lft Int default=0
-- nam Text
-- @
--
-- We can now execute this raw query:
--
-- @
-- let lft = 10 :: `Int`
-- rgt = 20 :: `Int`
-- width = rgt `-` lft
-- nams = "first" `:|` ["second", "third"]
-- in [sqlQQ|
-- DELETE FROM ^{Category} WHERE \@{CategoryLft} BETWEEN \#{lft} AND \#{rgt};
-- UPDATE category SET \@{CategoryRgt} = \@{CategoryRgt} - \#{width} WHERE \@{CategoryRgt} > \#{rgt};
-- UPDATE category SET \@{CategoryLft} = \@{CategoryLft} - \#{width} WHERE \@{CategoryLft} > \#{rgt};
-- SELECT ?? FROM ^{Category} WHERE ^{Category}.\@{CategoryNam} IN %{nams};
-- INSERT INTO ^{Category}(\@{CategoryNam}) VALUES *{`Single` `<$>` nams};
-- |]
-- @
--
-- - @^{TableName}@ looks up the table's name and escapes it
-- - @\@{ColumnName}@ looks up the column's name and properly escapes it
-- - @#{value}@ inserts the value via the usual parameter substitution mechanism
-- - @%{values}@ inserts comma separated values (of a 'Data.List.NonEmpty.NonEmpty' list) (since 2.9.1)
-- - @*{rows}@ inserts a 'Data.List.NonEmpty.NonEmpty' list of tuples for use with a multirow @INSERT@ statement (since 2.9.2)
--
-- @since 2.9.0
sqlQQ :: QuasiQuoter
sqlQQ = makeQQ [| rawSql |]
-- | Analogous to 'Database.Persist.Sql.Raw.rawExecute'
--
-- @since 2.9.0
executeQQ :: QuasiQuoter
executeQQ = makeQQ [| rawExecute |]
-- | Analogous to 'Database.Persist.Sql.Raw.rawExecuteCount'
--
-- @since 2.9.0
executeCountQQ :: QuasiQuoter
executeCountQQ = makeQQ [| rawExecuteCount |]
-- | Analogous to 'Database.Persist.Sql.Raw.rawQuery'
--
-- @since 2.9.0
queryQQ :: QuasiQuoter
queryQQ = makeQQ [| rawQuery |]
-- | Analogous to 'Database.Persist.Sql.Raw.rawQueryRes'
--
-- @since 2.9.0
queryResQQ :: QuasiQuoter
queryResQQ = makeQQ [| rawQueryRes |]
|
paul-rouse/persistent
|
persistent-qq/src/Database/Persist/Sql/Raw/QQ.hs
|
mit
| 8,834 | 0 | 17 | 2,136 | 2,242 | 1,239 | 1,003 | 153 | 8 |
module Test.Chell.Types
( Test
, test
, testName
, TestOptions
, defaultTestOptions
, testOptionSeed
, testOptionTimeout
, TestResult(TestPassed, TestSkipped, TestFailed, TestAborted)
, Failure
, failure
, failureLocation
, failureMessage
, Location
, location
, locationFile
, locationModule
, locationLine
, Suite
, suite
, suiteName
, suiteTests
, SuiteOrTest
, skipIf
, skipWhen
, runTest
, handleJankyIO
) where
import qualified Control.Exception
import Control.Exception (SomeException, Handler(..), catches, throwIO)
import System.Timeout (timeout)
-- | A 'Test' is, essentially, an IO action that returns a 'TestResult'. Tests
-- are aggregated into suites (see 'Suite').
data Test = Test String (TestOptions -> IO TestResult)
instance Show Test where
showsPrec d (Test name _) = showParen (d > 10) (showString "Test " . shows name)
-- | Define a test, with the given name and implementation.
test :: String -> (TestOptions -> IO TestResult) -> Test
test = Test
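-- A hypothetical usage sketch (the test name and body below are made up, not
-- part of this module):
--
-- > test_Sanity :: Test
-- > test_Sanity = test "sanity" (\_opts -> return (TestPassed []))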
-- | Get the name a test was given when it was defined; see 'test'.
testName :: Test -> String
testName (Test name _) = name
-- | Test options are passed to each test, and control details about how the
-- test should be run.
data TestOptions = TestOptions
{
-- | Get the RNG seed for this test run. The seed is generated once, in
-- 'defaultMain', and used for all tests. It is also logged to reports
-- using a note.
--
-- When using 'defaultMain', users may specify a seed using the
-- @--seed@ command-line option.
--
-- 'testOptionSeed' is a field accessor, and can be used to update
-- a 'TestOptions' value.
testOptionSeed :: Int
    -- | An optional timeout, in milliseconds. Tests which run longer than
-- this timeout will be aborted.
--
-- When using 'defaultMain', users may specify a timeout using the
-- @--timeout@ command-line option.
--
-- 'testOptionTimeout' is a field accessor, and can be used to update
-- a 'TestOptions' value.
, testOptionTimeout :: Maybe Int
}
deriving (Show, Eq)
-- | Default test options.
--
-- >$ ghci
-- >Prelude> import Test.Chell
-- >
-- >Test.Chell> testOptionSeed defaultTestOptions
-- >0
-- >
-- >Test.Chell> testOptionTimeout defaultTestOptions
-- >Nothing
defaultTestOptions :: TestOptions
defaultTestOptions = TestOptions
{ testOptionSeed = 0
, testOptionTimeout = Nothing
}
-- | The result of running a test.
--
-- To support future extensions to the testing API, any users of this module
-- who pattern-match against the 'TestResult' constructors should include a
-- default case. If no default case is provided, a warning will be issued.
data TestResult
-- | The test passed, and generated the given notes.
= TestPassed [(String, String)]
-- | The test did not run, because it was skipped with 'skipIf'
-- or 'skipWhen'.
| TestSkipped
-- | The test failed, generating the given notes and failures.
| TestFailed [(String, String)] [Failure]
-- | The test aborted with an error message, and generated the given
-- notes.
| TestAborted [(String, String)] String
-- Not exported; used to generate GHC warnings for users who don't
-- provide a default case.
| TestResultCaseMustHaveDefault
deriving (Show, Eq)
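-- A hypothetical sketch of the recommended pattern match with a default case
-- (the function name is made up):
--
-- > describeResult :: TestResult -> String
-- > describeResult (TestPassed _)    = "passed"
-- > describeResult TestSkipped       = "skipped"
-- > describeResult (TestFailed _ _)  = "failed"
-- > describeResult (TestAborted _ _) = "aborted"
-- > describeResult _                 = "unrecognised result"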
-- | Contains details about a test failure.
data Failure = Failure
{
-- | If given, the location of the failing assertion, expectation,
-- etc.
--
-- 'failureLocation' is a field accessor, and can be used to update
-- a 'Failure' value.
failureLocation :: Maybe Location
-- | If given, a message which explains why the test failed.
--
-- 'failureMessage' is a field accessor, and can be used to update
-- a 'Failure' value.
, failureMessage :: String
}
deriving (Show, Eq)
-- | An empty 'Failure'; use the field accessors to populate this value.
failure :: Failure
failure = Failure Nothing ""
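-- A hypothetical record-update sketch (the message text is made up):
--
-- > failure { failureMessage = "expected 1 but got 2" }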
-- | Contains details about a location in the test source file.
data Location = Location
{
-- | A path to a source file, or empty if not provided.
--
-- 'locationFile' is a field accessor, and can be used to update
-- a 'Location' value.
locationFile :: String
-- | A Haskell module name, or empty if not provided.
--
-- 'locationModule' is a field accessor, and can be used to update
-- a 'Location' value.
, locationModule :: String
-- | A line number, or Nothing if not provided.
--
-- 'locationLine' is a field accessor, and can be used to update
-- a 'Location' value.
, locationLine :: Maybe Integer
}
deriving (Show, Eq)
-- | An empty 'Location'; use the field accessors to populate this value.
location :: Location
location = Location "" "" Nothing
-- | A suite is a named collection of tests.
--
-- Note: earlier versions of Chell permitted arbitrary nesting of test suites.
-- This feature proved too unwieldy, and was removed. A similar result can be
-- achieved with 'suiteTests'; see the documentation for 'suite'.
data Suite = Suite String [Test]
deriving (Show)
class SuiteOrTest a where
skipIf_ :: Bool -> a -> a
skipWhen_ :: IO Bool -> a -> a
instance SuiteOrTest Suite where
skipIf_ skip s@(Suite name children) = if skip
then Suite name (map (skipIf_ skip) children)
else s
skipWhen_ p (Suite name children) = Suite name (map (skipWhen_ p) children)
instance SuiteOrTest Test where
skipIf_ skip t@(Test name _) = if skip
then Test name (\_ -> return TestSkipped)
else t
skipWhen_ p (Test name io) = Test name (\opts -> do
skip <- p
if skip then return TestSkipped else io opts)
-- | Conditionally skip tests. Use this to avoid commenting out tests
-- which are currently broken, or do not work on the current platform.
--
-- @
--tests :: Suite
--tests = 'suite' \"tests\"
-- [ test_Foo
-- , 'skipIf' builtOnUnix test_WindowsSpecific
-- , test_Bar
-- ]
-- @
--
skipIf :: SuiteOrTest a => Bool -> a -> a
skipIf = skipIf_
-- | Conditionally skip tests, depending on the result of a runtime check. The
-- predicate is checked before each test is started.
--
-- @
--tests :: Suite
--tests = 'suite' \"tests\"
-- [ test_Foo
-- , 'skipWhen' noNetwork test_PingGoogle
-- , test_Bar
-- ]
-- @
skipWhen :: SuiteOrTest a => IO Bool -> a -> a
skipWhen = skipWhen_
-- | Define a new 'Suite', with the given name and children.
--
-- Note: earlier versions of Chell permitted arbitrary nesting of test suites.
-- This feature proved too unwieldy, and was removed. A similar result can be
-- achieved with 'suiteTests':
--
-- @
--test_Addition :: Test
--test_Subtraction :: Test
--test_Show :: Test
--
--suite_Math :: Suite
--suite_Math = 'suite' \"math\"
-- [ test_Addition
-- , test_Subtraction
-- ]
--
--suite_Prelude :: Suite
--suite_Prelude = 'suite' \"prelude\"
-- (
-- [ test_Show
-- ]
-- ++ suiteTests suite_Math
-- )
-- @
suite :: String -> [Test] -> Suite
suite = Suite
-- | Get a suite's name. Suite names may be any string, but are typically
-- plain ASCII so users can easily type them on the command line.
--
-- >$ ghci chell-example.hs
-- >Ok, modules loaded: Main.
-- >
-- >*Main> suiteName tests_Math
-- >"math"
suiteName :: Suite -> String
suiteName (Suite name _) = name
-- | Get the full list of tests contained within this 'Suite'. Each test is
-- given its full name within the test hierarchy, where names are separated
-- by periods.
--
-- >$ ghci chell-example.hs
-- >Ok, modules loaded: Main.
-- >
-- >*Main> suiteTests tests_Math
-- >[Test "math.addition",Test "math.subtraction"]
suiteTests :: Suite -> [Test]
suiteTests = go "" where
prefixed prefix str = if null prefix
then str
else prefix ++ "." ++ str
go prefix (Suite name children) = concatMap (step (prefixed prefix name)) children
step prefix (Test name io) = [Test (prefixed prefix name) io]
-- | Run a test, wrapped in error handlers. This will return 'TestAborted' if
-- the test throws an exception or times out.
runTest :: Test -> TestOptions -> IO TestResult
runTest (Test _ io) options = handleJankyIO options (io options) (return [])
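-- A hypothetical usage sketch (the test value is made up):
--
-- > result <- runTest (test "demo" (\_ -> return (TestPassed []))) defaultTestOptions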
handleJankyIO :: TestOptions -> IO TestResult -> IO [(String, String)] -> IO TestResult
handleJankyIO opts getResult getNotes = do
let withTimeout = case testOptionTimeout opts of
Just time -> timeout (time * 1000)
Nothing -> fmap Just
let hitTimeout = str where
str = "Test timed out after " ++ show time ++ " milliseconds"
Just time = testOptionTimeout opts
tried <- withTimeout (try getResult)
case tried of
Just (Right ret) -> return ret
Nothing -> do
notes <- getNotes
return (TestAborted notes hitTimeout)
Just (Left err) -> do
notes <- getNotes
return (TestAborted notes err)
try :: IO a -> IO (Either String a)
try io = catches (fmap Right io) [Handler handleAsync, Handler handleExc] where
handleAsync :: Control.Exception.AsyncException -> IO a
handleAsync = throwIO
handleExc :: SomeException -> IO (Either String a)
handleExc exc = return (Left ("Test aborted due to exception: " ++ show exc))
|
shlevy/chell
|
lib/Test/Chell/Types.hs
|
mit
| 8,963 | 190 | 9 | 1,786 | 1,596 | 942 | 654 | 124 | 4 |
module Main where
import CPS.Syntax
import CPS.FromGHC
import GHC.Data as G
import GHC.Var as G
import GHC.Kind as G
import GHC.Type as G
import GHC.Syntax as G
import GHC.Primitives
import Name
import Utilities
import qualified Data.Set as S
import qualified Data.Map as M
functionExample :: G.Term
functionExample = G.Case (G.Value (G.Literal (Int 2))) intHashTy two [(G.DefaultAlt,
G.LetRec [(lifted_id, G.Value (G.Lambda (G.ATyVar a) (G.Value (G.Lambda (G.AnId x) (G.Var x))))),
(prim_id', G.Value (G.Lambda (G.AnId y) (G.Var y))),
(prim_id, G.Var lifted_id `G.TyApp` G.idType prim_id' `G.App` prim_id')] $
G.PrimOp Add [G.PrimOp Add [G.Value (G.Literal (Int 1)), G.Var prim_id `G.App` two], G.Var prim_id `G.App` two])] -- Use prim_id twice to test thunk update works
where
[a_n, id_n, prim_id_n, prim_id_n', x_n, y_n, two_n] = shadowyNames ["a", "id", "prim_id", "prim_id'", "x", "y", "two"]
a = G.TyVar { G.tyVarName = a_n, G.tyVarKind = G.LiftedTypeKind }
lifted_id = G.Id { G.idName = id_n, G.idType = G.ForAllTy a (G.TyVarTy a `G.mkFunTy` G.TyVarTy a) }
prim_id = G.Id { G.idName = prim_id_n, G.idType = intHashTy `G.mkFunTy` intHashTy }
prim_id' = G.Id { G.idName = prim_id_n', G.idType = intHashTy `G.mkFunTy` intHashTy }
x = G.Id { G.idName = x_n, G.idType = G.TyVarTy a }
y = G.Id { G.idName = y_n, G.idType = intHashTy }
two = G.Id { G.idName = two_n, G.idType = intHashTy }
dataExample :: G.Term
dataExample = G.Case (G.Value (G.Literal (Int 1))) intHashTy one [(G.DefaultAlt,
G.Case (G.Value (G.Data G.trueDataCon [] [] [])) intHashTy true [
(G.DefaultAlt, G.Value (G.Literal (Int 1))),
(G.DataAlt G.trueDataCon [] [], G.LetRec [(unboxy_fun, G.Value (G.Lambda (G.AnId one) (G.Value (G.Data (G.unboxedTupleDataCon 2) [G.intHashTy, G.boolTy] [] [one, true]))))] $
G.Case (G.Var unboxy_fun `G.App` one) G.intHashTy unbx [
(G.DataAlt (G.unboxedTupleDataCon 2) [] [x, y], G.Var x)])])]
where
[true_n, one_n, unbx_n, unboxy_fun_n, x_n, y_n] = shadowyNames ["true", "one", "unbx", "unboxy_fun", "x", "y"]
true = G.Id { G.idName = true_n, G.idType = G.boolTy }
one = G.Id { G.idName = one_n, G.idType = G.intHashTy }
unbx = G.Id { G.idName = unbx_n, G.idType = G.mkTyConAppTy (G.unboxedTupleTyCon 2) [G.intHashTy, G.boolTy] }
unboxy_fun = G.Id { G.idName = unboxy_fun_n, G.idType = G.mkFunTy G.intHashTy (G.idType unbx) }
x = G.Id { G.idName = x_n, G.idType = G.intHashTy }
y = G.Id { G.idName = y_n, G.idType = G.boolTy }
caseBoundFunctionExample :: G.Term
caseBoundFunctionExample = G.Case (G.Value (G.Lambda (G.ATyVar a) (G.Value (G.Lambda (G.AnId x) (G.Var x))))) intTy lifted_id [(G.DefaultAlt,
G.Case (G.Value (G.Literal (Int 1))) intTy one [(G.DefaultAlt,
G.LetRec [(boxed_one, G.Value (G.Data G.iHashDataCon [] [] [one]))] $
G.Var lifted_id `G.TyApp` intTy `G.App` boxed_one)])]
where
[a_n, one_n, boxed_one_n, lifted_id_n, x_n] = shadowyNames ["a", "one", "boxed_one", "lifted_id", "x"]
a = G.TyVar { G.tyVarName = a_n, G.tyVarKind = G.LiftedTypeKind }
one = G.Id { G.idName = one_n, G.idType = G.intHashTy }
boxed_one = G.Id { G.idName = boxed_one_n, G.idType = G.intTy }
lifted_id = G.Id { G.idName = lifted_id_n, G.idType = G.ForAllTy a (G.TyVarTy a `G.mkFunTy` G.TyVarTy a) }
x = G.Id { G.idName = x_n, G.idType = G.TyVarTy a }
the_example :: G.Term
--the_example = functionExample
--the_example = dataExample
the_example = caseBoundFunctionExample
main :: IO ()
main = do
ids <- initUniqueSupply 'x'
let (ids', halt_n) = freshName ids "halt"
halt = CoId { coIdName = halt_n, coIdType = [IntHashTy] }
steps e = e : unfoldr (\s -> let e = stateToTerm s
in case runLintM (lintTerm emptyUniqueMap (insertUniqueMap halt_n [IntHashTy] emptyUniqueMap) e) of
[] -> fmap ((,) e) (step s)
errs -> error (unlines (map pPrintRender errs))) s
where s = (mkInScopeSet (S.singleton halt_n), M.empty, (substFromCoIdSubst (mkCoIdSubst (S.singleton halt)), e), [])
putStrLn $ pPrintRender the_example
mapM_ (putStrLn . pPrintRender) $ steps $ fromTerm (ids', emptyInScopeSet) (emptyUniqueMap, the_example) (Unknown halt)
|
beni55/cps-core
|
Main.hs
|
bsd-3-clause
| 4,683 | 4 | 24 | 1,251 | 1,901 | 1,059 | 842 | 66 | 2 |
import Data.Compact
import Data.Compact.Internal
import qualified Data.Map as Map
main = do
let m = Map.fromList [(x,show x) | x <- [1..(10000::Integer)]]
c <- compactWithSharing m
print (length (show (getCompact c)))
c <- compact m
print (length (show (getCompact c)))
|
olsner/ghc
|
libraries/compact/tests/compact_largemap.hs
|
bsd-3-clause
| 281 | 0 | 15 | 51 | 138 | 70 | 68 | 9 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.ElastiCache.ResetCacheParameterGroup
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | The /ResetCacheParameterGroup/ action modifies the parameters of a cache
-- parameter group to the engine or system default value. You can reset specific
-- parameters by submitting a list of parameter names. To reset the entire cache
-- parameter group, specify the /ResetAllParameters/ and /CacheParameterGroupName/
-- parameters.
--
-- <http://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_ResetCacheParameterGroup.html>
module Network.AWS.ElastiCache.ResetCacheParameterGroup
(
-- * Request
ResetCacheParameterGroup
-- ** Request constructor
, resetCacheParameterGroup
-- ** Request lenses
, rcpgCacheParameterGroupName
, rcpgParameterNameValues
, rcpgResetAllParameters
-- * Response
, ResetCacheParameterGroupResponse
-- ** Response constructor
, resetCacheParameterGroupResponse
-- ** Response lenses
, rcpgrCacheParameterGroupName
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.ElastiCache.Types
import qualified GHC.Exts
data ResetCacheParameterGroup = ResetCacheParameterGroup
{ _rcpgCacheParameterGroupName :: Text
, _rcpgParameterNameValues :: List "member" ParameterNameValue
, _rcpgResetAllParameters :: Maybe Bool
} deriving (Eq, Read, Show)
-- | 'ResetCacheParameterGroup' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rcpgCacheParameterGroupName' @::@ 'Text'
--
-- * 'rcpgParameterNameValues' @::@ ['ParameterNameValue']
--
-- * 'rcpgResetAllParameters' @::@ 'Maybe' 'Bool'
--
resetCacheParameterGroup :: Text -- ^ 'rcpgCacheParameterGroupName'
-> ResetCacheParameterGroup
resetCacheParameterGroup p1 = ResetCacheParameterGroup
{ _rcpgCacheParameterGroupName = p1
, _rcpgResetAllParameters = Nothing
, _rcpgParameterNameValues = mempty
}
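-- A hypothetical usage sketch (the group name is made up, and the lens
-- operators are assumed to be re-exported by "Network.AWS.Prelude"):
--
-- > resetCacheParameterGroup "my-cache-params"
-- >     & rcpgResetAllParameters .~ Just True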
-- | The name of the cache parameter group to reset.
rcpgCacheParameterGroupName :: Lens' ResetCacheParameterGroup Text
rcpgCacheParameterGroupName =
lens _rcpgCacheParameterGroupName
(\s a -> s { _rcpgCacheParameterGroupName = a })
-- | An array of parameter names to be reset. If you are not resetting the entire
-- cache parameter group, you must specify at least one parameter name.
rcpgParameterNameValues :: Lens' ResetCacheParameterGroup [ParameterNameValue]
rcpgParameterNameValues =
lens _rcpgParameterNameValues (\s a -> s { _rcpgParameterNameValues = a })
. _List
-- | If /true/, all parameters in the cache parameter group will be reset to default
-- values. If /false/, no such action occurs.
--
-- Valid values: 'true' | 'false'
rcpgResetAllParameters :: Lens' ResetCacheParameterGroup (Maybe Bool)
rcpgResetAllParameters =
lens _rcpgResetAllParameters (\s a -> s { _rcpgResetAllParameters = a })
newtype ResetCacheParameterGroupResponse = ResetCacheParameterGroupResponse
{ _rcpgrCacheParameterGroupName :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'ResetCacheParameterGroupResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rcpgrCacheParameterGroupName' @::@ 'Maybe' 'Text'
--
resetCacheParameterGroupResponse :: ResetCacheParameterGroupResponse
resetCacheParameterGroupResponse = ResetCacheParameterGroupResponse
{ _rcpgrCacheParameterGroupName = Nothing
}
-- | The name of the cache parameter group.
rcpgrCacheParameterGroupName :: Lens' ResetCacheParameterGroupResponse (Maybe Text)
rcpgrCacheParameterGroupName =
lens _rcpgrCacheParameterGroupName
(\s a -> s { _rcpgrCacheParameterGroupName = a })
instance ToPath ResetCacheParameterGroup where
toPath = const "/"
instance ToQuery ResetCacheParameterGroup where
toQuery ResetCacheParameterGroup{..} = mconcat
[ "CacheParameterGroupName" =? _rcpgCacheParameterGroupName
, "ParameterNameValues" =? _rcpgParameterNameValues
, "ResetAllParameters" =? _rcpgResetAllParameters
]
instance ToHeaders ResetCacheParameterGroup
instance AWSRequest ResetCacheParameterGroup where
type Sv ResetCacheParameterGroup = ElastiCache
type Rs ResetCacheParameterGroup = ResetCacheParameterGroupResponse
request = post "ResetCacheParameterGroup"
response = xmlResponse
instance FromXML ResetCacheParameterGroupResponse where
parseXML = withElement "ResetCacheParameterGroupResult" $ \x -> ResetCacheParameterGroupResponse
<$> x .@? "CacheParameterGroupName"
|
romanb/amazonka
|
amazonka-elasticache/gen/Network/AWS/ElastiCache/ResetCacheParameterGroup.hs
|
mpl-2.0
| 5,559 | 3 | 10 | 1,035 | 579 | 350 | 229 | 72 | 1 |
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE NoMonomorphismRestriction #-}
module Parse ( parseDBus ) where
import Control.Applicative (empty, (<$), (<$>) )
import qualified Data.ByteString.UTF8 as UTF8
import Text.ParserCombinators.Parsec
import Network.DBus
parseDBus :: String -> SignatureElem -> Maybe DBusValue
parseDBus x t = either (const Nothing) Just $ parse (dbusvalue_p True t) "" x
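-- A hypothetical usage sketch (inputs chosen for illustration):
--
-- > parseDBus "true" SigBool
-- > -- Just (DBusBoolean True)
-- >
-- > parseDBus "[1, 2, 3]" (SigArray SigInt32)
-- > -- Just (DBusArray SigInt32 [DBusInt32 1, DBusInt32 2, DBusInt32 3])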
int_p :: (Read a, Integral a) => CharParser st a
int_p = read <$> do
prefix <- option "" (string "-")
(prefix++) <$> many1 digit
floating_p :: CharParser st Double
floating_p = read <$> do
prefix <- option "" (string "-")
a <- many1 digit
b <- optionMaybe (char '.' >> many1 digit)
return (prefix ++ a ++ case b of
Nothing -> ""
Just b -> '.':b)
str_p :: Bool -> CharParser st String
str_p toplevel@True = getInput
str_p False = quoted '"' <|> quoted '\''
where
quoted q = do
char q
s <- escapedCh `manyTill` char q
return s
where
escapedCh =
( '\\' <$ try (char '\\' >> char '\\') )
<|> ( q <$ try (char '\\' >> char q ) )
<|> anyChar
dbusvalue_p :: Bool -> SignatureElem -> CharParser st DBusValue
dbusvalue_p toplevel t = go t where
go SigByte = DBusByte <$> int_p
go SigBool = DBusBoolean <$> bool_p
go SigInt16 = DBusInt16 <$> int_p
go SigUInt16 = DBusUInt16 <$> int_p
go SigInt32 = DBusInt32 <$> int_p
go SigUInt32 = DBusUInt32 <$> int_p
go SigInt64 = DBusInt64 <$> int_p
go SigUInt64 = DBusUInt64 <$> int_p
go SigDouble = DBusDouble <$> floating_p
go SigString = str_p toplevel >>= \x -> return $ DBusString (PackedString $ UTF8.fromString x)
go SigObjectPath = str_p toplevel >>= \x -> return $ DBusObjectPath (ObjectPath x)
go SigSignature = signature_p
go (SigArray (SigDict kt vt)) = dict_p kt vt
go (SigArray et) = array_p et
go (SigDict kt vt) = dictentry_p kt vt
go (SigStruct ets) = struct_p ets
go SigUnixFD = DBusUnixFD <$> int_p
go SigVariant = variant_p toplevel
bool_p :: CharParser st Bool
bool_p = True <$ ( string "true" <|> string "1" )
<|> False <$ ( string "false" <|> string "0" )
signature_p = empty
variant_p toplevel = empty
struct_p ts = do
spaces
char '['
spaces
r <- items ts
spaces
char ']'
return (DBusStruct ts r)
where
items [] = return []
items (t:ts) = do
spaces
x <- dbusvalue_p False t
xs <- case ts of
[] -> return []
_ -> spaces >> char ',' >> spaces >> items ts
return (x:xs)
dict_p kt vt = do
spaces
char '{'
spaces
entries <- dictentry_p kt vt `sepBy` (try (spaces >> char ',' >> spaces))
spaces
char '}'
spaces
return $ DBusArray (SigDict kt vt) entries
dictentry_p kt vt = do
spaces
k <- dbusvalue_p False kt
spaces
char ':'
spaces
v <- dbusvalue_p False vt
spaces
return $ DBusDict k v
array_p t = do
spaces
char '['
spaces
items <- dbusvalue_p False t `sepBy` (try (spaces >> char ',' >> spaces))
spaces
char ']'
spaces
return $ DBusArray t items
|
jean-edouard/manager
|
xec/Parse.hs
|
gpl-2.0
| 3,814 | 0 | 16 | 927 | 1,232 | 597 | 635 | 99 | 18 |
{-# LANGUAGE JavaScriptFFI, OverloadedStrings, ScopedTypeVariables #-}
module Main where
import Data.Char
import Data.IORef
import Data.Monoid
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import GHCJS.Foreign
import GHCJS.Foreign.Callback
import GHCJS.Types
import GHCJS.Prim
import GHCJS.Marshal
import qualified Data.JSString as JSS
import qualified Data.JSString.Internal as JSSI
import System.Mem
import System.Mem.Weak
main :: IO ()
main = sequence_ [test1, test2, test3]
tou :: JSVal -> String
tou = map toUpper . JSS.unpack . jss
{-# NOINLINE tou #-}
-- sync callbacks without result
test1 :: IO ()
test1 = do
js_log "test1"
mv <- newEmptyMVar
let jsa = jsval ("ab" :: JSString)
jsb = jsval ("bc" :: JSString)
jsc = jsval ("cd" :: JSString)
x = takeMVar mv >> js_log "mvar taken"
x' = x `catch` \(_::WouldBlockException) -> js_log "would block"
t0 n ob xx = t n (syncCallback ob xx) (js_runCallback n)
t1 n ob xx = t n (syncCallback1 ob xx) (js_runCallback1 n jsa)
t2 n ob xx = t n (syncCallback2 ob xx) (js_runCallback2 n jsa jsb)
t3 n ob xx = t n (syncCallback3 ob xx) (js_runCallback3 n jsa jsb jsc)
t name mkCB runCB = do
js_log (">>> " <> name)
cb <- mkCB
runCB cb
putMVar mv ()
threadDelay 200000
tryTakeMVar mv
releaseCallback cb
t0 "cb1" ContinueAsync x -- mvar taken async / result false
t0 "cb2" ThrowWouldBlock x -- result false
t0 "cb3" ContinueAsync x' -- mvar taken async / result false
t0 "cb4" ThrowWouldBlock x' -- mvar not taken / would block printed / result true
t1 "cb5" ThrowWouldBlock (js_log . JSS.pack . tou)
t2 "cb6" ThrowWouldBlock (\a b -> js_log (JSS.pack $ tou a ++ tou b))
t3 "cb7" ThrowWouldBlock (\a b c -> js_log (JSS.pack $ tou a ++ tou b ++ tou c))
-- sync callbacks with result
test2 :: IO ()
test2 = do
js_log "test2"
mv <- newEmptyMVar
let jsa = jsval ("ab" :: JSString)
jsb = jsval ("bc" :: JSString)
jsc = jsval ("cd" :: JSString)
s1 = return . jsval . JSS.pack . tou
s2 a b = return . jsval . JSS.pack $ tou a ++ tou b
s3 a b c = return . jsval . JSS.pack $ tou a ++ tou b ++ tou c
t0 n xx = t n (syncCallback' xx) (js_runCallback n)
t1 n xx = t n (syncCallback1' xx) (js_runCallback1 n jsa)
t2 n xx = t n (syncCallback2' xx) (js_runCallback2 n jsa jsb)
t3 n xx = t n (syncCallback3' xx) (js_runCallback3 n jsa jsb jsc)
t name mkCB runCB = do
js_log (">>> " <> name)
cb <- mkCB
runCB cb
putMVar mv ()
threadDelay 200000
tryTakeMVar mv
releaseCallback cb
tmc = takeMVar mv `catch` \(_::WouldBlockException) -> js_log "would block"
t1 "cb1" (return . jsval . JSS.pack . tou)
t2 "cb2" (\a b -> return . jsval . JSS.pack $ tou a ++ tou b)
t3 "cb3" (\a b c -> return . jsval . JSS.pack $ tou a ++ tou b ++ tou c)
t1 "cb4" (\a -> takeMVar mv >> s1 a)
t2 "cb5" (\a b -> takeMVar mv >> s2 a b)
t3 "cb6" (\a b c -> takeMVar mv >> s3 a b c)
t1 "cb7" (\a -> tmc >> s1 a)
t2 "cb8" (\a b -> tmc >> s2 a b)
t3 "cb9" (\a b c -> tmc >> s3 a b c)
test3 :: IO ()
test3 = do
js_log "test3"
ior <- newIORef "abc"
w <- mkWeakIORef ior (js_log "finalized")
let v1 = jsval ("xyz" :: JSString)
cb1 <- syncCallback ThrowWouldBlock (readIORef ior >>= js_log . ("value: " <>))
cb2 <- syncCallback1 ThrowWouldBlock
(\x -> readIORef ior >>= \y -> js_log (jss x <> y))
performGC
js_runCallback "cb1" cb1
js_runCallback1 "cb2" v1 cb2
writeIORef ior "def"
js_runCallback "cb1" cb1
js_runCallback1 "cb2" v1 cb2
js_log "test3.1"
performGC
threadDelay 500000
js_log =<< maybe (return "<empty>") readIORef =<< deRefWeak w
js_log "test3.2" -- finalizer should not have run
releaseCallback cb1
releaseCallback cb2
performGC
threadDelay 500000
js_log =<< maybe (return "<empty>") readIORef =<< deRefWeak w
js_log "test3.3" -- finalizer should have run
foreign import javascript unsafe
"h$log($1);"
js_log :: JSString -> IO ()
foreign import javascript unsafe
"try { h$log($1 + ' result: ' + $2()) } catch(e) { h$log($1 + ' exception: ' + e); }"
js_runCallback :: JSString -> Callback a -> IO ()
foreign import javascript unsafe
"try { h$log($1 + ' result: ' + $3($2)) } catch(e) { h$log($1 + ' exception: ' + e); }"
js_runCallback1 :: JSString -> JSVal -> Callback a -> IO ()
foreign import javascript unsafe
"try { h$log($1 + ' result: ' + $4($2,$3)) } catch(e) { h$log($1 + ' exception: ' + e); }"
js_runCallback2 :: JSString -> JSVal -> JSVal -> Callback a -> IO ()
foreign import javascript unsafe
"try { h$log($1 + ' result: ' + $5($2,$3,$4)) } catch(e) { h$log($1 + ' exception: ' + e); }"
js_runCallback3 :: JSString -> JSVal -> JSVal -> JSVal -> Callback a -> IO ()
foreign import javascript unsafe "$r = $1;" jss :: JSVal -> JSString
|
ryantrinkle/ghcjs
|
test/ffi/callback.hs
|
mit
| 5,012 | 24 | 17 | 1,243 | 1,794 | 872 | 922 | 120 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, TemplateHaskell, DeriveFunctor, DeriveFoldable, DeriveTraversable, DeriveDataTypeable,
PatternGuards #-}
module Lamdu.Data.Expression.Infer
( Inferred(..), rExpression
, Loaded, load, loadIndependent
, inferLoaded, addRules
, derefExpr, derefNode
, IsRestrictedPoly(..)
, Node(..), TypedValue(..)
, Error(..), MismatchError(..), MismatchErrorDetails(..)
, RefMap, Context, ExprRef, Scope
, Loader(..), InferActions(..)
, initial
-- Used for inferring independent expressions in an inner infer context
-- (See hole apply forms).
, newNodeWithScope
, createRefExpr
) where
import Control.Applicative (Applicative(..), (<$>), (<$))
import Control.DeepSeq (NFData(..))
import Control.Lens (LensLike')
import Control.Lens.Operators
import Control.Monad ((<=<), guard, unless, void, when)
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.Either (EitherT(..))
import Control.Monad.Trans.State (StateT(..), State, runState)
import Control.Monad.Trans.State.Utils (toStateT)
import Control.Monad.Trans.Writer (Writer)
import Control.MonadA (MonadA)
import Data.Binary (Binary(..), getWord8, putWord8)
import Data.Derive.Binary (makeBinary)
import Data.Derive.NFData (makeNFData)
import Data.DeriveTH (derive)
import Data.Foldable (traverse_)
import Data.Function (on)
import Data.Functor.Identity (Identity(..))
import Data.IntMap (IntMap)
import Data.IntSet (IntSet)
import Data.Map (Map)
import Data.Maybe (isJust, fromMaybe, fromJust)
import Data.Maybe.Utils(unsafeUnjust)
import Data.Monoid (Monoid(..))
import Data.Traversable (traverse)
import Data.Typeable (Typeable)
import Lamdu.Data.Expression.IRef (DefI)
import Lamdu.Data.Expression.Infer.Rules (Rule(..))
import Lamdu.Data.Expression.Infer.Types
import qualified Control.Lens as Lens
import qualified Control.Lens.Utils as LensUtils
import qualified Control.Monad.Trans.Either as Either
import qualified Control.Monad.Trans.State as State
import qualified Data.Foldable as Foldable
import qualified Data.IntSet as IntSet
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Lamdu.Data.Expression as Expr
import qualified Lamdu.Data.Expression.IRef as ExprIRef
import qualified Lamdu.Data.Expression.Infer.Rules as Rules
import qualified Lamdu.Data.Expression.Lens as ExprLens
import qualified Lamdu.Data.Expression.Utils as ExprUtil
newtype RuleRef = RuleRef { unRuleRef :: Int }
deriving (Eq, Ord)
instance Show RuleRef where
show = ('E' :) . show . unRuleRef
-- Initial Pass:
-- Expand Definitions' types.
-- Use expression's structures except for Apply.
-- (because an Apply can result in something else
-- but for example an Int or Lambda stays the same)
-- Add SimpleType, Union, LambdaOrPi, LambdaBodyType, Apply rules
-- Param types of Lambdas and Pis are of type Set
-- Pi result type is of type Set
-- When recursing on an expression, we remember the parent expression origins,
-- And we make sure not to add a sub-expression with a parent origin (that's a recursive structure).
data RefData def = RefData
{ _rExpression :: RefExpression def
, _rRules :: [RuleRef] -- Rule id
} deriving (Eq, Ord)
--------------
--- RefMap:
data RefMap a = RefMap
{ _refs :: IntMap a
, _nextRef :: Int
} deriving (Eq, Ord)
Lens.makeLenses ''RefData
Lens.makeLenses ''RefMap
emptyRefMap :: RefMap a
emptyRefMap =
RefMap
{ _refs = mempty
, _nextRef = 0
}
{-# INLINE createRef #-}
createRef :: MonadA m => a -> StateT (RefMap a) m Int
createRef val = do
key <- Lens.use nextRef
nextRef += 1
refs . Lens.at key .= Just val
return key
{-# INLINE refsAt #-}
refsAt :: Functor f => Int -> LensLike' f (RefMap a) (Maybe a)
refsAt k = refs . Lens.at k
-------------- InferActions
data MismatchErrorDetails def
= MismatchIn
(Expr.Expression def ())
(Expr.Expression def ())
| InfiniteExpression (Rule def (Expr.Expression def ()))
deriving (Eq, Ord, Show)
instance Functor MismatchErrorDetails where
fmap f (MismatchIn x y) =
on MismatchIn (ExprLens.exprDef %~ f) x y
fmap _ (InfiniteExpression _) =
error "TODO: Functor MismatchErrorDetails case of InfiniteExpression"
data MismatchError def = MismatchError
{ errRef :: ExprRef
, errMismatch ::
( Expr.Expression def ()
, Expr.Expression def ()
)
, errDetails :: MismatchErrorDetails def
} deriving (Show, Eq, Ord)
instance Functor MismatchError where
fmap f (MismatchError ref mis details) =
MismatchError ref
(mis & Lens.both . ExprLens.exprDef %~ f)
(f <$> details)
data Error def
= ErrorMismatch (MismatchError def)
| ErrorMissingDefType def
deriving (Functor, Eq, Ord, Show)
newtype InferActions def m = InferActions
{ reportError :: Error def -> m ()
}
--------------
data Context def = Context
{ _exprMap :: RefMap (RefData def)
, _ruleMap :: RefMap (Rule def ExprRef)
} deriving (Typeable, Eq, Ord)
data InferState def m = InferState
{ _sContext :: Context def
, _sBfsNextLayer :: IntSet
, _sBfsCurLayer :: IntSet
, _sActions :: InferActions def m
}
Lens.makeLenses ''Context
Lens.makeLenses ''InferState
fmap concat . sequence $
derive
<$> [makeBinary, makeNFData]
<*> [''Context, ''MismatchErrorDetails, ''MismatchError, ''RuleRef, ''RefData, ''RefMap]
-- ExprRefMap:
toRefExpression :: Expr.Expression def () -> RefExpression def
toRefExpression = (RefExprPayload mempty mempty mempty <$)
createRefExpr :: MonadA m => StateT (Context def) m ExprRef
createRefExpr =
fmap ExprRef . Lens.zoom exprMap . createRef $
RefData (toRefExpression ExprUtil.pureHole) mempty
{-# INLINE exprRefsAt #-}
exprRefsAt :: Functor f => ExprRef -> LensLike' f (Context def) (RefData def)
exprRefsAt k =
exprMap . refsAt (unExprRef k) .
LensUtils._fromJust (unwords ["exprRefsAt: key", show k, "not in map"])
-- RuleRefMap
createRuleRef :: Rule def ExprRef -> State (Context def) RuleRef
createRuleRef = fmap RuleRef . Lens.zoom ruleMap . createRef
{-# INLINE ruleRefsAt #-}
ruleRefsAt :: Functor f => RuleRef -> LensLike' f (Context def) (Rule def ExprRef)
ruleRefsAt k =
ruleMap . refsAt (unRuleRef k) .
LensUtils._fromJust (unwords ["ruleRefsAt: key", show k, "not in map"])
-------------
createTypedVal :: MonadA m => StateT (Context def) m TypedValue
createTypedVal = TypedValue <$> createRefExpr <*> createRefExpr
newNodeWithScope :: MonadA m => Scope def -> StateT (Context def) m (Node def)
newNodeWithScope scope = (`Node` scope) <$> createTypedVal
initial :: Ord def => Maybe def -> (Context def, Node def)
initial mRecursiveDefI =
(context, res)
where
(res, context) =
(`runState` emptyContext) $ do
rootTv <- createTypedVal
let
scope =
case mRecursiveDefI of
Nothing -> mempty
Just recursiveDefI ->
Map.singleton (Expr.DefinitionRef recursiveDefI) (tvType rootTv)
return $ Node rootTv scope
emptyContext =
Context
{ _exprMap = emptyRefMap
, _ruleMap = emptyRefMap
}
--- InferT:
newtype InferT def m a =
InferT { unInferT :: StateT (InferState def m) m a }
deriving (Functor, Applicative, Monad)
askActions :: MonadA m => InferT def m (InferActions def m)
askActions = InferT $ Lens.use sActions
liftState :: Monad m => StateT (InferState def m) m a -> InferT def m a
liftState = InferT
{-# SPECIALIZE liftState :: StateT (InferState def Maybe) Maybe a -> InferT def Maybe a #-}
{-# SPECIALIZE liftState :: Monoid w => StateT (InferState def (Writer w)) (Writer w) a -> InferT def (Writer w) a #-}
instance MonadTrans (InferT def) where
lift = liftState . lift
derefNode :: Context def -> Node def -> Maybe (Inferred def)
derefNode context inferNode =
mkInferred
<$> (deref . tvVal . nRefs) inferNode
<*> (deref . tvType . nRefs) inferNode
<*> ( nScope inferNode
& traverse onScopeElement . Map.toList
<&> Map.fromList . concat )
where
mkInferred val typ scope =
Inferred
{ iValue = val
, iType = typ
, iScope = scope
, iNode = inferNode
}
onScopeElement (Expr.ParameterRef guid, ref) = (: []) . (,) guid <$> deref ref
onScopeElement _ = pure []
toIsRestrictedPoly False = UnrestrictedPoly
toIsRestrictedPoly True = RestrictedPoly
deref ref =
context ^? exprMap . refsAt (unExprRef ref) . Lens._Just . rExpression
<&> Lens.mapped %~
toIsRestrictedPoly . (^. rplRestrictedPoly . Lens.unwrapped)
{-# INLINE derefNode #-}
derefExpr ::
Expr.Expression def (Node def, a) -> Context def ->
Expr.Expression def (Inferred def, a)
derefExpr expr context =
expr <&> Lens._1 %~ unsafeUnjust msg . derefNode context
where
msg = "derefExpr must be given valid expr/context pair"
getRefExpr :: MonadA m => ExprRef -> InferT def m (RefExpression def)
getRefExpr ref = liftState $ Lens.use (sContext . exprRefsAt ref . rExpression)
{-# SPECIALIZE getRefExpr :: ExprRef -> InferT (DefI t) Maybe (RefExpression (DefI t)) #-}
{-# SPECIALIZE getRefExpr :: Monoid w => ExprRef -> InferT (DefI t) (Writer w) (RefExpression (DefI t)) #-}
executeRules :: (Eq def, MonadA m) => InferT def m ()
executeRules = do
curLayer <- liftState $ Lens.use sBfsNextLayer
liftState $ sBfsCurLayer .= curLayer
liftState $ sBfsNextLayer .= IntSet.empty
unless (IntSet.null curLayer) $ do
traverse_ processRule $ IntSet.toList curLayer
executeRules
where
processRule key = do
liftState $ sBfsCurLayer . Lens.contains key .= False
ruleRefs <- liftState $ Lens.use (sContext . ruleRefsAt (RuleRef key))
ruleExprs <- traverse getRefExpr ruleRefs
traverse_ (uncurry (setRefExpr (Just (RuleRef key, ruleExprs)))) $ Rules.runRule ruleExprs
{-# SPECIALIZE executeRules :: InferT (DefI t) Maybe () #-}
{-# SPECIALIZE executeRules :: Monoid w => InferT (DefI t) (Writer w) () #-}
execInferT ::
(MonadA m, Eq def) => InferActions def m ->
InferT def m a -> StateT (Context def) m a
execInferT actions act = do
inferState <- State.gets mkInferState
(res, newState) <-
lift . (`runStateT` inferState) . unInferT $ do
res <- act
executeRules
return res
State.put $ newState ^. sContext
return res
where
mkInferState ctx = InferState ctx mempty mempty actions
{-# SPECIALIZE
execInferT ::
InferActions (DefI t) Maybe -> InferT (DefI t) Maybe a ->
StateT (Context (DefI t)) Maybe a
#-}
{-# SPECIALIZE
execInferT ::
Monoid w =>
InferActions (DefI t) (Writer w) -> InferT (DefI t) (Writer w) a ->
StateT (Context (DefI t)) (Writer w) a
#-}
newtype Loader def m = Loader
{ loadPureDefinitionType :: def -> m (Expr.Expression def ())
}
-- This is because platform's Either's MonadA instance sucks
runEither :: EitherT l Identity a -> Either l a
runEither = runIdentity . runEitherT
guardEither :: l -> Bool -> EitherT l Identity ()
guardEither err False = Either.left err
guardEither _ True = return ()
-- Merge two expressions:
-- If they do not match, return Nothing.
-- Holes match with anything, expand to the other expr.
-- Param guids and Origins come from the first expression.
-- If origins repeat, fail.
mergeExprs ::
Eq def =>
RefExpression def ->
Maybe (RuleRef, Rule def (RefExpression def)) ->
RefExpression def ->
Either (MismatchErrorDetails def) (RefExpression def)
mergeExprs oldExp mRule newExp =
runEither $ ExprUtil.matchExpression onMatch onMismatch oldExp newExp
where
mergePayloadInto src =
mappendLens rplRestrictedPoly src .
mappendLens rplSubstitutedArgs src .
mappendLens rplOrigins src
mappendLens lens src =
Lens.cloneLens lens <>~ src ^. Lens.cloneLens lens
onMatch x y = return $ y `mergePayloadInto` x
mergePayloads s e =
e
& Expr.ePayload %~ (mappendLens rplRestrictedPoly s . mappendLens rplOrigins s)
& Lens.mapped %~ mappendLens rplSubstitutedArgs s
onMismatch e0 (Expr.Expression (Expr.BodyLeaf Expr.Hole) s1) =
return $ mergePayloads s1 e0
onMismatch (Expr.Expression (Expr.BodyLeaf Expr.Hole) s0) e1 = do
guardEither ((InfiniteExpression . fmap void . snd . fromJust) mRule) .
IntSet.null . IntSet.intersection origins .
mconcat $ e1 ^.. Lens.traversed . rplOrigins
return .
mergePayloads s0 $
e1 &
Lens.filtered (Lens.has (ExprLens.exprLeaves . Expr._Hole)) .
Expr.ePayload . rplOrigins <>~ origins
onMismatch e0 e1 =
Either.left $ MismatchIn (void e0) (void e1)
origins = maybe mempty (IntSet.singleton . unRuleRef . fst) mRule
touch :: MonadA m => ExprRef -> InferT def m ()
touch ref =
liftState $ do
nodeRules <- Lens.use (sContext . exprRefsAt ref . rRules)
curLayer <- Lens.use sBfsCurLayer
sBfsNextLayer %=
( mappend . IntSet.fromList
. filter (not . (`IntSet.member` curLayer))
. map unRuleRef
) nodeRules
{-# SPECIALIZE touch :: ExprRef -> InferT (DefI t) Maybe () #-}
{-# SPECIALIZE touch :: Monoid w => ExprRef -> InferT (DefI t) (Writer w) () #-}
reportAnError :: MonadA m => Error def -> InferT def m ()
reportAnError err = do
report <- reportError <$> askActions
lift $ report err
setRefExpr ::
(Eq def, MonadA m) =>
Maybe (RuleRef, Rule def (RefExpression def)) ->
ExprRef ->
RefExpression def -> InferT def m ()
setRefExpr mRule ref newExpr = do
curExpr <- liftState $ Lens.use (sContext . exprRefsAt ref . rExpression)
case mergeExprs curExpr mRule newExpr of
Right mergedExpr -> do
let
isChange = not $ equiv mergedExpr curExpr
isHole = Lens.notNullOf ExprLens.exprHole mergedExpr
when isChange $ touch ref
when (isChange || isHole) $
liftState $ sContext . exprRefsAt ref . rExpression .= mergedExpr
Left details ->
reportAnError $ ErrorMismatch MismatchError
{ errRef = ref
, errMismatch = (void curExpr, void newExpr)
, errDetails = details
}
where
equiv x y =
isJust $
ExprUtil.matchExpression comparePl ((const . const) Nothing) x y
comparePl x y =
guard $
(x ^. rplSubstitutedArgs) == (y ^. rplSubstitutedArgs) &&
(x ^. rplRestrictedPoly) == (y ^. rplRestrictedPoly)
{-# SPECIALIZE setRefExpr :: Maybe (RuleRef, Rule (DefI t) (RefExpression (DefI t))) -> ExprRef -> RefExpression (DefI t) -> InferT (DefI t) Maybe () #-}
{-# SPECIALIZE setRefExpr :: Monoid w => Maybe (RuleRef, Rule (DefI t) (RefExpression (DefI t))) -> ExprRef -> RefExpression (DefI t) -> InferT (DefI t) (Writer w) () #-}
liftContextState :: MonadA m => State (Context def) a -> InferT def m a
liftContextState = liftState . Lens.zoom sContext . toStateT
exprIntoContext ::
(MonadA m, Ord def) => Scope def ->
Loaded def a ->
InferT def m (Expr.Expression def (Node def, a))
exprIntoContext rootScope (Loaded rootExpr defTypes) = do
defTypesRefs <-
Lens.itraverse defTypeIntoContext defTypes
<&> Map.mapKeys Expr.DefinitionRef
-- mappend prefers left, so it is critical we put rootScope
-- first. defTypesRefs may contain the loaded recursive defI because
-- upon resumption, we load without giving the root defI, so its
-- type does get (unnecessarily) loaded.
go (rootScope `mappend` defTypesRefs) =<<
liftContextState
(traverse addTypedVal rootExpr)
where
defTypeIntoContext defI (fullType, defType) = do
unless
(fullType || Map.member (Expr.DefinitionRef defI) rootScope) .
reportAnError $ ErrorMissingDefType defI
ref <- liftContextState createRefExpr
setRefExpr Nothing ref $ toRefExpression defType
return ref
addTypedVal x = fmap ((,) x) createTypedVal
go scope (Expr.Expression body (s, createdTV)) = do
inferNode <- toNode scope (void <$> body) createdTV
newBody <-
case body of
Expr.BodyLam (Expr.Lam k paramGuid paramType result) -> do
paramTypeDone <- go scope paramType
let
paramTypeRef = tvVal . nRefs . fst $ paramTypeDone ^. Expr.ePayload
newScope = Map.insert (Expr.ParameterRef paramGuid) paramTypeRef scope
resultDone <- go newScope result
return $ ExprUtil.makeLam k paramGuid paramTypeDone resultDone
_ -> traverse (go scope) body
return $ Expr.Expression newBody (inferNode, s)
toNode scope body tv = do
let
typedValue =
tv
{ tvType =
fromMaybe (tvType tv) $
body ^?
Expr._BodyLeaf . Expr._GetVariable .
Lens.folding (`Map.lookup` scope)
}
return $ Node typedValue scope
ordNub :: Ord a => [a] -> [a]
ordNub = Set.toList . Set.fromList
data Loaded def a = Loaded
{ _lExpr :: Expr.Expression def a
, _lDefTypes :: Map def (Bool, Expr.Expression def ())
} deriving (Typeable, Functor, Eq, Ord)
-- Requires Ord instance for def, cannot derive
instance (Binary a, Binary def, Ord def) => Binary (Loaded def a) where
get = Loaded <$> get <*> get
put (Loaded a b) = put a >> put b
load ::
(MonadA m, Ord def) =>
Loader def m -> Maybe def -> Expr.Expression def a -> m (Loaded def a)
load loader mRecursiveDef expr =
fmap (Loaded expr) loadDefTypes
where
loadDefTypes =
expr ^..
Lens.folding ExprUtil.subExpressions .
ExprLens.exprDefinitionRef .
Lens.filtered ((/= mRecursiveDef) . Just)
& traverse loadType . ordNub
<&> Map.fromList
markFullTypes typeExpr =
(Lens.nullOf ExprLens.holePayloads typeExpr, typeExpr)
loadType defI =
loadPureDefinitionType loader defI
<&> (,) defI . markFullTypes
-- An Independent expression has no GetDefinition of any expression
-- except potentially the given recurse def. The given function should
-- yield a justification for the belief that it has no such
-- GetDefinitions in it.
loadIndependent :: Ord def => (def -> String) -> Maybe def -> Expr.Expression def a -> Loaded def a
loadIndependent errStr mRecursiveDef =
either (error . errStr) id . load (Loader Left) mRecursiveDef
addRule :: Rule def ExprRef -> State (InferState def m) ()
addRule rule = do
ruleRef <- makeRule
traverse_ (addRuleId ruleRef) $ Foldable.toList rule
sBfsNextLayer . Lens.contains (unRuleRef ruleRef) .= True
where
makeRule = Lens.zoom sContext $ createRuleRef rule
addRuleId ruleRef ref = sContext . exprRefsAt ref . rRules %= (ruleRef :)
addRules ::
(Eq def, MonadA m) => InferActions def m ->
[Expr.Expression def (Node def)] ->
StateT (Context def) m ()
addRules actions exprs =
execInferT actions . liftState . toStateT .
traverse_ addRule . concat .
traverse Rules.makeForNode $ (map . fmap) nRefs exprs
inferLoaded ::
(Ord def, MonadA m) =>
InferActions def m -> Loaded def a ->
Node def ->
StateT (Context def) m (Expr.Expression def (Inferred def, a))
inferLoaded actions loadedExpr node =
State.gets . derefExpr <=<
execInferT actions $ do
expr <- exprIntoContext (nScope node) loadedExpr
liftState . toStateT $ do
let
addUnionRules f =
traverse_ addRule $ on Rules.union (f . nRefs) node . fst $ expr ^. Expr.ePayload
addUnionRules tvVal
addUnionRules tvType
traverse_ addRule . Rules.makeForAll $ nRefs . fst <$> expr
return expr
{-# SPECIALIZE
inferLoaded ::
InferActions (DefI t) Maybe -> Loaded (DefI t) a ->
Node (DefI t) ->
StateT (Context (DefI t)) Maybe (ExprIRef.Expression t (Inferred (DefI t), a))
#-}
{-# SPECIALIZE
inferLoaded ::
Monoid w => InferActions (DefI t) (Writer w) -> Loaded (DefI t) a ->
Node (DefI t) ->
StateT (Context (DefI t)) (Writer w) (ExprIRef.Expression t (Inferred (DefI t), a))
#-}
|
schell/lamdu
|
Lamdu/Data/Expression/Infer.hs
|
gpl-3.0
| 19,800 | 0 | 22 | 4,207 | 5,668 | 2,964 | 2,704 | -1 | -1 |
{-|
Module : IRTS.JavaScript.PrimOp
Description : The JavaScript primitive operations.
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE OverloadedStrings, PatternGuards, StandaloneDeriving #-}
module IRTS.JavaScript.PrimOp
( PrimF
, PrimDec
, JsPrimTy(..)
, primDB
, jsPrimCoerce
) where
import qualified Data.Map.Strict as Map
import Idris.Core.TT
import IRTS.JavaScript.AST
import IRTS.Lang
data JsPrimTy = PTBool | PTAny deriving (Eq, Ord)
type PrimF = [JsExpr] -> JsExpr
type PrimDec = (Bool, JsPrimTy, PrimF) -- the Bool indicates whether the bigint library is needed
primDB :: Map.Map PrimFn PrimDec
primDB =
Map.fromList [
item (LPlus ATFloat) False PTAny $ binop "+"
, item (LPlus (ATInt ITChar)) False PTAny $ JsForeign "String.fromCharCode(%0.charCodeAt(0) + %1.charCodeAt(0))"
, item (LPlus (ATInt ITNative)) False PTAny $ binop "+"
, item (LPlus (ATInt (ITFixed IT8))) False PTAny $ JsForeign "%0 + %1 & 0xFF"
, item (LPlus (ATInt (ITFixed IT16))) False PTAny $ JsForeign "%0 + %1 & 0xFFFF"
, item (LPlus (ATInt (ITFixed IT32))) False PTAny $ JsForeign "%0+%1|0"
, item (LPlus (ATInt ITBig)) True PTAny $ method "add"
, item (LPlus (ATInt (ITFixed IT64))) True PTAny $
\[l, r] -> JsForeign "%0.add(%1).and(new $JSRTS.jsbn.BigInteger(%2))" [l,r, JsStr $ show 0xFFFFFFFFFFFFFFFF]
, item (LMinus ATFloat) False PTAny $ binop "-"
, item (LMinus (ATInt ITChar)) False PTAny $ JsForeign "String.fromCharCode(%0.charCodeAt(0) - %1.charCodeAt(0))"
, item (LMinus (ATInt ITNative)) False PTAny $ binop "-"
, item (LMinus (ATInt (ITFixed IT8))) False PTAny $ JsForeign "%0 - %1 & 0xFF"
, item (LMinus (ATInt (ITFixed IT16))) False PTAny $ JsForeign "%0 - %1 & 0xFFFF"
, item (LMinus (ATInt (ITFixed IT32))) False PTAny $ JsForeign "%0-%1|0"
, item (LMinus (ATInt ITBig)) True PTAny $ method "subtract"
, item (LMinus (ATInt (ITFixed IT64))) True PTAny $
\[l, r] -> JsForeign "%0.subtract(%1).and(new $JSRTS.jsbn.BigInteger(%2))" [l,r, JsStr $ show 0xFFFFFFFFFFFFFFFF]
, item (LTimes ATFloat) False PTAny $ binop "*"
, item (LTimes (ATInt ITChar)) False PTAny $ JsForeign "String.fromCharCode(%0.charCodeAt(0) * %1.charCodeAt(0))"
, item (LTimes (ATInt ITNative)) False PTAny $ binop "*"
, item (LTimes (ATInt (ITFixed IT8))) False PTAny $ JsForeign "%0 * %1 & 0xFF"
, item (LTimes (ATInt (ITFixed IT16))) False PTAny $ JsForeign "%0 * %1 & 0xFFFF"
, item (LTimes (ATInt (ITFixed IT32))) False PTAny $ JsForeign "%0*%1|0"
, item (LTimes (ATInt ITBig)) True PTAny $ method "multiply"
, item (LTimes (ATInt (ITFixed IT64))) True PTAny $
\[l, r] -> JsForeign "%0.multiply(%1).and(new $JSRTS.jsbn.BigInteger(%2))" [l,r, JsStr $ show 0xFFFFFFFFFFFFFFFF]
, item (LUDiv (ITFixed IT8)) False PTAny $ JsForeign "%0 / %1"
, item (LUDiv (ITFixed IT16)) False PTAny $ JsForeign "%0 / %1"
, item (LUDiv (ITFixed IT32)) False PTAny $ JsForeign "(%0>>>0) / (%1>>>0) |0"
, item (LUDiv (ITFixed IT64)) True PTAny $ JsForeign "%0.divide(%1)"
, item (LSDiv ATFloat) False PTAny $ binop "/"
, item (LSDiv (ATInt (ITFixed IT8))) False PTAny $ JsForeign "%0 / %1"
, item (LSDiv (ATInt (ITFixed IT16))) False PTAny $ JsForeign "%0 / %1"
, item (LSDiv (ATInt (ITFixed IT32))) False PTAny $ JsForeign "%0 / %1 |0"
, item (LSDiv (ATInt (ITFixed IT64))) True PTAny $ JsForeign "%0.divide(%1)"
    , item (LSDiv (ATInt ITNative)) False PTAny $ JsForeign "%0/%1|0" -- the "|0" truncates the JS division result to an integer
, item (LSDiv (ATInt ITBig)) True PTAny $ method "divide"
, item (LURem (ITFixed IT8)) False PTAny $ JsForeign "%0 % %1"
, item (LURem (ITFixed IT16)) False PTAny $ JsForeign "%0 % %1"
, item (LURem (ITFixed IT32)) False PTAny $ JsForeign "(%0>>>0) % (%1>>>0) |0"
, item (LURem (ITFixed IT64)) True PTAny $ method "remainder"
, item (LSRem ATFloat) False PTAny $ binop "%"
, item (LSRem (ATInt ITNative)) False PTAny $ binop "%"
, item (LSRem (ATInt (ITFixed IT8))) False PTAny $ JsForeign "%0 % %1"
, item (LSRem (ATInt (ITFixed IT16))) False PTAny $ JsForeign "%0 % %1"
, item (LSRem (ATInt (ITFixed IT32))) False PTAny $ JsForeign "%0 % %1 |0"
, item (LSRem (ATInt (ITFixed IT64))) True PTAny $ method "remainder"
, item (LSRem (ATInt ITBig)) True PTAny $ method "remainder"
, item (LAnd ITNative) False PTAny $ JsForeign "%0 & %1"
, item (LAnd (ITFixed IT8)) False PTAny $ JsForeign "%0 & %1"
, item (LAnd (ITFixed IT16)) False PTAny $ JsForeign "%0 & %1"
, item (LAnd (ITFixed IT32)) False PTAny $ JsForeign "%0 & %1"
, item (LAnd (ITFixed IT64)) True PTAny $ method "and"
, item (LAnd ITBig) True PTAny $ method "and"
, item (LOr ITNative) False PTAny $ JsForeign "%0 | %1"
, item (LOr (ITFixed IT8)) False PTAny $ JsForeign "%0 | %1"
, item (LOr (ITFixed IT16)) False PTAny $ JsForeign "%0 | %1"
, item (LOr (ITFixed IT32)) False PTAny $ JsForeign "%0 | %1"
, item (LOr (ITFixed IT64)) True PTAny $ method "or"
, item (LOr ITBig) True PTAny $ method "or"
, item (LXOr ITNative) False PTAny $ JsForeign "%0 ^ %1"
, item (LXOr (ITFixed IT8)) False PTAny $ JsForeign "%0 ^ %1"
, item (LXOr (ITFixed IT16)) False PTAny $ JsForeign "%0 ^ %1"
, item (LXOr (ITFixed IT32)) False PTAny $ JsForeign "%0 ^ %1"
, item (LXOr (ITFixed IT64)) True PTAny $ method "xor"
, item (LXOr ITBig) True PTAny $ method "xor"
, item (LSHL ITNative) False PTAny $ JsForeign "%0 << %1 |0"
, item (LSHL (ITFixed IT8)) False PTAny $ JsForeign "%0 << %1 & 0xFF"
, item (LSHL (ITFixed IT16)) False PTAny $ JsForeign "%0 << %1 & 0xFFFF"
, item (LSHL (ITFixed IT32)) False PTAny $ JsForeign "%0 << %1 | 0"
, item (LSHL (ITFixed IT64)) True PTAny $
\[l, r] -> JsForeign "%0.shiftLeft(%1).and(new $JSRTS.jsbn.BigInteger(%2))" [l,r, JsStr $ show 0xFFFFFFFFFFFFFFFF]
, item (LSHL ITBig) True PTAny $ method "shiftLeft"
, item (LLSHR ITNative) False PTAny $ JsForeign "%0 >>> %1 |0"
, item (LLSHR (ITFixed IT8)) False PTAny $ JsForeign "%0 >>> %1"
, item (LLSHR (ITFixed IT16)) False PTAny $ JsForeign "%0 >>> %1"
, item (LLSHR (ITFixed IT32)) False PTAny $ JsForeign "%0 >>> %1|0"
, item (LLSHR (ITFixed IT64)) True PTAny $ JsForeign "%0.shiftRight(%1)"
, item (LASHR ITNative) False PTAny $ JsForeign "%0 >> %1 |0"
, item (LASHR (ITFixed IT8)) False PTAny $ JsForeign "%0 >> %1"
, item (LASHR (ITFixed IT16)) False PTAny $ JsForeign "%0 >> %1"
, item (LASHR (ITFixed IT32)) False PTAny $ JsForeign "%0 >> %1|0"
, item (LASHR (ITFixed IT64)) True PTAny $ JsForeign "%0.shiftRight(%1)"
, item (LCompl (ITFixed IT8)) False PTAny $ JsForeign "~%0"
, item (LCompl (ITFixed IT16)) False PTAny $ JsForeign "~%0"
, item (LCompl (ITFixed IT32)) False PTAny $ JsForeign "~%0|0"
, item (LCompl (ITFixed IT64)) True PTAny $ method "not"
, item (LEq ATFloat) False PTBool $ binop "==="
, item (LEq (ATInt ITNative)) False PTBool $ binop "==="
, item (LEq (ATInt ITChar)) False PTBool $ binop "==="
, item (LEq (ATInt ITBig)) True PTBool $ method "equals"
, item (LEq (ATInt (ITFixed IT8))) False PTBool $ binop "==="
, item (LEq (ATInt (ITFixed IT16))) False PTBool $ binop "==="
, item (LEq (ATInt (ITFixed IT32))) False PTBool $ binop "==="
, item (LEq (ATInt (ITFixed IT64))) True PTBool $ method "equals"
, item (LLt (ITFixed IT8)) False PTBool $ JsForeign "%0 < %1"
, item (LLt (ITFixed IT16)) False PTBool $ JsForeign "%0 < %1"
, item (LLt (ITFixed IT32)) False PTBool $ JsForeign "(%0>>>0) < (%1>>>0)"
, item (LLt (ITFixed IT64)) True PTBool $ JsForeign "%0.compareTo(%1) < 0"
, item (LLe (ITFixed IT8)) False PTBool $ JsForeign "%0 <= %1"
, item (LLe (ITFixed IT16)) False PTBool $ JsForeign "%0 <= %1"
, item (LLe (ITFixed IT32)) False PTBool $ JsForeign "(%0>>>0) <= (%1>>>0)"
, item (LLe (ITFixed IT64)) True PTBool $ JsForeign "%0.compareTo(%1) <= 0"
, item (LGt (ITFixed IT8)) False PTBool $ JsForeign "%0 > %1"
, item (LGt (ITFixed IT16)) False PTBool $ JsForeign "%0 > %1"
, item (LGt (ITFixed IT32)) False PTBool $ JsForeign "(%0>>>0) > (%1>>>0)"
, item (LGt (ITFixed IT64)) True PTBool $ JsForeign "%0.compareTo(%1) > 0"
, item (LGe (ITFixed IT8)) False PTBool $ JsForeign "%0 >= %1"
, item (LGe (ITFixed IT16)) False PTBool $ JsForeign "%0 >= %1"
, item (LGe (ITFixed IT32)) False PTBool $ JsForeign "(%0>>>0) >= (%1>>>0)"
, item (LGe (ITFixed IT64)) True PTBool $ JsForeign "%0.compareTo(%1) >= 0"
, item (LSLt ATFloat) False PTBool $ binop "<"
, item (LSLt (ATInt ITChar)) False PTBool $ binop "<"
, item (LSLt (ATInt ITNative)) False PTBool $ binop "<"
, item (LSLt (ATInt ITBig)) True PTBool $ JsForeign "%0.compareTo(%1) < 0"
, item (LSLt (ATInt (ITFixed IT8))) False PTBool $ binop "<"
, item (LSLt (ATInt (ITFixed IT16))) False PTBool $ binop "<"
, item (LSLt (ATInt (ITFixed IT32))) False PTBool $ binop "<"
, item (LSLt (ATInt (ITFixed IT64))) True PTBool $ JsForeign "%0.compareTo(%1) < 0"
, item (LSLe ATFloat) False PTBool $ binop "<="
, item (LSLe (ATInt ITNative)) False PTBool $ binop "<="
, item (LSLe (ATInt ITBig)) True PTBool $ JsForeign "%0.compareTo(%1) <= 0"
, item (LSLe (ATInt (ITFixed IT8))) False PTBool $ binop "<="
, item (LSLe (ATInt (ITFixed IT16))) False PTBool $ binop "<="
, item (LSLe (ATInt (ITFixed IT32))) False PTBool $ binop "<="
, item (LSLe (ATInt (ITFixed IT64))) True PTBool $ JsForeign "%0.compareTo(%1) <= 0"
, item (LSGt ATFloat) False PTBool $ binop ">"
, item (LSGt (ATInt ITNative)) False PTBool $ binop ">"
, item (LSGt (ATInt ITBig)) True PTBool $ JsForeign "%0.compareTo(%1) > 0"
, item (LSGt (ATInt (ITFixed IT8))) False PTBool $ binop ">"
, item (LSGt (ATInt (ITFixed IT16))) False PTBool $ binop ">"
, item (LSGt (ATInt (ITFixed IT32))) False PTBool $ binop ">"
, item (LSGt (ATInt (ITFixed IT64))) True PTBool $ JsForeign "%0.compareTo(%1) > 0"
, item (LSGe ATFloat) False PTBool $ binop ">="
, item (LSGe (ATInt ITNative)) False PTBool $ binop ">="
, item (LSGe (ATInt ITBig)) True PTBool $ JsForeign "%0.compareTo(%1) >= 0"
, item (LSGe (ATInt (ITFixed IT8))) False PTBool $ binop ">="
, item (LSGe (ATInt (ITFixed IT16))) False PTBool $ binop ">="
, item (LSGe (ATInt (ITFixed IT32))) False PTBool $ binop ">="
, item (LSGe (ATInt (ITFixed IT64))) True PTBool $ JsForeign "%0.compareTo(%1) >= 0"
, item (LSExt ITNative ITBig) True PTAny $ JsForeign "new $JSRTS.jsbn.BigInteger(''+%0)"
, item (LZExt (ITFixed IT8) ITNative) False PTAny $ head
, item (LZExt (ITFixed IT16) ITNative) False PTAny $ head
, item (LZExt (ITFixed IT32) ITNative) False PTAny $ head
, item (LZExt ITNative ITBig) True PTAny $ JsForeign "new $JSRTS.jsbn.BigInteger(''+%0)"
, item (LZExt (ITFixed IT8) ITBig) True PTAny $ JsForeign "new $JSRTS.jsbn.BigInteger(''+%0)"
, item (LZExt (ITFixed IT16) ITBig) True PTAny $ JsForeign "new $JSRTS.jsbn.BigInteger(''+%0)"
, item (LZExt (ITFixed IT32) ITBig) True PTAny $ JsForeign "new $JSRTS.jsbn.BigInteger(''+%0)"
, item (LZExt (ITFixed IT64) ITBig) True PTAny $ head
, item (LTrunc ITBig ITNative) True PTAny $ JsForeign "%0.intValue()|0"
, item (LTrunc ITBig (ITFixed IT8)) True PTAny $ JsForeign "%0.intValue() & 0xFF"
, item (LTrunc ITBig (ITFixed IT16)) True PTAny $ JsForeign "%0.intValue() & 0xFFFF"
, item (LTrunc ITBig (ITFixed IT32)) True PTAny $ JsForeign "%0.intValue() & 0xFFFFFFFF"
, item (LTrunc ITBig (ITFixed IT64)) True PTAny $
\[x] -> JsForeign "%0.and(new $JSRTS.jsbn.BigInteger(%1))" [x, JsStr $ show 0xFFFFFFFFFFFFFFFF]
, item (LTrunc (ITFixed IT16) (ITFixed IT8)) False PTAny $ JsForeign "%0 & 0xFF"
, item (LTrunc (ITFixed IT32) (ITFixed IT8)) False PTAny $ JsForeign "%0 & 0xFF"
, item (LTrunc (ITFixed IT64) (ITFixed IT8)) True PTAny $ JsForeign "%0.intValue() & 0xFF"
, item (LTrunc (ITFixed IT32) (ITFixed IT16)) False PTAny $ JsForeign "%0 & 0xFFFF"
, item (LTrunc (ITFixed IT64) (ITFixed IT16)) True PTAny $ JsForeign "%0.intValue() & 0xFFFF"
, item (LTrunc (ITFixed IT64) (ITFixed IT32)) True PTAny $ JsForeign "%0.intValue() & 0xFFFFFFFF"
, item LStrConcat False PTAny $ binop "+"
, item LStrLt False PTBool $ binop "<"
, item LStrEq False PTBool $ binop "=="
, item LStrLen False PTAny $ JsForeign "%0.length"
, item (LIntFloat ITNative) False PTAny $ head
, item (LIntFloat ITBig) True PTAny $ JsForeign "%0.intValue()"
, item (LFloatInt ITNative) False PTAny $ JsForeign "%0|0"
, item (LFloatInt ITBig) True PTAny $ JsForeign "new $JSRTS.jsbn.BigInteger(Math.trunc(%0)+ '')"
, item (LIntStr ITNative) False PTAny $ JsForeign "''+%0"
, item (LIntStr ITBig) True PTAny $ JsForeign "%0.toString()"
, item (LStrInt ITNative) False PTAny $ JsForeign "parseInt(%0)|0"
, item (LStrInt ITBig) True PTAny $ JsForeign "new $JSRTS.jsbn.BigInteger(%0)"
, item (LFloatStr) False PTAny $ JsForeign "''+%0"
, item (LStrFloat) False PTAny $ jsAppN "parseFloat"
, item (LChInt ITNative) False PTAny $ JsForeign "%0.charCodeAt(0)|0"
, item (LIntCh ITNative) False PTAny $ jsAppN "String.fromCharCode"
, item LFExp False PTAny $ jsAppN "Math.exp"
, item LFLog False PTAny $ jsAppN "Math.log"
, item LFSin False PTAny $ jsAppN "Math.sin"
, item LFCos False PTAny $ jsAppN "Math.cos"
, item LFTan False PTAny $ jsAppN "Math.tan"
, item LFASin False PTAny $ jsAppN "Math.asin"
, item LFACos False PTAny $ jsAppN "Math.acos"
, item LFATan False PTAny $ jsAppN "Math.atan"
, item LFATan2 False PTAny $ jsAppN "Math.atan2"
, item LFSqrt False PTAny $ jsAppN "Math.sqrt"
, item LFFloor False PTAny $ jsAppN "Math.floor"
, item LFCeil False PTAny $ jsAppN "Math.ceil"
, item LFNegate False PTAny $ JsForeign "-%0"
, item LStrHead False PTAny $ \[x] -> JsArrayProj (JsInt 0) x
, item LStrTail False PTAny $ \[x] -> JsMethod x "slice" [JsInt 1]
, item LStrCons False PTAny $ JsForeign "%0+%1"
, item LStrIndex False PTAny $ \[x, y] -> JsArrayProj y x
, item LStrRev False PTAny $ JsForeign "%0.split('').reverse().join('')"
, item LStrSubstr False PTAny $ JsForeign "$JSRTS.prim_strSubstr(%0, %1, %2)"
, item LSystemInfo False PTAny $ JsApp (JsProp (JsVar "$JSRTS") "prim_systemInfo")
, item LCrash False PTAny $ \[l] -> JsErrorExp l
, item LReadStr False PTAny $ \[_] -> JsApp (JsProp (JsVar "$JSRTS") "prim_readStr") []
, item LWriteStr False PTAny $ \[_, str] -> JsApp (JsProp (JsVar "$JSRTS") "prim_writeStr") [str]
, item LNoOp False PTAny $ head
]
where
item :: PrimFn -> Bool -> JsPrimTy -> PrimF -> (PrimFn, PrimDec)
item fn ubi pty c = (fn, (ubi, pty, c))
binop op [l, r] = JsBinOp op l r
method op (l:r) = JsMethod l op r
jsB2I :: JsExpr -> JsExpr
jsB2I x = JsForeign "%0 ? 1|0 : 0|0" [x]
jsPrimCoerce :: JsPrimTy -> JsPrimTy -> JsExpr -> JsExpr
jsPrimCoerce PTBool PTAny x = jsB2I x
jsPrimCoerce _ _ x = x
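-- A usage sketch (the caller below is hypothetical, not part of this
-- module): a backend can look a primitive up in 'primDB', apply the
-- generator to the already-compiled argument expressions, and use
-- 'jsPrimCoerce' to turn a boolean result back into the generic form:
--
-- > compilePrim :: PrimFn -> [JsExpr] -> Maybe JsExpr
-- > compilePrim fn args = do
-- >   (_usesBigInt, pty, gen) <- Map.lookup fn primDB
-- >   return $ jsPrimCoerce pty PTAny (gen args)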
|
kojiromike/Idris-dev
|
src/IRTS/JavaScript/PrimOp.hs
|
bsd-3-clause
| 14,816 | 0 | 14 | 2,849 | 6,026 | 2,960 | 3,066 | 228 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude
, ExistentialQuantification
, MagicHash
, RecordWildCards
, PatternSynonyms
#-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Exception
-- Copyright : (c) The University of Glasgow, 1998-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- Exceptions and exception-handling functions.
--
-----------------------------------------------------------------------------
module GHC.Exception
( Exception(..) -- Class
, throw
, SomeException(..), ErrorCall(..,ErrorCall), ArithException(..)
, divZeroException, overflowException, ratioZeroDenomException
, errorCallException, errorCallWithCallStackException
-- re-export CallStack and SrcLoc from GHC.Types
, CallStack, fromCallSiteList, getCallStack, prettyCallStack
, prettyCallStackLines, showCCSStack
, SrcLoc(..), prettySrcLoc
) where
import Data.Maybe
import Data.Typeable (Typeable, cast)
-- loop: Data.Typeable -> GHC.Err -> GHC.Exception
import GHC.Base
import GHC.Show
import GHC.Stack.Types
import GHC.OldList
import GHC.IO.Unsafe
import {-# SOURCE #-} GHC.Stack.CCS
{- |
The @SomeException@ type is the root of the exception type hierarchy.
When an exception of type @e@ is thrown, behind the scenes it is
encapsulated in a @SomeException@.
-}
data SomeException = forall e . Exception e => SomeException e
instance Show SomeException where
showsPrec p (SomeException e) = showsPrec p e
{- |
Any type that you wish to throw or catch as an exception must be an
instance of the @Exception@ class. The simplest case is a new exception
type directly below the root:
> data MyException = ThisException | ThatException
> deriving (Show, Typeable)
>
> instance Exception MyException
The default method definitions in the @Exception@ class do what we need
in this case. You can now throw and catch @ThisException@ and
@ThatException@ as exceptions:
@
*Main> throw ThisException \`catch\` \\e -> putStrLn (\"Caught \" ++ show (e :: MyException))
Caught ThisException
@
In more complicated examples, you may wish to define a whole hierarchy
of exceptions:
> ---------------------------------------------------------------------
> -- Make the root exception type for all the exceptions in a compiler
>
> data SomeCompilerException = forall e . Exception e => SomeCompilerException e
> deriving Typeable
>
> instance Show SomeCompilerException where
> show (SomeCompilerException e) = show e
>
> instance Exception SomeCompilerException
>
> compilerExceptionToException :: Exception e => e -> SomeException
> compilerExceptionToException = toException . SomeCompilerException
>
> compilerExceptionFromException :: Exception e => SomeException -> Maybe e
> compilerExceptionFromException x = do
> SomeCompilerException a <- fromException x
> cast a
>
> ---------------------------------------------------------------------
> -- Make a subhierarchy for exceptions in the frontend of the compiler
>
> data SomeFrontendException = forall e . Exception e => SomeFrontendException e
> deriving Typeable
>
> instance Show SomeFrontendException where
> show (SomeFrontendException e) = show e
>
> instance Exception SomeFrontendException where
> toException = compilerExceptionToException
> fromException = compilerExceptionFromException
>
> frontendExceptionToException :: Exception e => e -> SomeException
> frontendExceptionToException = toException . SomeFrontendException
>
> frontendExceptionFromException :: Exception e => SomeException -> Maybe e
> frontendExceptionFromException x = do
> SomeFrontendException a <- fromException x
> cast a
>
> ---------------------------------------------------------------------
> -- Make an exception type for a particular frontend compiler exception
>
> data MismatchedParentheses = MismatchedParentheses
> deriving (Typeable, Show)
>
> instance Exception MismatchedParentheses where
> toException = frontendExceptionToException
> fromException = frontendExceptionFromException
We can now catch a @MismatchedParentheses@ exception as
@MismatchedParentheses@, @SomeFrontendException@ or
@SomeCompilerException@, but not other types, e.g. @IOException@:
@
*Main> throw MismatchedParentheses `catch` \e -> putStrLn (\"Caught \" ++ show (e :: MismatchedParentheses))
Caught MismatchedParentheses
*Main> throw MismatchedParentheses `catch` \e -> putStrLn (\"Caught \" ++ show (e :: SomeFrontendException))
Caught MismatchedParentheses
*Main> throw MismatchedParentheses `catch` \e -> putStrLn (\"Caught \" ++ show (e :: SomeCompilerException))
Caught MismatchedParentheses
*Main> throw MismatchedParentheses `catch` \e -> putStrLn (\"Caught \" ++ show (e :: IOException))
*** Exception: MismatchedParentheses
@
-}
class (Typeable e, Show e) => Exception e where
toException :: e -> SomeException
fromException :: SomeException -> Maybe e
toException = SomeException
fromException (SomeException e) = cast e
-- | Render this exception value in a human-friendly manner.
--
-- Default implementation: @'show'@.
--
-- @since 4.8.0.0
displayException :: e -> String
displayException = show
instance Exception SomeException where
toException se = se
fromException = Just
displayException (SomeException e) = displayException e
-- | Throw an exception. Exceptions may be thrown from purely
-- functional code, but may only be caught within the 'IO' monad.
throw :: Exception e => e -> a
throw e = raise# (toException e)
-- |This is thrown when the user calls 'error'. The @String@ is the
-- argument given to 'error'.
data ErrorCall = ErrorCallWithLocation String String
deriving (Eq, Ord)
pattern ErrorCall :: String -> ErrorCall
pattern ErrorCall err <- ErrorCallWithLocation err _ where
ErrorCall err = ErrorCallWithLocation err ""
instance Exception ErrorCall
instance Show ErrorCall where
showsPrec _ (ErrorCallWithLocation err "") = showString err
showsPrec _ (ErrorCallWithLocation err loc) = showString (err ++ '\n' : loc)
errorCallException :: String -> SomeException
errorCallException s = toException (ErrorCall s)
errorCallWithCallStackException :: String -> CallStack -> SomeException
errorCallWithCallStackException s stk = unsafeDupablePerformIO $ do
ccsStack <- currentCallStack
let
implicitParamCallStack = prettyCallStackLines stk
ccsCallStack = showCCSStack ccsStack
stack = intercalate "\n" $ implicitParamCallStack ++ ccsCallStack
return $ toException (ErrorCallWithLocation s stack)
showCCSStack :: [String] -> [String]
showCCSStack [] = []
showCCSStack stk = "CallStack (from -prof):" : map (" " ++) (reverse stk)
-- prettySrcLoc and prettyCallStack are defined here to avoid hs-boot
-- files. See Note [Definition of CallStack]
-- | Pretty print a 'SrcLoc'.
--
-- @since 4.9.0.0
prettySrcLoc :: SrcLoc -> String
prettySrcLoc SrcLoc {..}
= foldr (++) ""
[ srcLocFile, ":"
, show srcLocStartLine, ":"
, show srcLocStartCol, " in "
, srcLocPackage, ":", srcLocModule
]
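-- For example, assuming a 'SrcLoc' pointing at line 10, column 4 of
-- @Foo.hs@ in package @my-pkg@, module @Foo@, this renders as:
--
-- > Foo.hs:10:4 in my-pkg:Foo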
-- | Pretty print a 'CallStack'.
--
-- @since 4.9.0.0
prettyCallStack :: CallStack -> String
prettyCallStack = intercalate "\n" . prettyCallStackLines
prettyCallStackLines :: CallStack -> [String]
prettyCallStackLines cs = case getCallStack cs of
[] -> []
stk -> "CallStack (from HasCallStack):"
: map ((" " ++) . prettyCallSite) stk
where
prettyCallSite (f, loc) = f ++ ", called at " ++ prettySrcLoc loc
-- |Arithmetic exceptions.
data ArithException
= Overflow
| Underflow
| LossOfPrecision
| DivideByZero
| Denormal
| RatioZeroDenominator -- ^ @since 4.6.0.0
deriving (Eq, Ord)
divZeroException, overflowException, ratioZeroDenomException :: SomeException
divZeroException = toException DivideByZero
overflowException = toException Overflow
ratioZeroDenomException = toException RatioZeroDenominator
instance Exception ArithException
instance Show ArithException where
showsPrec _ Overflow = showString "arithmetic overflow"
showsPrec _ Underflow = showString "arithmetic underflow"
showsPrec _ LossOfPrecision = showString "loss of precision"
showsPrec _ DivideByZero = showString "divide by zero"
showsPrec _ Denormal = showString "denormal"
showsPrec _ RatioZeroDenominator = showString "Ratio has zero denominator"
|
tolysz/prepare-ghcjs
|
spec-lts8/base/GHC/Exception.hs
|
bsd-3-clause
| 8,674 | 2 | 13 | 1,520 | 1,012 | 558 | 454 | 97 | 2 |
#!/usr/bin/env stack
-- stack --resolver lts-9.21 script
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
import Control.Monad.IO.Class (liftIO)
import Database.Persist
import Database.Persist.Sqlite
import Database.Persist.TH
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Person
name String
age Int Maybe
deriving Show Eq
BlogPost
title String
authorId PersonId
deriving Show Eq
|]
main :: IO ()
main = runSqlite ":memory:" $ do
runMigration migrateAll
johnId <- insert $ Person "John Doe" $ Just 35
delete johnId
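  -- A possible extension of this sketch (standard persistent API; the
  -- query below is illustrative and assumes the entities defined above):
  --
  -- > johnPosts <- selectList [BlogPostAuthorId ==. johnId] [LimitTo 5]
  -- > liftIO $ print (length johnPosts)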
|
gbwey/persistent
|
.github/reproductionTemplates/sqlite.hs
|
mit
| 929 | 0 | 11 | 237 | 120 | 67 | 53 | 19 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module CostCentreState ( CostCentreState, newCostCentreState
, CostCentreIndex, unCostCentreIndex, getCCIndex
) where
import GhcPrelude
import FastString
import FastStringEnv
import Data.Data
import Binary
-- | Per-module state for tracking cost centre indices.
--
-- See documentation of 'CostCentre.cc_flavour' for more details.
newtype CostCentreState = CostCentreState (FastStringEnv Int)
-- | Initialize cost centre state.
newCostCentreState :: CostCentreState
newCostCentreState = CostCentreState emptyFsEnv
-- | An index into a given cost centre module,name,flavour set
newtype CostCentreIndex = CostCentreIndex { unCostCentreIndex :: Int }
deriving (Eq, Ord, Data, Binary)
-- | Get a new index for a given cost centre name.
getCCIndex :: FastString
-> CostCentreState
-> (CostCentreIndex, CostCentreState)
getCCIndex nm (CostCentreState m) =
(CostCentreIndex idx, CostCentreState m')
where
m_idx = lookupFsEnv m nm
idx = maybe 0 id m_idx
m' = extendFsEnv m nm (idx + 1)
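-- A small example of the intended behaviour (sketch): starting from
-- 'newCostCentreState', the first request for a name gets index 0 and
-- each subsequent request for the same name gets the next index.
--
-- > let (i0, st1) = getCCIndex (fsLit "f") newCostCentreState
-- >     (i1, _)   = getCCIndex (fsLit "f") st1
-- > -- unCostCentreIndex i0 == 0 and unCostCentreIndex i1 == 1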
|
sdiehl/ghc
|
compiler/profiling/CostCentreState.hs
|
bsd-3-clause
| 1,152 | 0 | 9 | 234 | 195 | 114 | 81 | 22 | 1 |
-- GSoC 2015 - Haskell bindings for OpenCog.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
-- | Simple example on inserting and removing many atoms in a new AtomSpace.
import OpenCog.AtomSpace (TruthVal(..),Atom(..),AtomSpace,
runOnNewAtomSpace,get,insert,remove,
debug,printAtom,noTv,stv)
import Control.Monad.IO.Class (liftIO)
conceptN :: Int -> Atom
conceptN i = Node "ConceptNode" ("Animal"++ show i) (stv 1 1)
main :: IO ()
main = runOnNewAtomSpace $ do
let listConcepts :: Int -> [Atom]
listConcepts n = [conceptN i | i <- [1..n]]
mapM insert $ listConcepts 50000
res <- get $ conceptN 49877
liftIO $ putStrLn $ "We found: " ++ show res
mapM remove $ listConcepts 50000
debug
insert $ Link "ListLink"
[Link "ListLink" (listConcepts 200) noTv
,Link "ListLink" (listConcepts 200) noTv
] noTv
debug
return ()
|
inflector/atomspace
|
examples/haskell/example_multiple_atoms.hs
|
agpl-3.0
| 1,057 | 0 | 14 | 365 | 287 | 148 | 139 | 23 | 1 |
module Main where
import Language.Haskell.TH.Syntax
t1 = case mkName "^.." of
Name (OccName ".") (NameQ (ModName "^")) -> error "bug0"
Name (OccName "^..") NameS -> return ()
t2 = case mkName "Control.Lens.^.." of
Name (OccName ".") (NameQ (ModName "Control.Lens.^")) -> error "bug1"
Name (OccName "^..") (NameQ (ModName "Control.Lens")) -> return ()
t3 = case mkName "Data.Bits..&." of
Name (OccName ".&.") (NameQ (ModName "Data.Bits")) -> return ()
t4 = case mkName "abcde" of
Name (OccName "abcde") NameS -> return ()
main :: IO ()
main = do t1; t2; t3; t4
|
green-haskell/ghc
|
testsuite/tests/th/T8633.hs
|
bsd-3-clause
| 623 | 0 | 12 | 153 | 265 | 129 | 136 | 14 | 2 |
y :: Int
y = y + 1
-- Machine-sized integers
i :: Int
i = -78
printBounds = (minBound :: Int, maxBound :: Int)
-- Arbitrary-precision integers
n :: Integer
n = 1234567890987654321987340982334987349872349874534
reallyBig :: Integer
reallyBig = 2^(2^(2^(2^2)))
numDigits :: Int
numDigits = length (show reallyBig)
-- Float is single precision, Double is double precision.
d1, d2 :: Double
d1 = 4.5387
d2 = 6.2831e-4
-- Booleans
b1, b2 :: Bool
b1 = True
b2 = False
-- Unicode characters
c1, c2, c3 :: Char
c1 = 'x'
c2 = 'Ø'
c3 = 'ダ'
-- Strings are lists of characters with special syntax
s :: String
s = "Hello, Haskell!"
ex01 = 3 + 2
ex02 = 19 - 27
ex03 = 2.35 * 8.6
ex04 = 8.7 / 3.1
ex05 = mod 19 3
ex06 = 19 `mod` 3
ex07 = 7 ^ 222
-- Negative numbers need to be wrapped in parentheses to avoid `-` being parsed as subtraction.
ex08 = (-3) * (-7)
-- Lightbulb: `=` is a declaration, not an assignment.
i2 = 30 :: Int
-- No instance for (Fractional Int) arising from a use of ‘/’
-- In the expression: i2 / i2
-- In an equation for ‘divError’: divError = i2 / i2
--
-- divError = i2 / i2
-- i3 = 30 :: Int
-- divError2 = print (i / i)
i3 = 30 :: Int
divTest2 = print (i3 `div` i3, 12 `div` 5)
ex11 = True && False
ex12 = not (False || True)
ex13 = ('a' == 'a')
ex14 = (16 /= 3)
ex15 = (5 > 3) && ('p' <= 'q')
ex16 = "Haskell" > "C++"
-- Compute the sum of the integers from 1 to n.
sumtorial :: Integer -> Integer
sumtorial 0 = 0
sumtorial n = n + sumtorial (n - 1)
-- Each clause is checked in order from top to bottom, and the first matching clause is chosen.
sumtest = print (sumtorial 10)
-- Guards.
hailstone :: Integer -> Integer
hailstone n
| n `mod` 2 == 0 = n `div` 2
| otherwise = 3 * n + 1
hailstoneTest = print (hailstone 3)
foo :: Integer -> Integer
foo 0 = 16
foo 1
| "Haskell" > "C++" = 3
| otherwise = 4
foo n
| n < 0 = 0
| n `mod` 17 == 2 = -43
| otherwise = n + 3
fooTest = print [foo (-3), foo 0, foo 1, foo 36, foo 38]
isEven :: Integer -> Bool
isEven n
| n `mod` 2 == 0 = True
| otherwise = False
evenTest = print [isEven 2, isEven 5]
betterIsEven :: Integer -> Bool
betterIsEven n = n `mod` 2 == 0
-- Pairs
p :: (Int, Char)
p = (3, 'x')
sumPair :: (Int, Int) -> Int
sumPair (x,y) = x + y
sumPairTest = print (sumPair (3,4))
f x y z = x + y + z
functionTest = print (f 3 17 8)
-- Lightbulb: these are "singly-linked lists", not arrays!
listTest = [2,3,4] == 2 : 3 : 4 : []
hailstoneSeq :: Integer -> [Integer]
hailstoneSeq 1 = [1]
hailstoneSeq n = n : hailstoneSeq (hailstone n)
hailstoneSeqTest = print (hailstoneSeq 15)
-- Compute the length of a list of Integers
intListLength :: [Integer] -> Integer
intListLength [] = 0
intListLength (_:xs) = 1 + intListLength xs
intListLengthTest = print (intListLength [1,2,3,4,5])
sumEveryTwo :: [Integer] -> [Integer]
sumEveryTwo [] = [] -- Do nothing to the empty list
sumEveryTwo (x:[]) = [] -- Do nothing to lists with a single element
sumEveryTwo (x:(y:zs)) = (x + y) : sumEveryTwo zs
sumTwoTest = print (sumEveryTwo [1,2,3,4,5,6,7,8])
-- The number of hailstone steps needed to reach 1 from a given number
hailstoneLen :: Integer -> Integer
hailstoneLen n = intListLength (hailstoneSeq n) - 1
hailstoneLenTest = print (hailstoneLen 5)
|
v0lkan/learning-haskell
|
session-005/001.back-to-the-future.hs
|
mit
| 3,325 | 0 | 11 | 785 | 1,198 | 666 | 532 | 90 | 1 |
module Main where
import Homework.Week07.Assignment
import Homework.Week07.Editor
import Homework.Week07.StringBuffer
main :: IO ()
main = runEditor editor $ unlines
[ "This buffer is for notes you don't want to save, and for"
, "evaluation of steam valve coefficients."
, "To load a different file, type the character L followed"
, "by the name of the file."
]
|
laser/cis-194-spring-2017
|
src/Homework/Week07/StringBufEditor.hs
|
mit
| 409 | 0 | 7 | 104 | 59 | 35 | 24 | 10 | 1 |
module Main where
import System.IO
import Text.Show.Pretty (ppShow)
import Data.CapnProto.Schema (readSchema)
import Data.CapnProto.Schema.Generator (generateCode)
main :: IO ()
main = do
schema <- readSchema stdin
{- putStrLn . ppShow $ schema -}
putStrLn $ generateCode schema
return ()
|
cstrahan/hs-capnp
|
capnpc-hs/Main.hs
|
mit
| 373 | 0 | 8 | 122 | 87 | 48 | 39 | 10 | 1 |
import XMonad
import XMonad.Actions.CycleWS
import XMonad.Actions.GridSelect
import XMonad.Actions.NoBorders
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Layout.Dishes
import XMonad.Layout.LimitWindows
import XMonad.Layout.Grid
import XMonad.Layout.NoBorders
import XMonad.Layout.PerWorkspace
import XMonad.Util.Run(spawnPipe)
import Data.Monoid
import System.IO
import System.Exit
import qualified XMonad.StackSet as W
import qualified Data.Map as M
-- KEYS - 5 rows :
-- F* row: media controls
-- Numbers row: 11 workspaces
-- Upper row: 3 screens & free keys for custom app bindings
-- Home row: 4 free keys for custom app bindings & shifted app/move bindings
-- Bottom row: xmonad
-- The default number of workspaces (virtual screens) and their names.
-- By default we use numeric strings, but any string may be used as a
-- workspace name. The number of workspaces is determined by the length
-- of this list.
-- > workspaces = ["web", "irc", "code" ] ++ map show [4..9]
myWorkspaces = map show [0..10]
-- GridSelect color scheme
myColorizer = colorRangeFromClassName
(0x00, 0x2B, 0x36) --lowest inactive bg
(0x93, 0xA1, 0xA1) --highest inactive bg
(0xDC, 0x32, 0x2F) --active bg
(0xCC, 0xCC, 0xCC) --inactive fg
(0xFF, 0xFF, 0xFF) --active fg
-- GridSelect theme
myGSConfig = (buildDefaultGSConfig myColorizer)
{ gs_cellheight = 150
, gs_cellwidth = 300
, gs_cellpadding = 10
}
-- Key bindings. Add, modify or remove key bindings here.
myKeys conf@(XConfig {XMonad.modMask = modm}) = M.fromList $
-- Launch a terminal
[ ((modm, xK_Return), spawn $ XMonad.terminal conf)
-- Sound volume
, ((modm, xK_F1 ), spawn "amixer set Master toggle")
, ((modm, xK_F2 ), spawn "amixer set Master 3%-")
, ((modm, xK_F3 ), spawn "amixer set Master 3%+")
, ((modm, xK_F4 ), spawn "amixer set Mic toggle")
-- Screen brightness
, ((modm, xK_F5 ), spawn "xbacklight -10")
, ((modm, xK_F6 ), spawn "xbacklight +10")
  -- Keyboard (zsh "bepo" alias not available)
, ((modm, xK_F11 ), spawn "kbswitch.sh lafayette")
, ((modm, xK_F12 ), spawn "kbswitch.sh bepo")
-- Launch applications
, ((modm, xK_p ), spawn "dmenu_run")
, ((modm .|. shiftMask, xK_d ), spawn "deadbeef")
, ((modm .|. shiftMask, xK_f ), spawn "firefox")
, ((modm .|. shiftMask, xK_g ), spawn "chromium")
-- Bottom row, xmonad related
, ((modm, xK_w ), goToSelected myGSConfig)
-- Resize viewed windows to the correct size
, ((modm, xK_x ), refresh)
-- Close focused window
, ((modm .|. shiftMask, xK_c ), kill)
, ((modm, xK_Escape), kill)
-- Push window back into tiling
, ((modm, xK_v ), withFocused $ windows . W.sink)
, ((modm, xK_b ), withFocused toggleBorder)
-- Quit xmonad
, ((modm .|. shiftMask, xK_n ), io (exitWith ExitSuccess))
-- Restart xmonad
, ((modm, xK_n ), spawn "xmonad --recompile; xmonad --restart")
-- Rotate through the available layout algorithms
, ((modm, xK_space ), sendMessage NextLayout)
-- Reset the layouts on the current workspace to default
, ((modm .|. shiftMask, xK_space ), setLayout $ XMonad.layoutHook conf)
-- Move focus to the next window
, ((modm, xK_Tab ), windows W.focusDown)
-- Move focus to the next window
, ((modm, xK_j ), windows W.focusDown)
-- Move focus to the previous window
, ((modm, xK_k ), windows W.focusUp)
-- Move focus to the master window
, ((modm, xK_m ), windows W.focusMaster)
-- Swap the focused window and the master window
-- , ((modm, xK_Return), windows W.swapMaster)
-- Swap the focused window with the next window
, ((modm .|. shiftMask, xK_j ), windows W.swapDown)
-- Swap the focused window with the previous window
, ((modm .|. shiftMask, xK_k ), windows W.swapUp)
-- Shrink the master area
, ((modm, xK_h ), sendMessage Shrink)
-- Expand the master area
, ((modm, xK_l ), sendMessage Expand)
-- Move to prev workspace
, ((modm, xK_Left ), prevWS)
-- Move to next workspace
, ((modm, xK_Right ), nextWS)
-- Increment the number of windows in the master area
, ((modm, xK_Up ), sendMessage $ IncMasterN 1)
-- Deincrement the number of windows in the master area
, ((modm, xK_Down ), sendMessage $ IncMasterN (-1))
-- Toggle the status bar gap
-- Use this binding with avoidStruts from Hooks.ManageDocks.
-- See also the statusBar function from Hooks.DynamicLog.
-- , ((modm, xK_y ), sendMessage ToggleStruts)
]
++
-- mod-[² & é .. ç à], Switch to workspace N
-- mod-shift-[² & é .. ç à], Move client to workspace N
-- use xev to find key numbers
[((m .|. modm, k), windows $ f i)
| (i, k) <- zip (XMonad.workspaces conf)
[ xK_twosuperior
, xK_ampersand
, xK_eacute
, xK_quotedbl
, xK_apostrophe
, xK_parenleft
, xK_minus
, xK_egrave
, xK_underscore
, xK_ccedilla
, xK_agrave
]
, (f, m) <- [(W.greedyView, 0), (W.shift, shiftMask)]]
++
-- mod-{a,z,e}, Switch to physical/Xinerama screens 1, 2, or 3
-- mod-shift-{a,z,e}, Move client to screen 1, 2, or 3
[((m .|. modm, key), screenWorkspace sc >>= flip whenJust (windows . f))
| (key, sc) <- zip [xK_a, xK_z, xK_e] [0..]
, (f, m) <- [(W.view, 0), (W.shift, shiftMask)]]
-- Mouse bindings: default actions bound to mouse events
myMouseBindings (XConfig {XMonad.modMask = modm}) = M.fromList $
-- mod-button1, Set the window to floating mode and move by dragging
[ ((modm, button1), (\w -> focus w >> mouseMoveWindow w
>> windows W.shiftMaster))
-- mod-button2, Raise the window to the top of the stack
, ((modm, button2), (\w -> focus w >> windows W.shiftMaster))
-- mod-button3, Set the window to floating mode and resize by dragging
, ((modm, button3), (\w -> focus w >> mouseResizeWindow w
>> windows W.shiftMaster))
]
-- Layouts:
-- If you change layout bindings be sure to use 'mod-shift-space' after
-- restarting (with 'mod-q') to reset your layout state to the new
-- defaults, as xmonad preserves your old layout settings by default.
--
-- Note that each layout is separated by ||| which denotes layout choice.
gridFull = Grid ||| Full
tiersLayouts = tiled ||| Mirror tiled
where
-- Default tiling algorithm partitions the screen into two panes
tiled = Tall nmaster delta ratio
-- Default number of windows in the master pane
nmaster = 1
-- Default proportion of screen occupied by master pane
ratio = 2/3
-- Percent of screen to increment by when resizing panes
delta = 3/100
halvesLayouts = tiled ||| Mirror tiled
where
tiled = Tall nmaster delta ratio
nmaster = 1
ratio = 1/2
delta = 3/100
tiersFirst = smartBorders $ tiersLayouts ||| halvesLayouts ||| gridFull
halvesFirst = smartBorders $ halvesLayouts ||| tiersLayouts ||| gridFull
-- logs & daemons
dishesLayout = limitWindows 5 $ Dishes nmaster ratio
where
nmaster = 1
ratio = 1/5
-- chats
curtainsLayout = Mirror dishesLayout
dishesFirst = smartBorders $ dishesLayout ||| curtainsLayout ||| gridFull
curtainsFirst = smartBorders $ curtainsLayout ||| dishesLayout ||| gridFull
-- music & video players
noBordersLayout = noBorders $ Full
myLayoutHook = onWorkspace "0" dishesFirst $
onWorkspace "4" curtainsFirst $
onWorkspaces ["7", "8"] tiersFirst $
onWorkspaces ["9", "10"] noBordersLayout $
halvesFirst
-- Window rules:
-- Execute arbitrary actions and WindowSet manipulations when managing
-- a new window. You can use this to always float a particular program,
-- or have a client always appear on a particular workspace.
--
-- To find the property name associated with a program, use
-- > xprop | grep WM_CLASS
-- and click on the client you're interested in.
--
-- To match on the WM_NAME, you can use 'title' in the same way that
-- 'className' and 'resource' are used below.
myManageHook :: ManageHook
myManageHook = composeAll
[ className =? "Firefox" --> doShift "2"
, className =? "Chromium" --> doShift "3"
, className =? "Deadbeef" --> doShift "9"
, resource =? "desktop_window" --> doIgnore
, resource =? "kdesktop" --> doIgnore ]
-- Event handling
-- Defines a custom handler function for X Events. The function should
-- return (All True) if the default handler is to be run afterwards. To
-- combine event hooks use mappend or mconcat from Data.Monoid.
myEventHook = mempty
-- Status bars and logging
-- Perform an arbitrary action on each internal state change or X event.
-- See the 'XMonad.Hooks.DynamicLog' extension for examples.
myLogHook = return ()
-- Startup hook
-- Perform an arbitrary action each time xmonad starts or is restarted
-- with mod-q. Used by, e.g., XMonad.Layout.PerWorkspace to initialize
-- per-workspace layout choices.
--
myStartupHook = return ()
-- A structure containing your configuration settings, overriding
-- fields in the default config. Any you don't override will
-- use the defaults defined in xmonad/XMonad/Config.hs
main = do
xmonad $ defaultConfig {
terminal = "xterm",
focusFollowsMouse = True,
borderWidth = 2,
normalBorderColor = "#000000",
focusedBorderColor = "#DC322F",
modMask = mod4Mask,
workspaces = myWorkspaces,
-- key bindings
keys = myKeys,
mouseBindings = myMouseBindings,
-- hooks, layouts
layoutHook = myLayoutHook,
manageHook = myManageHook,
handleEventHook = myEventHook,
logHook = myLogHook,
startupHook = myStartupHook
}
|
Delapouite/dotfiles
|
link/.xmonad/xmonad.azerty.hs
|
mit
| 10,522 | 0 | 13 | 2,993 | 1,901 | 1,155 | 746 | 140 | 1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverlappingInstances #-}
module Text.XML.Generic where
import Data.Word
import Data.Text (Text, pack)
import Data.Char (toLower)
import qualified Data.Map.Lazy as LMap
import GHC.Generics
import Text.XML
import Text.Hamlet.XML
-- Typeclass ToXml
class ToXml a where
toXml :: a -> [Node]
default toXml :: (Generic a, GToXml (Rep a)) => a -> [Node]
toXml a = gToXml (from a)
-- Instances
instance ToXml Int where
toXml i = [xml|#{pack $ show i}|]
instance ToXml Integer where
toXml i = [xml|#{pack $ show i}|]
instance ToXml Double where
toXml d = [xml|#{pack $ show d}|]
instance ToXml Float where
toXml f = [xml|#{pack $ show f}|]
instance ToXml Word8 where
toXml w = [xml|#{pack $ show w}|]
instance ToXml Word16 where
toXml w = [xml|#{pack $ show w}|]
instance ToXml Word32 where
toXml w = [xml|#{pack $ show w}|]
instance ToXml Word64 where
toXml w = [xml|#{pack $ show w}|]
instance ToXml String where
toXml str = [xml|#{pack $ str}|]
instance ToXml a => ToXml [a] where
toXml xs = foldl (\x y -> x ++ (toXml y)) [] xs
-- Generics
-- Generic Type Class
class GToXml f where
gToXml :: f a -> [Node]
--
instance GToXml U1 where
gToXml U1 = []
instance (GToXml a, GToXml b) => GToXml (a :*: b) where
gToXml (a :*: b) = gToXml a ++ gToXml b
instance (GToXml a, GToXml b) => GToXml (a :+: b) where
gToXml (L1 z) = gToXml z
gToXml (R1 z) = gToXml z
-- Use constructor of a datatype as element name
instance (GToXml a, Constructor c) => GToXml (M1 C c a) where
gToXml m1 = NodeElement (Element (Name (pack $ map toLower $ conName m1) Nothing Nothing) LMap.empty (gToXml (unM1 m1))) : []
instance (GToXml a) => GToXml (M1 D c a) where
gToXml (M1 z) = gToXml z
instance (GToXml a, Selector c) => GToXml (M1 S c a) where
gToXml m1 | selName m1 == "" = gToXml (unM1 m1)
| otherwise = NodeElement (Element (Name (pack $ map toLower $ selName m1) Nothing Nothing) LMap.empty (gToXml ( unM1 m1))) : []
instance (ToXml a) => GToXml (K1 i a) where
gToXml (K1 z) = toXml z
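-- A usage sketch (the @Person@ type is hypothetical and assumes
-- @DeriveGeneric@ is enabled in the calling module): the constructor name
-- becomes the element name and each record selector becomes a child
-- element, all lower-cased.
--
-- > data Person = Person { name :: String, age :: Int } deriving Generic
-- > instance ToXml Person
-- >
-- > -- toXml (Person "Ann" 3) yields a <person> node containing
-- > -- <name>Ann</name> and <age>3</age>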
|
jhedev/xml-conduit-generics
|
src/Text/XML/Generic.hs
|
mit
| 2,376 | 0 | 16 | 589 | 861 | 463 | 398 | 57 | 0 |
--myMaybe.hs
module MyMaybe where
safeHead :: [a] -> Maybe a
safeHead [] = Nothing
safeHead (x:_) = Just x
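-- For example (unlike Prelude's 'head', this is total):
--
-- > safeHead ([] :: [Int]) == Nothing
-- > safeHead [1,2,3] == Just 1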
|
deciduously/Haskell-First-Principles-Exercises
|
2-Defining and combining/9-Lists/code/myMaybe.hs
|
mit
| 107 | 0 | 7 | 19 | 48 | 26 | 22 | 4 | 1 |
module Module.Capnp.Pointer (ptrTests) where
import Data.Bits
import Data.Int
import Data.Word
import Test.Hspec
import Test.QuickCheck
import Capnp.Pointer
instance Arbitrary EltSpec where
arbitrary = oneof [ EltNormal <$> arbitrary <*> arbitraryU29
, EltComposite <$> arbitraryI29
]
instance Arbitrary ElementSize where
arbitrary = oneof $ map return [ Sz0
, Sz1
, Sz8
, Sz16
, Sz32
, Sz64
, SzPtr
]
-- | arbitraryIN is an arbitrary N bit signed integer as an Int32.
arbitraryI32, arbitraryI30, arbitraryI29 :: Gen Int32
arbitraryI32 = arbitrary
arbitraryI30 = (`shiftR` 2) <$> arbitraryI32
arbitraryI29 = (`shiftR` 3) <$> arbitraryI32
-- | arbitraryUN is an arbitrary N bit unsigned integer as a Word32.
arbitraryU32, arbitraryU29 :: Gen Word32
arbitraryU32 = arbitrary
arbitraryU29 = (`shiftR` 3) <$> arbitraryU32
instance Arbitrary Ptr where
arbitrary = oneof [ StructPtr <$> arbitraryI30
<*> arbitrary
<*> arbitrary
, ListPtr <$> arbitraryI30
<*> arbitrary
, FarPtr <$> arbitrary
<*> arbitraryU29
<*> arbitrary
, CapPtr <$> arbitrary
]
ptrTests :: Spec
ptrTests = do
ptrProps
parsePtrExamples
ptrProps :: Spec
ptrProps = describe "Pointer Properties" $ do
it "Should satisfy: parseEltSpec . serializeEltSpec == id" $
property $ \spec -> parseEltSpec (serializeEltSpec spec) == spec
it "Should satisfy: parsePtr . serializePtr == id" $
property $ \ptr ->
case ptr of
(Just (StructPtr 0 0 0)) -> True -- we skip this one, since it's
-- the same bits as a null, so this
-- shouldn't hold. TODO: the name
-- of this test is a bit misleading
-- because of this case; should fix
-- that.
_ -> parsePtr (serializePtr ptr) == ptr
parsePtrExamples :: Spec
parsePtrExamples = describe "parsePtr (examples)" $
it "Should parse this example correctly" $
parsePtr 0x0000000200000000 `shouldBe` Just (StructPtr 0 2 0)
|
zenhack/haskell-capnp
|
tests/Module/Capnp/Pointer.hs
|
mit
| 2,729 | 0 | 16 | 1,202 | 443 | 247 | 196 | 52 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module SuperUserSpark.Parser.TestUtils where
import TestImport hiding (succeeds)
import Data.Either (isRight)
import SuperUserSpark.Parser.Internal
import Text.Parsec
import Text.Parsec.String
shouldSucceed
:: (Show a, Eq a)
=> Parser a -> String -> IO ()
shouldSucceed parser input = input `shouldSatisfy` succeeds parser
shouldFail
:: (Show a, Eq a)
=> Parser a -> String -> IO ()
shouldFail parser input = input `shouldNotSatisfy` succeeds parser
succeeds :: Parser a -> String -> Bool
succeeds parser = succeedsWithLeftover $ parser >> eof
succeedsWithLeftover :: Parser a -> String -> Bool
succeedsWithLeftover parser input = isRight $ parseWithoutSource parser input
succeedsAnywhere :: Parser a -> String -> Bool
succeedsAnywhere p s = any (succeedsWithLeftover p) $ tails s
where
tails :: [a] -> [[a]]
tails [] = [[]]
tails ass@(_:as) = ass : tails as
fails :: Parser a -> String -> Bool
fails parser input = not $ succeeds parser input
testInputSource :: Path Abs File
testInputSource = $(mkAbsFile "/Test/input/file")
parseShouldSucceedAs
:: (Show a, Eq a)
=> Parser a -> String -> a -> IO ()
parseShouldSucceedAs parser input a =
parseFromSource parser testInputSource input `shouldBe` Right a
parseShouldBe
:: (Show a, Eq a)
=> Parser a -> String -> Either ParseError a -> IO ()
parseShouldBe parser input result =
parseFromSource parser testInputSource input `shouldBe` result
parseWithoutSource :: Parser a -> String -> Either ParseError a
parseWithoutSource parser = parseFromSource parser testInputSource
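-- A usage sketch inside an Hspec spec module (the @someParser@ name below
-- is hypothetical; any parser from the Internal module fits):
--
-- > describe "someParser" $
-- >   it "accepts this input" $
-- >     shouldSucceed someParser "some input"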
|
NorfairKing/super-user-spark
|
test/SuperUserSpark/Parser/TestUtils.hs
|
mit
| 1,621 | 0 | 10 | 298 | 552 | 284 | 268 | 40 | 2 |
module Hablog.Routes
( routes
) where
import Control.Monad.Reader (runReaderT)
import Hablog.Admin.Pages.Login (loginRequired)
import Hablog.Data (Page)
import Hablog.Data.RequestState (RequestState(..))
import Hablog.Data.Sitemap
import Happstack.Server (Response, ServerPartT)
import Web.Routes (RouteT)
import qualified Hablog.Pages as Pages
import qualified Hablog.Admin.Pages as Admin
routes :: RequestState -> Sitemap -> RouteT Sitemap (ServerPartT IO) Response
routes rq url = runReaderT routes' rq
where
routes' :: Page Response
routes' = case url of
(SiteHome) -> Pages.home
(SiteEntry slug) -> Pages.entry slug
_ -> loginRequired url $ case url of
(AdminHome) -> Admin.home
(AdminLogin) -> Admin.login
(AdminLogout) -> Admin.logout
(AdminEntryNew) -> Admin.entryNew
(AdminEntryList) -> Admin.entryList
(AdminEntryEdit _) -> Admin.home
_ -> Pages.home
|
garrettpauls/Hablog
|
src/Hablog/Routes.hs
|
mit
| 1,038 | 0 | 15 | 280 | 287 | 162 | 125 | 25 | 9 |
module STMGraphs.Heterogenous.DynKey where
import STMGraphs.Prelude
-- |
-- A dynamic value usable as a key in a hash map.
data DynKey =
forall a. (Eq a) => DynKey !Int !TypeRep !a
instance Hashable DynKey where
hashWithSalt s = hashWithSalt s . hash
hash (DynKey h _ _) = h
instance Eq DynKey where
DynKey _ t1 v1 == DynKey _ t2 v2 =
t1 == t2 && unsafeCoerce v1 == v2
dynKey :: (Typeable a, Hashable a, Eq a) => a -> DynKey
dynKey a =
DynKey (hash a) (typeOf a) a
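-- A usage sketch (assuming @unordered-containers@' 'Data.HashMap.Strict'):
-- keys of different types can coexist in one map once wrapped by 'dynKey'.
--
-- > import qualified Data.HashMap.Strict as HashMap
-- >
-- > m :: HashMap.HashMap DynKey String
-- > m = HashMap.fromList
-- >   [ (dynKey (42 :: Int), "int key")
-- >   , (dynKey ("foo" :: String), "string key")
-- >   ]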
|
nikita-volkov/stm-graphs
|
library/STMGraphs/Heterogenous/DynKey.hs
|
mit
| 485 | 0 | 8 | 112 | 190 | 96 | 94 | -1 | -1 |
{-# Language MultiParamTypeClasses, FlexibleInstances, TypeFamilies #-}
module Control.CCA.Apply where
class (F f a ~ r, G a r ~ f) => Apply f a r where
type F f a
type G a r
apply:: f -> a -> r
instance Apply f () f where
type F f () = f
type G () f = f
apply = const
instance (Apply y z r, F y z ~ r, G z r ~ y) => Apply (x -> y) (x, z) r where
type F (x -> y) (x, z) = F y z
type G (x, z) r = x -> G z r
apply f (x, z) = apply (f x) z
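-- For example (a sketch of the uncurrying behaviour this class encodes):
-- a curried function is applied to a right-nested tuple ending in @()@.
--
-- > apply ((+) :: Int -> Int -> Int) (1, (2, ())) -- evaluates to 3
-- > apply (id :: Int -> Int) (5, ())              -- evaluates to 5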
|
farre/ccat
|
src/Control/CCA/Apply.hs
|
mit
| 459 | 0 | 8 | 137 | 264 | 142 | 122 | 14 | 0 |
-- Surprisingly there are only three numbers that can be written as the sum of fourth powers of their digits:
--
-- 1634 = 1^4 + 6^4 + 3^4 + 4^4
-- 8208 = 8^4 + 2^4 + 0^4 + 8^4
-- 9474 = 9^4 + 4^4 + 7^4 + 4^4
-- As 1 = 1^4 is not a sum it is not included.
--
-- The sum of these numbers is 1634 + 8208 + 9474 = 19316.
--
-- Find the sum of all the numbers that can be written as the sum of fifth powers of their digits.
import Data.Char
canBeSumOfDigits :: Int -> Int -> Bool
canBeSumOfDigits exponent' number = sumOfPowers == number
  where
    sumOfPowers = sum $ map ((^ exponent') . digitToInt) $ show number
calc :: Int
calc = sum [x | x <- [2..999999], canBeSumOfDigits 5 x]
main :: IO ()
main = print calc
|
daniel-beard/projecteulerhaskell
|
Problems/p30.hs
|
mit
| 733 | 0 | 15 | 171 | 141 | 77 | 64 | 9 | 1 |
{-# LANGUAGE DataKinds #-}
-- | Example 'Source' based on 'directory-layout'
module Control.Biegunka.Source.Layout
( Layout
, layout
) where
import Control.Applicative (empty)
import Control.Lens
import System.FilePath (takeDirectory, takeFileName)
import System.Directory.Layout (Layout, dir, make)
import Control.Biegunka.Language
import Control.Biegunka.Script
-- | Make the specified layout and use it as a 'Source'
layout
:: Layout a -- ^ Layout to make
-> FilePath -- ^ Layout root
-> Script 'Actions () -- ^ What to do with layout files
-> Script 'Sources ()
layout dirlayout relpath = sourced Source
{ sourceType = "dummy"
, sourceFrom = "localhost"
, sourceTo = relpath
, sourceUpdate = update
}
where
update abspath =
return
( empty
, empty <$ do
l <- make (takeDirectory abspath) (dir (takeFileName abspath) dirlayout)
traverseOf_ _Left print l
)
|
biegunka/biegunka
|
src/Control/Biegunka/Source/Layout.hs
|
mit
| 953 | 0 | 17 | 227 | 221 | 125 | 96 | 26 | 1 |
{-# OPTIONS -funbox-strict-fields #-}
module Cmm.Op where
import Data.Binary
import Util.Gen
import Text.ParserCombinators.ReadP as P
import Text.Read.Lex
{-
Basic operations. These are chosen to be roughly equivalent to c-- operations,
but can be effectively used to generate C or assembly code as well.
An operation consists of the operation itself, the type of the arguments and
return value, and a hint attached to each argument.
A condition is that the operation must be fully determined by the operation
name and the type of its arguments. This specifically does not include the
hint. For instance, since whether a number is signed or unsigned is in the
hint, the operation itself must say whether it is signed or unsigned.
Also, distinct algorithms should be given different operations, for instance
floating point and integer comparison are so different that they should be
separate opcodes, even if it could be determined by the type they operate on.
-}
-- these take 2 arguments of the same type, and return one of the same type.
-- an exception are the mulx routines, which may return a type exactly
-- double in size of the original, and the shift and rotate routines, where the
-- second argument may be of any width and is interpreted as an unsigned
-- number.
--
-- The invariant is that the return type is always exactly determined by the
-- argument types
data BinOp
= Add
| Sub
| Mul
| Mulx
| UMulx
| Div -- ^ round to -Infinity
| Mod -- ^ mod rounding to -Infinity
| Quot -- ^ round to 0
| Rem -- ^ rem rounding to 0
| UDiv -- ^ round to zero (unsigned)
| UMod -- ^ unsigned mod
-- bitwise
| And
| Or
| Xor
| Shl
| Shr -- ^ shift right logical
| Shra -- ^ shift right arithmetic
| Rotl
| Rotr
-- floating
| FAdd
| FSub
| FDiv
| FMul
| FPwr
| FAtan2
-- These all compare two things of the same type, and return a boolean.
| Eq
| NEq
| Gt
| Gte
| Lt
| Lte
-- unsigned versions
| UGt
| UGte
| ULt
| ULte
    -- floating point comparisons
| FEq
| FNEq
| FGt
| FGte
| FLt
| FLte
-- whether two values can be compared at all.
| FOrdered
deriving(Eq,Show,Ord,Read,Enum,Bounded)
{-! derive: Binary !-}
data UnOp
    = Neg   -- ^ two's complement negation
    | Com   -- ^ bitwise complement
-- floating
| FAbs -- ^ floating absolute value
| FNeg -- ^ floating point negation
| Sin
| Cos
| Tan
| Sinh
| Cosh
| Tanh
| Asin
| Acos
| Atan
| Log
| Exp
| Sqrt
-- exotic bit operations
| Bswap -- ^ Switch the order of the bytes in a word
| Ffs -- ^ Returns one plus the index of the least
-- significant 1-bit of x, 0 if x is zero.
| Clz -- ^ number of leading (from MSB) zeros, undefined if zero
| Ctz -- ^ number of trailing (from LSB) zeros, undefined if zero.
| Popcount -- ^ number of bits set to 1 in word
| Parity -- ^ number of bits set to 1 mod 2
deriving(Eq,Show,Ord,Read,Enum,Bounded)
{-! derive: Binary !-}
-- conversion ops
data ConvOp
= F2I -- ^ convert a floating point to an integral value via truncation
| F2U -- ^ convert a floating point to an unsigned integral value via truncation, negative values become zero
| U2F -- ^ convert an unsigned integral value to a floating point number
| I2F -- ^ convert an integral value to a floating point number
| F2F -- ^ convert a float from one precision to another, preserving value as much as possible
| Lobits -- ^ extract the low order bits
| Sx -- ^ sign extend a value (signed)
| Zx -- ^ zero extend a value (unsigned)
| I2I -- ^ perform a 'Lobits' or a 'Sx' depending on the sizes of the arguments
| U2U -- ^ perform a 'Lobits' or a 'Zx' depending on the sizes of the arguments
| B2B -- ^ a nop, useful for coercing hints (bits 2 bits)
deriving(Eq,Show,Ord,Read,Enum,Bounded)
{-! derive: Binary !-}
data ValOp
= NaN
| PInf
| NInf
| PZero
| NZero
deriving(Eq,Show,Ord,Read,Bounded)
{-! derive: Binary !-}
data ArchBits = BitsMax | BitsPtr | BitsUnknown
deriving(Eq,Ord)
{-! derive: Binary !-}
data TyBits = Bits {-# UNPACK #-} !Int | BitsArch !ArchBits | BitsExt String
deriving(Eq,Ord)
{-! derive: Binary !-}
data TyHint
= HintSigned
| HintUnsigned
| HintFloat -- an IEEE floating point value
| HintCharacter -- a unicode character, implies unsigned
| HintNone -- no hint
deriving(Eq,Ord)
{-! derive: Binary !-}
data Ty
= TyBits !TyBits !TyHint
| TyBool
| TyComplex Ty
| TyVector !Int Ty
deriving(Eq,Ord)
{-! derive: Binary !-}
--runReadP :: ReadP a -> String -> Maybe a
--runReadP rp s = case readP_to_S rp s of
-- [(x,"")] -> Just x
-- _ -> Nothing
preadTy :: ReadP Ty
preadTy = choice cs where
cs = [ do string "bool"; return TyBool
, do char 's'; TyBits x _ <- preadTy; return $ TyBits x HintSigned
, do char 'u'; TyBits x _ <- preadTy; return $ TyBits x HintUnsigned
, do char 'f'; TyBits x _ <- preadTy; return $ TyBits x HintFloat
, do char 'c'; TyBits x _ <- preadTy; return $ TyBits x HintCharacter
, do string "bits<"; x <- manyTill P.get (char '>'); return $ TyBits (f x) HintNone
, do string "bits"; x <- readDecP; return $ TyBits (Bits x) HintNone
, do n <- readDecP; char '*'; t <- preadTy; return (TyVector n t)
, do string "i"; t <- preadTy; return (TyComplex t)
]
f "ptr" = BitsArch BitsPtr
f "max" = BitsArch BitsMax
f "?" = BitsArch BitsUnknown
f x = BitsExt x
readTy :: Monad m => String -> m Ty
readTy s = case runReadP preadTy s of
Nothing -> fail "readTy: not type"
Just x -> return x
stringToOpTy :: String -> Ty
stringToOpTy s = case readTy s of
Just t -> t
_ -> error $ "stringToOpTy: " ++ show s
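-- A few illustrative parses of the type syntax accepted by 'preadTy' (these
-- follow directly from the parser above):
--
--   stringToOpTy "bool"      == TyBool
--   stringToOpTy "bits32"    == TyBits (Bits 32) HintNone
--   stringToOpTy "sbits16"   == TyBits (Bits 16) HintSigned
--   stringToOpTy "bits<ptr>" == TyBits (BitsArch BitsPtr) HintNone
--   stringToOpTy "2*fbits32" == TyVector 2 (TyBits (Bits 32) HintFloat)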
bool = TyBool
bits_ptr = TyBits (BitsArch BitsPtr) HintNone
bits_max = TyBits (BitsArch BitsMax) HintNone
bits8 = TyBits (Bits 8) HintNone
bits16 = TyBits (Bits 16) HintNone
bits32 = TyBits (Bits 32) HintNone
bits64 = TyBits (Bits 64) HintNone
class ToCmmTy a where
toCmmTy :: a -> Maybe Ty
instance ToCmmTy Ty where
toCmmTy a = Just a
instance ToCmmTy String where
toCmmTy s = readTy s
cmmTyBits :: ToCmmTy a => a -> Maybe Int
cmmTyBits x = do TyBits (Bits b) _ <- toCmmTy x; return b
cmmTyHint x = do TyBits _ hint <- toCmmTy x; return hint
instance Show TyHint where
showsPrec _ HintSigned = ('s':)
showsPrec _ HintUnsigned = ('u':)
showsPrec _ HintFloat = ('f':)
showsPrec _ HintCharacter = ('c':)
showsPrec _ HintNone = id
instance Show Ty where
showsPrec _ TyBool = showString "bool"
showsPrec _ (TyBits b h) = shows h . showString "bits" . shows b
showsPrec _ (TyVector n t) = shows n . showChar '*' . shows t
showsPrec _ (TyComplex t) = showChar 'i' . shows t
instance Show TyBits where
showsPrec _ (Bits n) = shows n
showsPrec _ (BitsExt s) = showChar '<' . showString s . showChar '>'
showsPrec _ (BitsArch s) = showChar '<' . shows s . showChar '>'
instance Show ArchBits where
show BitsMax = "max"
show BitsPtr = "ptr"
show BitsUnknown = "?"
data Op v
= BinOp BinOp v v
| UnOp UnOp v
| ValOp ValOp
| ConvOp ConvOp v
deriving(Eq,Show,Ord)
{-! derive: Binary !-}
binopType :: BinOp -> Ty -> Ty -> Ty
binopType Mulx (TyBits (Bits i) h) _ = TyBits (Bits (i*2)) h
binopType UMulx (TyBits (Bits i) h) _ = TyBits (Bits (i*2)) h
binopType Eq _ _ = TyBool
binopType NEq _ _ = TyBool
binopType Gt _ _ = TyBool
binopType Gte _ _ = TyBool
binopType Lt _ _ = TyBool
binopType Lte _ _ = TyBool
binopType UGt _ _ = TyBool
binopType UGte _ _ = TyBool
binopType ULt _ _ = TyBool
binopType ULte _ _ = TyBool
binopType FEq _ _ = TyBool
binopType FNEq _ _ = TyBool
binopType FGt _ _ = TyBool
binopType FGte _ _ = TyBool
binopType FLt _ _ = TyBool
binopType FLte _ _ = TyBool
binopType FOrdered _ _ = TyBool
binopType _ t1 _ = t1
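-- A few examples, using the synonyms defined above ('bits32' is
-- @TyBits (Bits 32) HintNone@):
--
--   binopType Mulx bits32 bits32 == TyBits (Bits 64) HintNone
--   binopType Lt   bits32 bits32 == TyBool
--   binopType Add  bits32 bits32 == bits32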
isCommutable :: BinOp -> Bool
isCommutable x = f x where
f Add = True
f Mul = True
f And = True
f Or = True
f Xor = True
f Eq = True
f NEq = True
f FAdd = True
f FMul = True
f FEq = True
f FNEq = True
f FOrdered = True
f _ = False
commuteBinOp :: BinOp -> Maybe BinOp
commuteBinOp x | isCommutable x = return x
commuteBinOp Lt = return Gt
commuteBinOp Gt = return Lt
commuteBinOp Lte = return Gte
commuteBinOp Gte = return Lte
commuteBinOp ULt = return UGt
commuteBinOp UGt = return ULt
commuteBinOp ULte = return UGte
commuteBinOp UGte = return ULte
commuteBinOp FLt = return FGt
commuteBinOp FGt = return FLt
commuteBinOp FLte = return FGte
commuteBinOp FGte = return FLte
commuteBinOp _ = Nothing
isAssociative :: BinOp -> Bool
isAssociative x = f x where
f Add = True
f Mul = True
f And = True
f Or = True
f Xor = True
f _ = False
unopFloat :: Ty -> UnOp -> Maybe String
unopFloat (TyBits b HintFloat) op = g b =<< f op where
g (Bits 64) x = return x
g (Bits 32) x = return $ x ++ "f"
g _ _ = Nothing
f FAbs = return "fabs"
f Sin = return "sin"
f Cos = return "cos"
f Tan = return "tan"
f Sinh = return "sinh"
f Cosh = return "cosh"
f Tanh = return "tanh"
f Asin = return "asin"
f Acos = return "acos"
f Atan = return "atan"
f Sqrt = return "sqrt"
f Log = return "log"
f Exp = return "exp"
f _ = Nothing
unopFloat _ _ = Nothing
binopFunc :: Ty -> Ty -> BinOp -> Maybe String
binopFunc (TyBits b _) _ bop = g b =<< f bop where
g (Bits 64) x = return x
g (Bits 32) x = return $ x ++ "f"
g _ _ = Nothing
f FPwr = Just "pow"
f FAtan2 = Just "atan2"
f _ = Nothing
binopFunc TyBool _ _ = Nothing
binopInfix :: BinOp -> Maybe (String,Int)
binopInfix UDiv = Just ("/",8)
binopInfix Mul = Just ("*",8)
binopInfix UMod = Just ("%",8)
binopInfix Sub = Just ("-",7)
binopInfix Add = Just ("+",7)
binopInfix Shr = Just (">>",6)
binopInfix Shl = Just ("<<",6)
binopInfix And = Just ("&",5)
binopInfix Xor = Just ("^",4)
binopInfix Or = Just ("|",3)
binopInfix UGte = Just (">=",2)
binopInfix UGt = Just (">",2)
binopInfix ULte = Just ("<=",2)
binopInfix ULt = Just ("<",2)
binopInfix Eq = Just ("==",2)
binopInfix NEq = Just ("!=",2)
binopInfix _ = Nothing
class IsOperator o where
isCheap :: o -> Bool
isEagerSafe :: o -> Bool
instance IsOperator BinOp where
isCheap FAtan2 = False
isCheap _ = True
isEagerSafe Div = False
isEagerSafe Mod = False
isEagerSafe Quot = False
isEagerSafe Rem = False
isEagerSafe UDiv = False
isEagerSafe UMod = False
isEagerSafe _ = True
instance IsOperator UnOp where
isCheap _ = True
isEagerSafe _ = True
instance IsOperator ConvOp where
isCheap _ = True
isEagerSafe _ = True
instance IsOperator (Op v) where
isCheap (BinOp o _ _) = isCheap o
isCheap (UnOp o _) = isCheap o
isCheap (ConvOp o _) = isCheap o
isCheap _ = False
isEagerSafe (BinOp o _ _) = isEagerSafe o
isEagerSafe (UnOp o _) = isEagerSafe o
isEagerSafe (ConvOp o _) = isEagerSafe o
isEagerSafe _ = False
|
dec9ue/jhc_copygc
|
src/Cmm/Op.hs
|
gpl-2.0
| 11,506 | 0 | 13 | 3,226 | 3,374 | 1,740 | 1,634 | 317 | 16 |
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses, OverlappingInstances, DeriveDataTypeable, StandaloneDeriving, TypeSynonymInstances, TupleSections, FlexibleInstances, NoMonomorphismRestriction #-}
module AVLBaumBonn.Central where
import qualified TextConfig
import Challenger.Partial (Verify(..), Partial(..))
import Autolib.ToDoc -- (derives, makeToDoc, Doc(..), text, vcat, hcat, ($$), (<>), (<+>), hsep, toDoc, nest, ToDoc(..), docParen, fsep, (</>), empty)
import Autolib.Reader -- (makeReader, Reader(..), {- only needed inside derived code: -} readerParenPrec, my_reserved, pzero, (<|>))
import Autolib.Reporter (Reporter, reject, inform)
import Autolib.Dot.Dot
import Autolib.Hash
import qualified Autolib.Dot.Graph
import qualified Autolib.Dot.Node
import qualified Autolib.Dot.Edge
import Autolib.Size (Size(size))
import Inter.Types (OrderScore(..), ScoringOrder(Increasing), direct)
import Data.Typeable (Typeable)
import Control.Monad (when,unless,guard)
import Data.List (zip5, transpose, intersperse)
import qualified Baum.Such.Generate
import qualified Baum.Such.Op
import qualified Baum.Such.Class
import qualified Baum.AVL.Type
import qualified Baum.AVL.Ops
import Data.Traversable (traverse)
import Control.Monad.State (evalState, get, put)
import Baum.AVL.Type (isLeaf, left, right, key)
import AVLBaumBonn.GraphViz
import AVLBaumBonn.Conversion
instance Baum.Such.Class.Such Baum.AVL.Type.AVLTree where
empty = Baum.AVL.Type.leaf
isEmpty = Baum.AVL.Type.isLeaf
contains = Baum.AVL.Ops.contains
insert = Baum.AVL.Ops.insert
delete = error "Delete is für AVL-Bäume nicht implementiert"
equal = (==)
contents = Baum.AVL.Type.inorder
data AVLBaum = AVLBaum deriving Typeable
$(derives [makeReader, makeToDoc] [''AVLBaum])
-------------------------
data Feedback = Always
| OnlyOnCompletion
| None
deriving (Typeable, Eq)
$(derives [makeReader, makeToDoc] [''Feedback])
data Config = Config
{ feedback :: Feedback
, startTree :: AVLTreeBonn
, operations :: [Baum.Such.Op.Op Int]
, finalTree :: AVLTreeBonn
}
deriving (Typeable)
$(derives [makeReader, makeToDoc] [''Config])
instance OrderScore AVLBaum where
scoringOrder h = Increasing
instance Verify AVLBaum Config where
verify _ cfg = do
return ()
make_fixed = direct AVLBaum $ Config
{ feedback = Always
, startTree = Empty
, operations = [Baum.Such.Op.Any]
, finalTree = Node 42 Empty Empty
}
data AVLOp = Insert Int
| MyInsert Int
| Any
convertOp :: Baum.Such.Op.Op Int -> AVLOp
convertOp (Baum.Such.Op.Insert x) = Insert x
convertOp Baum.Such.Op.Any = Any
convertOp _ = error "Operation not valid on AVL Tree"
newtype OpList = OpList [AVLOp] deriving (Typeable)
$(derives [makeReader] [''AVLOp])
instance ToDoc AVLOp where
toDoc (Insert a) = text "Insert(" <> toDoc a <> text ")"
toDoc (MyInsert a) = text "MyInsert(" <> toDoc a <> text ")"
toDoc Any = text "Any"
instance Reader OpList where
reader = do
ops <- reader
return (OpList ops)
instance ToDoc OpList where
toDoc (OpList ops) = text (show ops)
instance Size OpList where
size (OpList ops) = length ops
instance Partial AVLBaum Config OpList where
report _ (Config fb startB plan endB) = do
let start = debonnifyTree startB
end = debonnifyTree endB
if isLeaf start
then do
inform $ vcat [ text "Auf einen leeren Baum sollen diese Operationen angewendet werden"
, text "(wobei Sie jedes Any durch ein MyInsert mit dem einzufügenden Element als Argument ersetzen sollen):"
]
else do
inform $ vcat [ text "Auf den Baum:"
, text ""
, text (toPng start)
, text ""
, text "sollen diese Operationen angewendet werden"
, text "(wobei Sie jedes Any durch ein MyInsert mit dem einzufügenden Element als Argument ersetzen sollen):"
]
inform $ vcat [ nest 4 $ niceOps (map convertOp plan)
, text ""
, text "so dass dieser Baum entsteht:"
, text ""
, text (toPng end)
]
when (fb == None) $ do
inform TextConfig.noFeedbackDisclaimer
inform $ text "<span style='color:red'>Hinweis: Die zum Rebalancieren des Baumes nötigen <em>Rotationen</em> werden beim Einfügen automatisch durchgeführt. Sie müssen diese <em>nicht</em> mit angeben.</span>"
initial _ (Config _ _ plan _) =
OpList (map convertOp plan)
total _ (Config None _ _ _) _ = do
inform TextConfig.noFeedbackResult
total _ (Config fb startB plan endB) (OpList ops) = do
c <- steps start (map convertOp plan) ops []
if c == end
then inform TextConfig.ok
else rejectTreeAlways c ops $ text "Resultat stimmt nicht mit Aufgabenstellung überein."
where
start = debonnifyTree startB
end = debonnifyTree endB
rejectTree :: Baum.AVL.Type.AVLTree Int -> [AVLOp] -> Doc -> Reporter a
rejectTree b ops reason = do
case fb of
OnlyOnCompletion -> do
reject $ text "Nein." <+> reason
Always -> do
rejectTreeAlways b ops reason
rejectTreeAlways b ops reason = do
inform $ vcat [ text "Nein." <+> reason
, text ""
, text $ "<b>Tatsächlicher Baum* <-> Ziel-Baum</b>"
, text "*Ausgeführte Operationen:" <+> toDoc ops
, hcat [ text $ toPng b -- Tatsächlicher Baum
, text $ toPng end -- Erwarteter Baum
]
]
reject $ text ""
step b op = do
c <- case op of
MyInsert a -> insert a
Insert a -> insert a
_ -> reject $ text "Operation ist unbekannt"
return c
where
insert a = do
            -- If the node to be inserted is already in the tree,
            -- this operation leaves the tree unchanged.
if b `Baum.AVL.Ops.contains` a
then return b
else return $ Baum.AVL.Ops.insert b a
steps b [] [] _ = return b
steps b [] send done = rejectTree b (reverse done) $ vcat
[ text "Sie wollen noch diese Operationen ausführen:"
, nest 4 $ niceOps send
, text "es sind aber keine mehr zugelassen."
]
steps b plan [] done = rejectTree b (reverse done) $ vcat
[ text "Es müssen noch diese Operationen ausgeführt werden:"
, nest 4 $ niceOps plan
]
steps b (p : plan) (s : send) done = do
conforms p s
c <- step b s
steps c plan send (s : done)
where
conforms _ Any = do
rejectTree b (reverse done) $ text "Sie sollen Any durch eine Operation ersetzen."
conforms Any _ = return ()
conforms (Insert x) (Insert y) | x == y = return ()
conforms (Insert x) (MyInsert y) | x == y = return ()
conforms op@(Insert _) _ = do
rejectTree b (reverse done) $ text "Die Operation" <+> toDoc op <+> text "soll nicht geändert werden."
niceOps [] = text "[]"
niceOps (x:xs) = vcat [ text "[" <+> toDoc x
, vcat [ text "," <+> toDoc x' | x' <- xs ]
, text "]"
]
|
Erdwolf/autotool-bonn
|
src/AVLBaumBonn/Central.hs
|
gpl-2.0
| 8,059 | 0 | 17 | 2,742 | 2,030 | 1,056 | 974 | 162 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.StepDT
(
tickStepDT,
) where
import MEnv.Tick
import Game.MEnv
import Game.World
tickStepDT :: World a e =>
TickT -> s -> a -> b -> MEnv' (s, a, b)
tickStepDT dt = \s a b ->
return $ (s, worldTickModify a (+ dt), b)
|
karamellpelle/grid
|
designer/source/Game/StepDT.hs
|
gpl-3.0
| 1,014 | 0 | 11 | 218 | 126 | 79 | 47 | 10 | 1 |
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE TemplateHaskell, StandaloneDeriving, DeriveDataTypeable #-}
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module Db.AcidTypes where
import BaseImport
import Data.SafeCopy
import Audit.ContentHistory
import Audit.EditAction
import Audit.ObjectHistory
import Audit.VersionedChange
import Util.EntityKey
import Util.SequentialId
$(deriveSafeCopy 0 'base ''SeqId)
$(deriveSafeCopy 0 'base ''EntityKey)
$(deriveSafeCopy 0 'base ''Edit)
$(deriveSafeCopy 0 'base ''EditAction)
$(deriveSafeCopy 0 'base ''VersionedChange)
$(deriveSafeCopy 0 'base ''HistoryLine)
-- $(deriveSafeCopy 0 'base ''ContentHistory) doesn't work, need manual deriving below
instance SafeCopy ObjectHistory where
putCopy h = contain $
safePut (versionedChanges h)
>> safePut (historyLines h)
>> safePut (lastAuditId h)
getCopy = contain $ ContentHistory <$> safeGet <*> safeGet <*> safeGet
|
c0c0n3/audidoza
|
app/Db/AcidTypes.hs
|
gpl-3.0
| 978 | 0 | 11 | 167 | 226 | 116 | 110 | 24 | 0 |
-- | Processing export files
{-# LANGUAGE LambdaCase #-}
module Lamdu.Data.Export.JSON.Process (process) where
import Control.Lens.Operators
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.Encode.Pretty as AesonPretty
import qualified Data.ByteString.Lazy as BSL
import qualified Lamdu.Data.Export.JSON.Codec as Codec
import System.IO (Handle, stdin, stdout)
hProcess :: Handle -> Handle -> ([Codec.Entity] -> IO [Codec.Entity]) -> IO ()
hProcess inHandle outHandle f =
BSL.hGetContents inHandle
<&> Aeson.eitherDecode
>>= \case
Left err -> fail err
Right entities ->
f entities
<&> AesonPretty.encodePretty
>>= BSL.hPutStr outHandle
process :: ([Codec.Entity] -> IO [Codec.Entity]) -> IO ()
process = hProcess stdin stdout
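-- For example, @process pure@ reads the entities from stdin and writes them
-- back to stdout pretty-printed but otherwise unchanged.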
|
da-x/lamdu
|
Lamdu/Data/Export/JSON/Process.hs
|
gpl-3.0
| 829 | 0 | 12 | 181 | 230 | 132 | 98 | 20 | 2 |
import Graphics.Gloss
import Graphics.Gloss.Interface.IO.Game
import Graphics.Gloss.Game
import Graphics.Gloss.Data.Point
import Data.Fixed
import Data.List(elemIndex)
import Data.Maybe
import Control.Monad
import World
import Constants
import Castle
import Game
import GUI
main = do
let
window = InWindow "you are the monster" (100, 100) (windowWidth, windowHeight)
game <- newGame "test"
playIO window black 60 game render input update
input :: Event -> Game -> IO Game
input event game
| EventKey (SpecialKey KeyEsc) Down _ _ <- event = error "exit"
| EventKey (MouseButton LeftButton) Down _ p <- event = clickOn p game (topGui game)
| otherwise = return game
handleClickIngame :: ActionHandler Game
handleClickIngame (x,y) game = do
action <- return $ getTileAtPosition x y game >>= \(Tile name _) -> Just(print name)
case action of
        Just a  -> a
        Nothing -> return ()
return game
getTileAtPosition :: Float -> Float -> Game -> Maybe Tile
getTileAtPosition x y game =
    getTile (world game) (mk x) (mk y)
    where mk a = round (a / fromIntegral tileSize)
drawWorld :: World -> IRectangle -> Picture
drawWorld world (x0,y0,x1,y1) =
pictures $ do
(row, y) <- skipTake (y1-y0) y0 $ tiles world
(tile, x) <- skipTake (x1-x0) x0 row
return $ drawTile tile x y
where
skipTake s t list = (take s . drop t) list `zip` [0..]
-- draw a tile that is already adjusted to the screen
drawTile (Tile _ t) x y = translate nx ny t
where
nx = new x
ny = new y
new a = fromIntegral $ a*tileSize
windowWidthF = fromIntegral windowWidth :: Float
windowHeightF = fromIntegral windowHeight :: Float
data ClickRect = ClickRect Point Point (Game -> IO Game)
gameRect = Rectangle (-windowWidthF/2,-windowHeightF/2) (windowWidthF/2,windowHeightF/2)
ingameRect = Rectangle (-windowWidthF/2,-windowHeightF/2+100) (windowWidthF/2,windowHeightF/2)
background :: Game -> Gui Game
background game = Gui (Button pic handleClickIngame) ingameRect
where pic = drawWorld (world game) (rectangle game)
guiBar game = Gui (Group ls)
(Rectangle(-windowWidthF/2,-windowHeightF/2) (windowWidthF/2,-windowHeightF/2+100))
where ls= [ Gui (Static guiBarBottom) $ Rectangle (0,0) (0,0)
, Gui (Button exitButton (\_ _ -> error "exit"))
$ Rectangle (-windowWidthF/2,-50) (-windowWidthF/2+100,50)
]
exitButton = pictures [
rectangleWire 150 100
, translate (-50) 0 $ scale 0.2 0.2 $ text "Exit"
]
handle :: Point -> Game -> ClickRect -> IO Game
handle cursor game (ClickRect p0 p1 f) = do
if pointInBox cursor p0 p1
then f game
else return game
castle game = Gui (Static $ drawCastleInfo testCastle) $ Rectangle (00,0) (00,0)
topGui game = Gui (Group children) gameRect
where children =
($game) `map` [ guiBar
, background
, castle
]
guiBarBottom = png "data/gui-down.png"
drawGuiBar :: Game -> Picture
drawGuiBar game =
let guiBar = guiBarBottom
in translate 0.0 (-windowHeightF/2.0+50.0) $ pictures [guiBar]
render :: Game -> IO Picture
render game = do
--let background = drawWorld (world game) (rectangle game)
let bgRect = color (greyN 0.5) $!
rectangleSolid windowWidthF windowHeightF
return $ pictures [bgRect, draw $ topGui game]
update t game = do
return (game { time = (time game)+t })
|
Marthog/ld33
|
Main.hs
|
gpl-3.0
| 3,535 | 0 | 14 | 875 | 1,284 | 658 | 626 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Genomics.Pipelines.Run
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Runs a pipeline. The returned Operation\'s metadata field will contain a
-- google.genomics.v2alpha1.Metadata object describing the status of the
-- pipeline execution. The [response] field will contain a
-- google.genomics.v2alpha1.RunPipelineResponse object if the pipeline
-- completes successfully. **Note:** Before you can use this method, the
-- Genomics Service Agent must have access to your project. This is done
-- automatically when the Cloud Genomics API is first enabled, but if you
-- delete this permission, or if you enabled the Cloud Genomics API before
-- the v2alpha1 API launch, you must disable and re-enable the API to grant
-- the Genomics Service Agent the required permissions. Authorization
-- requires the following [Google IAM](https:\/\/cloud.google.com\/iam\/)
-- permission: * \`genomics.operations.create\` [1]: \/genomics\/gsa
--
-- /See:/ <https://cloud.google.com/genomics Genomics API Reference> for @genomics.pipelines.run@.
module Network.Google.Resource.Genomics.Pipelines.Run
(
-- * REST Resource
PipelinesRunResource
-- * Creating a Request
, pipelinesRun
, PipelinesRun
-- * Request Lenses
, prXgafv
, prUploadProtocol
, prAccessToken
, prUploadType
, prPayload
, prCallback
) where
import Network.Google.Genomics.Types
import Network.Google.Prelude
-- | A resource alias for @genomics.pipelines.run@ method which the
-- 'PipelinesRun' request conforms to.
type PipelinesRunResource =
"v2alpha1" :>
"pipelines:run" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] RunPipelineRequest :>
Post '[JSON] Operation
-- | Runs a pipeline. The returned Operation\'s metadata field will contain a
-- google.genomics.v2alpha1.Metadata object describing the status of the
-- pipeline execution. The [response] field will contain a
-- google.genomics.v2alpha1.RunPipelineResponse object if the pipeline
-- completes successfully. **Note:** Before you can use this method, the
-- Genomics Service Agent must have access to your project. This is done
-- automatically when the Cloud Genomics API is first enabled, but if you
-- delete this permission, or if you enabled the Cloud Genomics API before
-- the v2alpha1 API launch, you must disable and re-enable the API to grant
-- the Genomics Service Agent the required permissions. Authorization
-- requires the following [Google IAM](https:\/\/cloud.google.com\/iam\/)
-- permission: * \`genomics.operations.create\` [1]: \/genomics\/gsa
--
-- /See:/ 'pipelinesRun' smart constructor.
data PipelinesRun =
PipelinesRun'
{ _prXgafv :: !(Maybe Xgafv)
, _prUploadProtocol :: !(Maybe Text)
, _prAccessToken :: !(Maybe Text)
, _prUploadType :: !(Maybe Text)
, _prPayload :: !RunPipelineRequest
, _prCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PipelinesRun' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prXgafv'
--
-- * 'prUploadProtocol'
--
-- * 'prAccessToken'
--
-- * 'prUploadType'
--
-- * 'prPayload'
--
-- * 'prCallback'
pipelinesRun
:: RunPipelineRequest -- ^ 'prPayload'
-> PipelinesRun
pipelinesRun pPrPayload_ =
PipelinesRun'
{ _prXgafv = Nothing
, _prUploadProtocol = Nothing
, _prAccessToken = Nothing
, _prUploadType = Nothing
, _prPayload = pPrPayload_
, _prCallback = Nothing
}
-- | V1 error format.
prXgafv :: Lens' PipelinesRun (Maybe Xgafv)
prXgafv = lens _prXgafv (\ s a -> s{_prXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
prUploadProtocol :: Lens' PipelinesRun (Maybe Text)
prUploadProtocol
= lens _prUploadProtocol
(\ s a -> s{_prUploadProtocol = a})
-- | OAuth access token.
prAccessToken :: Lens' PipelinesRun (Maybe Text)
prAccessToken
= lens _prAccessToken
(\ s a -> s{_prAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
prUploadType :: Lens' PipelinesRun (Maybe Text)
prUploadType
= lens _prUploadType (\ s a -> s{_prUploadType = a})
-- | Multipart request metadata.
prPayload :: Lens' PipelinesRun RunPipelineRequest
prPayload
= lens _prPayload (\ s a -> s{_prPayload = a})
-- | JSONP
prCallback :: Lens' PipelinesRun (Maybe Text)
prCallback
= lens _prCallback (\ s a -> s{_prCallback = a})
instance GoogleRequest PipelinesRun where
type Rs PipelinesRun = Operation
type Scopes PipelinesRun =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/genomics"]
requestClient PipelinesRun'{..}
= go _prXgafv _prUploadProtocol _prAccessToken
_prUploadType
_prCallback
(Just AltJSON)
_prPayload
genomicsService
where go
= buildClient (Proxy :: Proxy PipelinesRunResource)
mempty
|
brendanhay/gogol
|
gogol-genomics/gen/Network/Google/Resource/Genomics/Pipelines/Run.hs
|
mpl-2.0
| 6,034 | 0 | 16 | 1,282 | 728 | 434 | 294 | 102 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SWF.DeprecateDomain
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deprecates the specified domain. After a domain has been deprecated it cannot
-- be used to create new workflow executions or register new types. However, you
-- can still use visibility actions on this domain. Deprecating a domain also
-- deprecates all activity and workflow types registered in the domain.
-- Executions that were started before the domain was deprecated will continue
-- to run.
--
-- This operation is eventually consistent. The results are best effort and may
-- not exactly reflect recent updates and changes. Access Control
--
-- You can use IAM policies to control this action's access to Amazon SWF
-- resources as follows:
--
-- Use a 'Resource' element with the domain name to limit the action to only
-- specified domains. Use an 'Action' element to allow or deny permission to call
-- this action. You cannot use an IAM policy to constrain this action's
-- parameters. If the caller does not have sufficient permissions to invoke the
-- action, or the parameter values fall outside the specified constraints, the
-- action fails. The associated event attribute's cause parameter will be set to
-- OPERATION_NOT_PERMITTED. For details and example IAM policies, see <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAMto Manage Access to Amazon SWF Workflows>.
--
-- <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_DeprecateDomain.html>
module Network.AWS.SWF.DeprecateDomain
(
-- * Request
DeprecateDomain
-- ** Request constructor
, deprecateDomain
-- ** Request lenses
, dd1Name
-- * Response
, DeprecateDomainResponse
-- ** Response constructor
, deprecateDomainResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.SWF.Types
import qualified GHC.Exts
newtype DeprecateDomain = DeprecateDomain
{ _dd1Name :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DeprecateDomain' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dd1Name' @::@ 'Text'
--
deprecateDomain :: Text -- ^ 'dd1Name'
-> DeprecateDomain
deprecateDomain p1 = DeprecateDomain
{ _dd1Name = p1
}
-- | The name of the domain to deprecate.
dd1Name :: Lens' DeprecateDomain Text
dd1Name = lens _dd1Name (\s a -> s { _dd1Name = a })
data DeprecateDomainResponse = DeprecateDomainResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeprecateDomainResponse' constructor.
deprecateDomainResponse :: DeprecateDomainResponse
deprecateDomainResponse = DeprecateDomainResponse
instance ToPath DeprecateDomain where
toPath = const "/"
instance ToQuery DeprecateDomain where
toQuery = const mempty
instance ToHeaders DeprecateDomain
instance ToJSON DeprecateDomain where
toJSON DeprecateDomain{..} = object
[ "name" .= _dd1Name
]
instance AWSRequest DeprecateDomain where
type Sv DeprecateDomain = SWF
type Rs DeprecateDomain = DeprecateDomainResponse
request = post "DeprecateDomain"
response = nullResponse DeprecateDomainResponse
|
dysinger/amazonka
|
amazonka-swf/gen/Network/AWS/SWF/DeprecateDomain.hs
|
mpl-2.0
| 4,138 | 0 | 9 | 838 | 364 | 229 | 135 | 47 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Model.Item(Item, Slug, backlinks, getItem, getItems, postItem) where
import Control.Monad (liftM)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Either (EitherT, left)
import Database.PostgreSQL.Simple (In (In), Only (Only),
fromOnly)
import Database.PostgreSQL.Simple.FromRow (FromRow, field, fromRow)
import Database.PostgreSQL.Simple.ToField (ToField, toField)
import Database.PostgreSQL.Simple.ToRow (ToRow, toRow)
import Database.PostgreSQL.Simple.Types (PGArray (PGArray),
fromPGArray)
import Servant (ServantErr, err401, err404)
import Database (execute, query)
import Model.Item.Definition (Item (..), Slug,
Visibility (..))
import Model.User (Role (..))
instance ToField Visibility where
toField = toField . show
instance FromRow Item where
fromRow = Item <$> field <*> field <*> field <*> liftM fromPGArray field
<*> field <*> field <*> liftM read field
instance ToRow Item where
toRow item = [ toField (created_at item)
, toField (idata item)
, toField (slug item)
, toField (PGArray $ tags item)
, toField (title item)
, toField (itype item)
, toField (visibility item)
]
roleVis :: Role -> [Visibility]
roleVis Guest = [Public]
roleVis Member = [Public, Family]
roleVis Admin = [Public, Family, Private]
getItems :: Role -> EitherT ServantErr IO [Item]
getItems role = liftIO $ query
"SELECT * FROM items WHERE visibility IN ?"
(Only (In $ roleVis role))
getItem :: Role -> Slug -> EitherT ServantErr IO Item
getItem role slug = do items <- liftIO $ query
"SELECT * FROM items WHERE visibility IN ? AND slug = ?"
(In $ roleVis role, slug)
case items of
[] -> left err404
(i:_) -> return i
postItem :: Role -> Item -> EitherT ServantErr IO Item
postItem Admin item = liftIO $ execute
"INSERT INTO items (created_at, idata, slug, tags, title, itype, visibility) VALUES (?, ?, ?, ? :: text[], ?, ?, ? )"
item >> return item
postItem _ _ = left err401
backlinks :: Role -> Slug -> EitherT ServantErr IO [Slug]
backlinks role slug = do slugs <- liftIO $ query
"SELECT slug FROM items WHERE visibility IN ? AND ? = ANY(tags)"
(In $ roleVis role, slug)
return $ map fromOnly slugs
|
bitraten/bitrest
|
src/Model/Item.hs
|
agpl-3.0
| 3,069 | 0 | 12 | 1,245 | 724 | 398 | 326 | 55 | 2 |
module Lupo.ConnectionPool
( ConnectionPool (..)
, withConnection
, makeConnectionPool
) where
import qualified Control.Concurrent.Chan as CC
import Control.Monad.CatchIO
import Control.Monad.Trans
import Lupo.Import
data ConnectionPool conn = ConnectionPool
{ checkoutConnection :: IO conn
, checkinConnection :: conn -> IO ()
}
withConnection :: MonadCatchIO m => ConnectionPool conn -> (conn -> m a) -> m a
withConnection pool = bracket checkout checkin
where
checkout = liftIO $ checkoutConnection pool
checkin = liftIO . checkinConnection pool
makeConnectionPool :: IO conn -> Int -> IO (ConnectionPool conn)
makeConnectionPool builder nConns = do
pool <- initPool
nConns `replicateM_` (checkinConnection pool =<< builder)
pure pool
where
initPool = do
chan <- CC.newChan
pure ConnectionPool
{ checkinConnection = CC.writeChan chan
, checkoutConnection = CC.readChan chan
}
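-- Example usage (a sketch only; 'connectMyDb' and 'useConnection' stand in
-- for whatever connection constructor and consumer the application provides):
--
-- > pool <- makeConnectionPool connectMyDb 4
-- > withConnection pool $ \conn -> useConnection conn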
|
keitax/lupo
|
src/Lupo/ConnectionPool.hs
|
lgpl-3.0
| 956 | 0 | 13 | 197 | 269 | 142 | 127 | 25 | 1 |
-- | Simplify doctests by pretty-printing results.
module VersionFile.Parser.Test where
-- | Print the result if there is one.
-- >>> testV id $ return 4
-- 4
testV :: Show s => (a -> s) -> Maybe a -> IO ()
testV _ Nothing = putStrLn "Nothing"
testV show' (Just x) = print (show' x)
-- | Print each result on a separate line.
-- >>> testVs id $ return [1, 2, 3]
-- 1
-- 2
-- 3
testVs :: Show s => (a -> s) -> Maybe [a] -> IO ()
testVs _ Nothing = putStrLn "Nothing"
testVs show' (Just xs) = mapM_ (print . show') xs
|
gelisam/cabal-rangefinder
|
src/VersionFile/Parser/Test.hs
|
unlicense
| 519 | 0 | 9 | 113 | 168 | 88 | 80 | 7 | 1 |
import Control.Concurrent
import Control.Monad
import Data.Char
import Data.Either
import Data.Ratio
import Data.Word
import System.BSD.Sysctl
import System.Directory
import System.Environment
import System.Exit
import System.Posix.Signals
import System.Process
import System.IO
import System.IO.Error
import System.Info
import Text.Printf
import Text.Read
import qualified DateFormatter as DF
import qualified HostConfiguration as HC
type CPUUsed = Integer
type CPUTotal = Integer
data CPULoad = CPULoad CPUUsed CPUTotal
type NetRx = Integer
type NetTx = Integer
data NetLoad = NetLoad NetRx NetTx
type MemTotal = Integer
type MemFree = Integer
data MemStat = MemStat MemTotal MemFree
type SwapPercent = Integer
data Stats = Stats {
conf :: HC.HostConfiguration
, pipe :: Handle
, cpuPercent :: Integer
, cpuCount :: Integer
, memstat :: MemStat
, swapperc :: SwapPercent
, netload :: NetLoad
, maybe_battime :: Maybe Integer
}
myActiveColor = "#a8ff60"
myInactiveColor = "#606060"
myDefaultColor = "orange"
myMediumLoadColor = "yellow"
myHighLoadColor = "red"
getNetLoad :: Maybe Handle -> NetLoad -> IO NetLoad
getNetLoad (Just pipe) lastnetload = do
ready <- catchIOError (hReady pipe) (\_ -> return False)
if ready
then do
l <- words <$> hGetLine pipe
getNetLoad (Just pipe) $ case readMaybe (head l) :: Maybe Integer of
Nothing -> lastnetload
_ -> NetLoad (read $ l !! 3) (read $ l !! 6)
else
return lastnetload
getNetLoad _ lastnetload = return lastnetload
netspeed :: Integer -> String
netspeed x
| x > 2 * 1024 ^ 3 = printf "%.2fGB" ((fromIntegral x /(1024^3)) :: Double)
| x > 2 * 1024 ^ 2 = printf "%.2fMB" ((fromIntegral x /(1024^2)) :: Double)
| x > 2 * 1024 = printf "%.2fkB" ((fromIntegral x /1024) :: Double)
| otherwise = printf "%d B " x
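-- For example: netspeed 1000 == "1000 B ", netspeed (3 * 1024^2) == "3.00MB".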
hotCPUColor :: Integer -> Integer -> String
hotCPUColor perc cpucount
| perc < (100 `div` cpucount) = myDefaultColor
| perc < (100 `div` cpucount) * 2 = myMediumLoadColor
| otherwise = myHighLoadColor
hotMemColor :: MemStat -> String
hotMemColor memstat
| perc < 60 = myDefaultColor
| perc < 80 = myMediumLoadColor
| otherwise = myHighLoadColor
where perc = getMemPercent memstat
hotSwapColor :: Integer -> String
hotSwapColor perc
| perc < 5 = myDefaultColor
| perc < 20 = myMediumLoadColor
| otherwise = myHighLoadColor
batteryColor :: Integer -> String
batteryColor minutes
| minutes < 0 = myDefaultColor
| minutes < 15 = myHighLoadColor
| minutes < 30 = myMediumLoadColor
| otherwise = myDefaultColor
batteryIcon :: Integer -> String
batteryIcon minutes
| minutes < 0 = "[ ! ]="
| minutes < 15 = "[' ]="
| minutes < 30 = "[| ]="
| minutes < 90 = "[|' ]="
| minutes < 120 = "[|| ]="
| minutes < 150 = "[||']="
| otherwise = "[|||]="
batteryStats :: Maybe Integer -> String
batteryStats (Just battime) =
printf " <fc=%v>%v % 3vmin</fc>"
(batteryColor battime) (batteryIcon battime)
(if battime > 0 then show battime else "? ")
batteryStats _ = ""
displayStats :: Stats -> IO()
displayStats (Stats conf pipe cpuperc numcpu memstat swapperc (NetLoad net_rx net_tx) maybe_battime) = do
datestr <- DF.getTimeAndDate conf
hPutStrLn pipe $
printf ("<fc=%v>|</fc> CPU:<fc=%v>%3v%%</fc> MEM:\
\<fc=%v>%3v%%</fc> SWP:<fc=%v>\
\%3v%%</fc> DN:% 9v UP:% 9v" ++
batteryStats maybe_battime ++ " <fc=%v>|\
\</fc> <fc=%v><action=`%v -title Calendar -e sh -c 'ncal -w; zsh -i'`>%v</action></fc>")
myInactiveColor (hotCPUColor cpuperc numcpu) cpuperc
(hotMemColor memstat) (getMemPercent memstat)
(hotSwapColor swapperc) swapperc (netspeed net_rx)
(netspeed net_tx)
myInactiveColor myActiveColor (HC.terminal conf) datestr
hFlush pipe
getSwapStats :: IO SwapPercent
getSwapStats = do
swap <- rights <$>
mapM (\nr -> tryIOError (
sysctlNameToOidArgs "vm.swap_info" [ nr ] >>=
sysctlPeekArray :: IO [Word32])) [0..15]
let tot = sum $ fmap (!! 3) swap
used = sum $ fmap (!! 4) swap
return $ if tot > 0 then
fromIntegral $ (used * 100) `div` tot
else 0;
gatherLoop :: HC.HostConfiguration -> (OID, Integer, Integer, OID, OID, Maybe OID) -> CPULoad -> Maybe Handle -> Handle
-> NetLoad -> IO()
gatherLoop conf (oid_cpuload, numcpu, memtotal, oid_memfree, oid_meminact, oid_battime) oldcpuload netstatPipe pipe lastnet = do
cpuload <- getCPULoad oid_cpuload
memstat <- getMemStat (memtotal, oid_memfree, oid_meminact)
netload <- getNetLoad netstatPipe lastnet
swapload <- getSwapStats
battime <- getBatteryTime oid_battime
displayStats $ Stats conf pipe (getCPUPercent oldcpuload cpuload) numcpu memstat swapload netload battime
threadDelay 1000000
gatherLoop conf (oid_cpuload, numcpu, memtotal, oid_memfree, oid_meminact, oid_battime) cpuload netstatPipe pipe netload
startBSD :: HC.HostConfiguration -> Handle -> IO()
startBSD conf pipe = do
[ oid_cpuload, oid_numcpu, oid_memtotal, oid_memfree, oid_meminact ]
<- mapM sysctlNameToOid [ "kern.cp_time", "hw.ncpu", "vm.stats.vm.v_page_count", "vm.stats.vm.v_free_count", "vm.stats.vm.v_inactive_count" ]
cpuload <- getCPULoad oid_cpuload
oid_battime <- catchIOError (do
oid <- sysctlNameToOid "hw.acpi.battery.time"
return $ Just oid
) (\_ -> return Nothing)
netstatPipe <- spawnNetStat
netinit <- getNetLoad netstatPipe (NetLoad 0 0)
[ numcpu, memtotal ] <- mapM sysctlReadUInt [ "hw.ncpu", "vm.stats.vm.v_page_count" ]
gatherLoop conf (oid_cpuload, fromIntegral numcpu, fromIntegral memtotal, oid_memfree, oid_meminact, oid_battime)
cpuload netstatPipe pipe netinit
getCPUPercent :: CPULoad -> CPULoad -> Integer
getCPUPercent (CPULoad oldused oldtotal) (CPULoad curused curtotal) =
let deltatotal = curtotal - oldtotal
deltaused = curused - oldused
in
if deltatotal > 0 then (100*deltaused) `div` deltatotal else 0
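-- For example, getCPUPercent (CPULoad 50 100) (CPULoad 80 200) == 30:
-- of the 100 ticks elapsed between the two samples, 30 were spent busy.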
getMemPercent :: MemStat -> Integer
getMemPercent (MemStat total free) = 100 - ((free * 100) `div` total)
getMemStat :: (Integer, OID, OID) -> IO MemStat
getMemStat (memtotal, oid_memfree, oid_meminact) = do
memfree <- sysctlReadUInt oid_memfree
meminact <- sysctlReadUInt oid_meminact
return $ MemStat memtotal (fromIntegral memfree + fromIntegral meminact)
getCPULoad :: OID -> IO CPULoad
getCPULoad oid_cpuload = do
cpuloads2 <- sysctlPeekArray oid_cpuload :: IO [Word64]
let cpuloads = fmap fromIntegral cpuloads2
total = sum cpuloads
used = total - last cpuloads
in return $ CPULoad used total
getBatteryTime :: Maybe OID -> IO (Maybe Integer)
getBatteryTime (Just oid_battime) = do
val <- sysctlReadInt oid_battime
return $ Just (fromIntegral val)
getBatteryTime _ = return Nothing
spawnPipe :: [ String ] -> IO Handle
spawnPipe cmd = do
(Just hin, _, _, _) <- createProcess (proc (head cmd) (tail cmd)){ std_in = CreatePipe }
return hin
spawnNetStat :: IO (Maybe Handle)
spawnNetStat = do
(_, Just hout, _, _) <- createProcess (proc "netstat"
["-ibW", "1"]){ std_out = CreatePipe }
safeRead hout
safeRead hout
return $ Just hout
where safeRead hout =
catchIOError (do
line <- hGetLine hout
return $ Just line)
(\_ -> return Nothing)
xmobarSysInfo :: FilePath -> HC.HostConfiguration -> [ String ]
xmobarSysInfo homedir conf =
[ "xmobar", homedir ++ "/.xmonad/" ++ prefix ++ "sysinfo_xmobar.rc" ]
where prefix
| HC.isSlim conf = "slim_"
| otherwise = ""
main = do
conf <- HC.readHostConfiguration
homedir <- getHomeDirectory
spawnPipe (xmobarSysInfo homedir conf) >>=
(case os of
"freebsd" -> startBSD conf
_ -> error $ "Unknown operating system " ++ os
)
|
nakal/xmonad-conf
|
lib/SysInfoBar.hs
|
bsd-2-clause
| 9,172 | 0 | 17 | 2,982 | 2,521 | 1,286 | 1,235 | 199 | 3 |
{-
Copyright (c) 2012 John P. Feltz <[email protected]>
License: BSD-Style, See LICENSE
-}
--Tweaked from parsec-extra-1.0.2
--License BSD3
--Original Author: Arie Peterson,
module Text.Parsec.Extra
( eol
, digit
, natural
, integer
, whitespace
) where
import Control.Applicative (Applicative,(<$>),(<*>),(*>),pure)
import Data.List (foldl')
import Text.Parsec.Prim ((<|>),(<?>))
import Text.Parsec.String (GenParser)
import Text.Parsec.Combinator (many1,option)
import qualified Text.Parsec.Char as Char
import Text.Parsec.Char (char)
--import Text.Parsec.String (Parser)
-- | Parse \"end of line\": one of \"\\n\", \"\\r\\n\", or \"\\r\".
eol :: GenParser Char state ()
eol = (char '\n' <|> (char '\r' >> option '\n' (char '\n'))) >> return ()
-- | A decimal digit.
digit :: (Integral a) => GenParser Char state a
digit = fromIntegral . (\ c -> fromEnum c - fromEnum '0') <$> Char.digit
-- | A natural (i.e. non-negative integer) number, in decimal notation.
natural :: (Integral a) => GenParser Char state a
natural = (foldl' (\ a b -> a * 10 + b) 0 <$> many1 digit) <?> "nonnegative decimal integer"
-- | An integer number, in decimal notation (possibly prefixed with \"-\").
integer :: (Integral a) => GenParser Char state a
integer = (option id (char '-' *> pure negate) <*> natural) <?> "decimal integer"
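-- Illustrative examples (not part of the original module), using 'parse'
-- from "Text.Parsec":
--
-- > parse natural "" "123" -- Right 123
-- > parse integer "" "-42" -- Right (-42)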
-- | A substitute for the otherwise misleading Parsec space parsers
whitespace :: GenParser Char state Char
whitespace = char '\SP'
|
jfeltz/tasty-integrate
|
ide-format/Text/Parsec/Extra.hs
|
bsd-2-clause
| 1,569 | 0 | 12 | 344 | 386 | 223 | 163 | 23 | 1 |
{-|
Description: base types
= navigating the types modules
there are a bunch of type-defining modules here; hopefully you can find what you want
== "Web.Respond.Types.Path"
this module defines 'PathConsumer' and several functions for working with that type
== "Web.Respond.Types.Response"
defines the types 'Responder', 'ResponseBody', and 'MediaTypeMatcher'; also defines the typeclass 'ToResponseBody'.
provides tools for implementing instances of 'ToResponseBody' by matching against Accept headers
== "Web.Respond.Types.Errors"
defines the typeclass 'ReportableError', similar to 'ToResponseBody' except with a fallback 'ResponseBody' when unable to match the Accept header.
also defines the 'ErrorReport' datatype, and implements 'ReportableError' for it, defining the formats for rendering it to a response.
provides an instance of 'ReportableError' for unicode errors.
== "Web.Respond.Types.Request"
defines the 'FromBody' typeclass along with
- 'TextBody' newtype, with an appropriate 'FromBody' instance
- 'Json' and 'JsonS' newtypes, with appropriate 'FromBody' and 'ToResponseBody' instances
-}
module Web.Respond.Types (
module Web.Respond.Types.Path,
module Web.Respond.Types.Response,
module Web.Respond.Types.Errors,
module Web.Respond.Types.Request
) where
import Web.Respond.Types.Path
import Web.Respond.Types.Response
import Web.Respond.Types.Request
import Web.Respond.Types.Errors
|
raptros/respond
|
src/Web/Respond/Types.hs
|
bsd-3-clause
| 1,535 | 0 | 5 | 290 | 70 | 51 | 19 | 9 | 0 |
{-# LANGUAGE Safe #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-
Copyright (C) 2004-2008 John Goerzen <[email protected]>
Copyright (C) 2015 David Farrell <[email protected]>
-}
{-|
Module : Polar.ConfigFile.Types
Copyright : Copyright (C) 2004-2008 John Goerzen, 2015 David Farrell
License : BSD3
Maintainer : David Farrell <[email protected]>
Stability : unstable
Portability : non-portable (GHC extensions)
Internal types for "Polar.ConfigFile". This module is not intended to be
used directly by your programs.
Copyright (C) 2004-2008 John Goerzen \<jgoerzen\@complete.org\>, 2015 David Farrell \<shokku.ra\@gmail.com\>.
-}
module Polar.ConfigFile.Types (
Sections, Options,
ConfigErrorType(..), ConfigError, {-CPResult,-}
ConfigParser(..),
SectionName,
OptionName,
Option, Section,
ParseOutput
) where
import qualified Data.Map as Map
import Control.Monad.Error
{- | Internal output from parser -}
type ParseOutput = [(String, [(String, String)])]
{- | Names of sections -}
type SectionName = String
{- | Names of options -}
type OptionName = String
{- | Representation of sections -}
type Section = (SectionName, Options)
{- | Representation of options -}
type Option = (OptionName, String)
{- | Storage of options. -}
type Options = Map.Map OptionName String
{- | The main data storage type (storage of sections). -}
type Sections = Map.Map SectionName Options
{- | Possible ConfigParser errors. -}
data ConfigErrorType = ParseError String -- ^ Parse error
| SectionAlreadyExists SectionName -- ^ Attempt to create an already-existing section
| NoSection SectionName -- ^ The section does not exist
| NoOption OptionName -- ^ The option does not exist
| OtherProblem String -- ^ Miscellaneous error
                     | InterpolationError String -- ^ Raised by 'Polar.ConfigFile.interpolatingAccess' if a request was made for a non-existent option
deriving (Eq, Ord, Show)
{- | Indicates an error occurred. The String is an explanation of the location
of the error. -}
type ConfigError = (ConfigErrorType, String)
instance Error ConfigError where
noMsg = (OtherProblem "", "")
strMsg x = (OtherProblem x, "")
{- Removed due to Hugs incompatibility.
| Basic ConfigParser error handling. The Left value indicates
an error, while a Right value indicates success.
type CPResult a = MonadError ConfigError m => m a
-}
{- | This is the main record that is used by 'Polar.ConfigFile'.
-}
data ConfigParser = ConfigParser
{ -- | The data itself
content :: Sections,
-- | How to transform an option into a standard representation
optionNameTransform :: (OptionName -> OptionName),
-- | Function to look up an option, considering a default value
-- if 'useDefault' is True; or ignoring a default value otherwise.
-- The option specification is assumed to be already transformed.
defaultHandler :: ConfigParser -> SectionName -> OptionName -> Either ConfigError String,
-- | Whether or not to seek out a default action when no match
-- is found.
useDefault :: Bool,
-- | Function that is used to perform lookups, do optional
-- interpolation, etc. It is assumed that accessFunction
-- will internally call defaultHandler to do the underlying lookup.
-- The option value is not assumed to be transformed.
accessFunction :: (ConfigParser -> SectionName -> OptionName -> Either ConfigError String)
}
|
polar-engine/polar-configfile
|
src/Polar/ConfigFile/Types.hs
|
bsd-3-clause
| 3,911 | 0 | 13 | 1,089 | 364 | 234 | 130 | 37 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Jira2Sheet.Auth where
import Control.Exception (SomeException (..))
import Control.Monad.Except (MonadError (..))
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import Network.HTTP.Conduit (Manager)
import Network.OAuth.OAuth2 (AccessToken (..), OAuth2 (..))
import Network.Wreq (Auth)
import qualified Network.Wreq as Wreq
import Jira2Sheet.Common (JiraPassword, JiraUsername,
RefreshToken, SavedCredentials (..))
import Jira2Sheet.Crypto (decryptSavedCredentials,
saveCredentials)
import Jira2Sheet.Types.Crypto (MonadCrypto (..))
import Jira2Sheet.Types.Files (MonadReadFS (..), MonadWriteFS (..))
import Jira2Sheet.Types.HTTP (MonadOAuth (..))
import Jira2Sheet.Types.Input (MonadInput (..), getPassword')
import Jira2Sheet.Types.Log (Log (..))
data Credentials = Credentials Auth AccessToken
basicAuth :: JiraUsername -> JiraPassword -> Auth
basicAuth user password = Wreq.basicAuth (encode user) (encode password)
where encode = Text.encodeUtf8 . Text.pack
getJiraCredentials :: (MonadInput m) => m (JiraUsername, JiraPassword)
getJiraCredentials = do
user <- getInputLine "Jira username>"
password <- getPassword' "Jira password>"
return (user, password)
authorizeAndSave :: (MonadOAuth m, MonadWriteFS m, MonadInput m, MonadCrypto m, MonadError SomeException m) => Manager -> OAuth2 -> (RefreshToken -> SavedCredentials) -> m AccessToken
authorizeAndSave mgr oauth builder = do
googleRefresh <- oauthAuthorize mgr oauth [("scope", "https://www.googleapis.com/auth/drive")]
traverse (saveCredentials . builder) $ refreshToken googleRefresh
pure googleRefresh
getCredentials :: (MonadInput m, MonadReadFS m, MonadOAuth m, MonadWriteFS m, MonadCrypto m, MonadError SomeException m, Log m) => Manager -> OAuth2 -> m Credentials
getCredentials mgr oauth = do
credentials <- decryptSavedCredentials "credentials.enc" -- TODO `mplus` pure Nothing
let authorizeAndSave' = authorizeAndSave mgr oauth
case credentials of
Nothing -> do
(user, pass) <- getJiraCredentials
Credentials (basicAuth user pass) <$> authorizeAndSave' (SavedCredentials user pass)
(Just (SavedCredentials user pass tkn)) -> do
logInfo "credentials decrypted"
Credentials (basicAuth user pass) <$> fetchRefreshToken mgr oauth tkn
-- tokenResponse <- runExceptT $ fetchRefreshToken mgr oauth tkn
-- Credentials jCred <$> either (const $ authorizeAndSave' jCred) pure tokenResponse
|
berdario/jira2sheet
|
src/Jira2Sheet/Auth.hs
|
bsd-3-clause
| 2,924 | 0 | 15 | 710 | 686 | 379 | 307 | 46 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Generics.EMGM.Representation
-- Copyright : (c) 2008, 2009 Universiteit Utrecht
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable
--
-- Summary: Types and related functions for the representation used in EMGM.
--
-- EMGM uses a generic sum-of-products view of datatypes encoded into the
-- 'Unit', @:+:@ (sum), and @:*:@ (product). Many Haskell datatypes can be
-- represented in this way. Right-nested sums replace the @|@, and right-nested
-- products replace the arguments to a constructor. Units replace constructors
-- with no arguments.
--
-- Since constructors encode more than just a list of arguments, this library
-- uses 'ConDescr' to store that information. This includes name, arity, record
-- labels, fixity, and operator precedence. Constructor descriptions are useful
-- for generic operations such as 'Read' and 'Show' and possibly others.
--
-- Generic functions need to convert values between the Haskell datatype and its
-- structure representation. This is done using the embedding-projection pair,
-- which is simply a pair a functions for translating between two types.
--
-----------------------------------------------------------------------------
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
module Generics.EMGM.Representation (
-- * Structure Representation
--
-- | The unit, sum, and product types form the sum-of-products view for a
-- Haskell datatype.
Unit(..),
(:+:)(..),
(:*:)(..),
-- * Constructor Description
--
-- | A description of the syntax of each constructor provides useful auxiliary
-- information for some generic functions.
ConDescr(..),
ConType(..),
LblDescr(..),
-- * Embedding-Projection Pair
--
-- | A pair of a function and its inverse form the isomorphism between a
-- datatype and its structure representation.
EP(..),
-- * Fixity and Precedence
-- | These are used to determine whether a constructor is infix or not and, if
-- it is infix, what its associativity and precedence are.
Fixity(..),
Associativity(..),
Prec,
prec,
minPrec,
maxPrec,
appPrec,
recPrec,
) where
import Text.ParserCombinators.ReadPrec (minPrec, Prec)
infixr 5 :+:
infixr 6 :*:
-- | Encodes a constructor with no arguments. An analogous standard Haskell type
-- is @()@.
data Unit
= Unit -- ^ The only value of type @Unit@ (ignoring @_|_@).
deriving (Enum, Eq, Ord, Show)
-- | The \"sum\" encodes 2 constructor alternatives. An analogous standard
-- Haskell type is @'Either' a b@.
data a :+: b
= L a -- ^ Left alternative
| R b -- ^ Right alternative
deriving (Eq, Ord, Read, Show)
-- | The \"product\" encodes 2 constructor arguments. An analogous standard
-- Haskell type is @(a, b)@.
data a :*: b
= a :*: b -- ^ A pair of arguments
deriving (Eq, Ord, Read, Show)
-- | Encodes the string label for a field in a constructor defined with labeled
-- fields (a.k.a. record syntax).
newtype LblDescr = LblDescr String
deriving (Eq, Ord, Read, Show)
-- | The embedding-projection pair contains two functions for converting between
-- the datatype and its representation. An @EP@ value preserves an isomorphism
-- (ignoring @_|_@s) between a datatype and its structure representation.
data EP d r
= EP
{ from :: (d -> r) -- ^ Embed a @d@atatype into its @r@epresentation.
, to :: (r -> d) -- ^ Project @d@atatype from its @r@epresentation.
}
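-- As an illustration (not taken from this module): a datatype such as
--
-- > data Tree a = Leaf | Node (Tree a) a (Tree a)
--
-- has the sum-of-products view
--
-- > Unit :+: (Tree a :*: (a :*: Tree a))
--
-- and an 'EP' value converts between 'Tree' and that representation.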
-- | Contains useful meta-information about the syntax used in a constructor
-- declaration.
--
-- NOTE: It is important that the 'ConDescr' value accurately describe the
-- syntax in a constructor declaration. An incorrect description may lead to
-- faulty 'Read' or 'Show' operation.
data ConDescr
= ConDescr
{ -- | Name of the constructor (without parenthesese if infix).
conName :: String,
-- | Number of fields.
conArity :: Int,
-- | Uses labeled fields (a.k.a. record syntax).
conRecord :: Bool,
-- | Fixity, associativity, precedence.
conFixity :: Fixity
}
deriving (Eq, Show)
-- | Type of constructor syntax. Used in the generic functions 'Read' and
-- 'Show'.
data ConType
= UnknownC -- ^ Have not seen the rcon yet
| NormalC -- ^ Normal prefix-style constructor
| InfixC String -- ^ Infix with symbol (no record syntax)
| RecordC -- ^ Record-style (any fixity)
deriving (Eq, Show)
-- | A constructor's fixity, associativity, and precedence.
data Fixity
-- | Associativity and precedence are the same as function application.
= Prefix
| Infix Associativity Prec
deriving (Eq, Ord, Read, Show)
-- | A constructor's associativity.
data Associativity
-- | Declared with infixl
= LeftAssoc
-- | Declared with infixr
| RightAssoc
-- | Declared with infix
| NonAssoc
deriving (Eq, Ord, Read, Show)
-- TODO: Need smart constructor(s) for ConDescr, so we can verify things.
-- | Get the precedence of a fixity value.
prec :: Fixity -> Prec
prec Prefix = appPrec
prec (Infix _ n) = n
-- | Maximum precedence: 11
maxPrec :: Prec
maxPrec = recPrec
-- | Precedence for function application: 10
appPrec :: Prec
appPrec = 10
-- | Precedence for record construction: 11
recPrec :: Prec
recPrec = appPrec + 1
|
spl/emgm
|
src/Generics/EMGM/Representation.hs
|
bsd-3-clause
| 5,518 | 0 | 10 | 1,129 | 572 | 381 | 191 | 70 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import System.Console.GetOpt
import System.Environment
import System.Exit
import System.IO
import Text.Printf
import Control.Monad
import qualified Data.List as L
import qualified STHLib
data Flag
= Count
| Mean
| Stddev
| Stderr
| Sum
| Var
| Min
| Q1
| Median
| Q3
| Max
| Help
| Summary
| Complete
| Transpose
deriving (Eq,Ord,Enum,Bounded)
instance Show Flag where
show Count = "Count"
show Mean = "Mean"
show Stddev = "Stddev"
show Stderr = "Stderr"
show Sum = "Sum"
show Var = "Variance"
show Min = "Min"
show Q1 = "Q1"
show Median = "Median"
show Q3 = "Q3"
show Max = "Max"
show Help = "Help"
show Summary = "Summary"
show Complete = "Complete"
show Transpose = "Transpose"
data OutputFormat = Normal | Transposed deriving (Eq, Show)
data Stat = Stat Flag (Maybe Float)
instance Show Stat where
show (Stat _ Nothing) = ""
show (Stat f (Just x)) = show f ++ ": " ++ show x ++ "\n"
data Stats = Stats OutputFormat [Stat]
instance Show Stats where
show (Stats Normal (Stat _ (Just v):[])) = show v ++ "\n"
show (Stats Normal x) = unlines [headers x, values x]
show (Stats Transposed x) = x >>= show
headers :: [Stat] -> String
headers [] = ""
headers (Stat f _:xs) = printf "%10s" (show f) ++ headers xs
values :: [Stat] -> String
values [] = ""
values (Stat _ Nothing:xs) = values xs
values (Stat _ (Just v):xs) = printf "%10f" v ++ values xs
allStats = [Count, Mean, Stddev, Stderr, Sum, Var, Min, Q1, Median, Q3, Max]
summaryStats = [Min, Q1, Median, Q3, Max]
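-- Example invocations (input is one number per line, read from files or stdin;
-- the file name below is illustrative):
--
-- sth --mean --stddev data.txt
-- cat data.txt | sth --summary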
flags = [
Option "nN" ["count"] (NoArg Count) "Display the count",
Option "m" ["mean", "avg"] (NoArg Mean) "Display the mean",
Option [] ["stddev", "sd"] (NoArg Stddev) "Display the standard deviation",
Option [] ["stderr", "se", "sem"] (NoArg Stderr) "Display the standard error",
Option "s" ["sum"] (NoArg Sum) "Display the sumatory",
Option [] ["var", "variance"] (NoArg Var) "Display the variance",
Option [] ["min"] (NoArg Min) "Display the minimun",
Option [] ["q1"] (NoArg Q1) "Display the first quartile",
Option [] ["median"] (NoArg Median) "Display the mediam",
Option [] ["q3"] (NoArg Q3) "Display the third quartile",
Option [] ["max"] (NoArg Max) "Display the maximun",
Option [] ["summary"] (NoArg Summary) "Display the summary info",
Option [] ["complete"] (NoArg Complete) "Display the complete info",
Option [] ["transpose-output", "tn"] (NoArg Transpose) "Display the info in vertical mode",
Option "h" ["help"] (NoArg Help) "Print the help message"
]
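-- Turn the requested flags into computed statistics over the input values.
-- Flags that do not name a statistic (e.g. Help or Transpose) fall through to
-- the catch-all case and are skipped.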
generateStatList :: [Flag] -> [Float] -> [Stat]
generateStatList [] content = []
generateStatList (Count:xs) content = Stat Count (Just $ STHLib.count content) : generateStatList xs content
generateStatList (Mean:xs) content = Stat Mean (Just $ STHLib.mean content) : generateStatList xs content
generateStatList (Stddev:xs) content = Stat Stddev (Just $ STHLib.stddev content) : generateStatList xs content
generateStatList (Stderr:xs) content = Stat Stderr (Just $ STHLib.stderr content) : generateStatList xs content
generateStatList (Sum:xs) content = Stat Sum (Just $ STHLib.sum content) : generateStatList xs content
generateStatList (Var:xs) content = Stat Var (Just $ STHLib.variance content) : generateStatList xs content
generateStatList (Min:xs) content = Stat Min (Just $ STHLib.min content) : generateStatList xs content
generateStatList (Q1:xs) content = Stat Q1 (Just $ STHLib.q1 content) : generateStatList xs content
generateStatList (Median:xs) content = Stat Median (Just $ STHLib.median content) : generateStatList xs content
generateStatList (Q3:xs) content = Stat Q3 (Just $ STHLib.q3 content) : generateStatList xs content
generateStatList (Max:xs) content = Stat Max (Just $ STHLib.max content) : generateStatList xs content
generateStatList (_:xs) content = generateStatList xs content
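-- Expand the aggregate flags (Summary, Complete) into the individual
-- statistics they stand for, leaving every other flag untouched.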
populateArgs :: [Flag] -> [Flag]
populateArgs [] = []
populateArgs (Summary:xs) = summaryStats ++ populateArgs xs
populateArgs (Complete:xs) = allStats ++ populateArgs xs
populateArgs (x:xs) = x : populateArgs xs
-- Mutable zone
parse :: [String] -> IO ([Flag], [String])
parse argv = case getOpt Permute flags argv of
(args, fs, []) -> do
let files = if null fs then [] else fs
if Help `elem` args
then do hPutStrLn stderr usage
exitSuccess
else return (L.nub (populateArgs args), files)
(_, _, errs) -> do
hPutStrLn stderr (concat errs ++ usage)
exitWith (ExitFailure 1)
where header = "Usage: sth [options] [files]"
usage = usageInfo header flags
readFiles :: [FilePath] -> IO String
readFiles = fmap concat . mapM readFile
getData :: [String] -> IO String
getData files = if null files
then getContents
else readFiles files
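-- Read one value per input line and assemble the requested statistics, using
-- the transposed (vertical) layout when the Transpose flag is present.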
contentToStats :: [Flag] -> String -> IO Stats
contentToStats flags content = return (if Transpose `elem` flags
then Stats Transposed (generateStatList flags values)
else Stats Normal (generateStatList flags values))
where values = map read $ lines content
main = do
(as, fs) <- getArgs >>= parse
let flags = if null as then allStats else as
putStr =<< (liftM show $ getData fs >>= contentToStats flags)
|
jespino/sth
|
sth.hs
|
bsd-3-clause
| 5,515 | 17 | 12 | 1,320 | 2,077 | 1,062 | 1,015 | 123 | 4 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE EmptyCase #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE UndecidableSuperClasses #-}
-- | Render the first widget on the server, and the second on the client.
module Reflex.Dom.Prerender
( Prerender (..)
, prerender_
, PrerenderClientConstraint
, PrerenderBaseConstraints
) where
import Control.Monad.Primitive (PrimMonad(..))
import Control.Monad.Reader
import Control.Monad.Ref (MonadRef(..), MonadAtomicRef(..))
import Data.IORef (IORef, newIORef)
import Data.Semigroup (Semigroup)
import Data.Text (Text)
import Data.Void
import Foreign.JavaScript.TH
import GHCJS.DOM.Types (MonadJSM)
import Reflex hiding (askEvents)
import Reflex.Dom.Builder.Class
import Reflex.Dom.Builder.Hydratable
import Reflex.Dom.Builder.Immediate
import Reflex.Dom.Builder.InputDisabled
import Reflex.Dom.Builder.Static
import Reflex.Host.Class
import Data.IntMap.Strict (IntMap)
import qualified Data.IntMap.Strict as IntMap
import qualified GHCJS.DOM.Document as Document
import qualified GHCJS.DOM.Node as Node
import qualified GHCJS.DOM.Types as DOM
type PrerenderClientConstraint t m =
( DomBuilder t m
, DomBuilderSpace m ~ GhcjsDomSpace
, DomRenderHook t m
, HasDocument m
, TriggerEvent t m
, PrerenderBaseConstraints t m
)
type PrerenderBaseConstraints t m =
( MonadFix m
, MonadHold t m
, MonadJSM (Performable m)
, MonadJSM m
, MonadRef (Performable m)
, MonadRef m
, MonadReflexCreateTrigger t m
, MonadSample t (Performable m)
, PerformEvent t m
, PostBuild t m
, PrimMonad m
, Ref (Performable m) ~ IORef
, Ref m ~ IORef
)
-- | Render the first widget on the server, and the second on the client. The
-- hydration builder will run *both* widgets.
prerender_
:: (Functor m, Reflex t, Prerender t m)
=> m () -> Client m () -> m ()
prerender_ server client = void $ prerender server client
class (PrerenderClientConstraint t (Client m), Client (Client m) ~ Client m, Prerender t (Client m)) => Prerender t m | m -> t where
-- | Monad in which the client widget is built
type Client m :: * -> *
-- | Render the first widget on the server, and the second on the client. The
-- hydration builder will run *both* widgets, updating the result dynamic at
-- switchover time.
prerender :: m a -> Client m a -> m (Dynamic t a)
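-- A minimal usage sketch (the widget name is hypothetical):
--
-- > result <- prerender (pure Nothing) (Just <$> someClientOnlyWidget)
--
-- The returned 'Dynamic' initially holds the server widget's result and is
-- updated with the client widget's result at switchover time.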
instance (ReflexHost t, Adjustable t m, PrerenderBaseConstraints t m) => Prerender t (HydrationDomBuilderT GhcjsDomSpace t m) where
type Client (HydrationDomBuilderT GhcjsDomSpace t m) = HydrationDomBuilderT GhcjsDomSpace t m
prerender _ client = pure <$> client
instance (Adjustable t m, PrerenderBaseConstraints t m, ReflexHost t) => Prerender t (HydrationDomBuilderT HydrationDomSpace t m) where
-- | PostBuildT is needed here because we delay running the client builder
-- until after switchover, at which point the postBuild of @m@ has already fired
type Client (HydrationDomBuilderT HydrationDomSpace t m) = PostBuildT t (HydrationDomBuilderT GhcjsDomSpace t m)
-- | Runs the server widget up until switchover, then replaces it with the
-- client widget.
prerender server client = do
env <- HydrationDomBuilderT ask
events <- askEvents
doc <- askDocument
serverDf <- Document.createDocumentFragment doc -- server dom should not be mounted in the window's doc in hydration
df <- Document.createDocumentFragment doc
unreadyChildren <- HydrationDomBuilderT $ asks _hydrationDomBuilderEnv_unreadyChildren
immediateMode <- liftIO $ newIORef HydrationMode_Immediate
delayed <- liftIO $ newIORef $ pure ()
let clientEnv = env
{ _hydrationDomBuilderEnv_parent = Left $ DOM.toNode df
, _hydrationDomBuilderEnv_hydrationMode = immediateMode
}
serverEnv = HydrationDomBuilderEnv
{ _hydrationDomBuilderEnv_document = doc
, _hydrationDomBuilderEnv_parent = Left $ DOM.toNode serverDf
, _hydrationDomBuilderEnv_unreadyChildren = unreadyChildren
, _hydrationDomBuilderEnv_commitAction = pure ()
, _hydrationDomBuilderEnv_delayed = delayed
, _hydrationDomBuilderEnv_hydrationMode = immediateMode
, _hydrationDomBuilderEnv_switchover = never
}
a0 <- lift $ runHydrationDomBuilderT server serverEnv events
(a', trigger) <- newTriggerEvent
getHydrationMode >>= \case
HydrationMode_Immediate -> do
liftIO . trigger <=< lift $ runHydrationDomBuilderT (runPostBuildT client $ void a') clientEnv events
append $ DOM.toNode df
HydrationMode_Hydrating -> addHydrationStep $ do
liftIO . trigger <=< lift $ runHydrationDomBuilderT (runPostBuildT client $ void a') clientEnv events
insertBefore df =<< deleteToPrerenderEnd doc
holdDyn a0 a'
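-- | A monad whose actions can never be executed: its environment is 'Void',
-- so 'unrunnable' can discharge any method. It serves as the 'Client' of
-- builders that never run client code, such as 'StaticDomBuilderT'.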
newtype UnrunnableT t m a = UnrunnableT (ReaderT Void m a)
deriving (Functor, Applicative, Monad, MonadTrans)
unrunnable :: UnrunnableT t m a
unrunnable = UnrunnableT $ ReaderT $ \case {}
instance (Reflex t, Monad m) => DomBuilder t (UnrunnableT t m) where
type DomBuilderSpace (UnrunnableT t m) = GhcjsDomSpace
textNode _ = unrunnable
commentNode _ = unrunnable
element _ _ _ = unrunnable
inputElement _ = unrunnable
textAreaElement _ = unrunnable
selectElement _ _ = unrunnable
placeRawElement _ = unrunnable
wrapRawElement _ _ = unrunnable
instance (Reflex t, Monad m) => NotReady t (UnrunnableT t m) where
notReadyUntil _ = unrunnable
notReady = unrunnable
instance (Reflex t, Monad m) => Adjustable t (UnrunnableT t m) where
runWithReplace _ _ = unrunnable
traverseIntMapWithKeyWithAdjust _ _ _ = unrunnable
traverseDMapWithKeyWithAdjust _ _ _ = unrunnable
traverseDMapWithKeyWithAdjustWithMove _ _ _ = unrunnable
instance (Reflex t, Monad m) => PerformEvent t (UnrunnableT t m) where
type Performable (UnrunnableT t m) = UnrunnableT t m
performEvent _ = unrunnable
performEvent_ _ = unrunnable
instance Monad m => MonadRef (UnrunnableT t m) where
type Ref (UnrunnableT t m) = Ref IO
newRef _ = unrunnable
readRef _ = unrunnable
writeRef _ _ = unrunnable
instance Monad m => MonadAtomicRef (UnrunnableT t m) where
atomicModifyRef _ _ = unrunnable
instance Monad m => HasDocument (UnrunnableT t m) where
askDocument = unrunnable
instance Monad m => TriggerEvent t (UnrunnableT t m) where
newTriggerEvent = unrunnable
newTriggerEventWithOnComplete = unrunnable
newEventWithLazyTriggerWithOnComplete _ = unrunnable
instance Monad m => MonadReflexCreateTrigger t (UnrunnableT t m) where
newEventWithTrigger _ = unrunnable
newFanEventWithTrigger _ = unrunnable
instance Monad m => MonadFix (UnrunnableT t m) where
mfix _ = unrunnable
instance (Monad m, MonadHold t m) => MonadHold t (UnrunnableT t m) where
hold _ _ = unrunnable
holdDyn _ _ = unrunnable
holdIncremental _ _ = unrunnable
buildDynamic _ _ = unrunnable
headE _ = unrunnable
now = unrunnable
instance Monad m => MonadSample t (UnrunnableT t m) where
sample _ = unrunnable
instance Monad m => MonadIO (UnrunnableT t m) where
liftIO _ = unrunnable
#ifndef ghcjs_HOST_OS
instance Monad m => MonadJSM (UnrunnableT t m) where
liftJSM' _ = unrunnable
#endif
instance (Reflex t, Monad m) => PostBuild t (UnrunnableT t m) where
getPostBuild = unrunnable
instance Monad m => PrimMonad (UnrunnableT t m) where
type PrimState (UnrunnableT t m) = PrimState IO
primitive _ = unrunnable
instance (Reflex t, Monad m) => DomRenderHook t (UnrunnableT t m) where
withRenderHook _ _ = unrunnable
requestDomAction _ = unrunnable
requestDomAction_ _ = unrunnable
instance (Reflex t, Monad m, MonadHold t m) => Prerender t (UnrunnableT t m) where
type Client (UnrunnableT t m) = UnrunnableT t m
prerender _ _ = unrunnable
instance (SupportsStaticDomBuilder t m) => Prerender t (StaticDomBuilderT t m) where
type Client (StaticDomBuilderT t m) = UnrunnableT t m
prerender server _ = do
_ <- commentNode $ CommentNodeConfig startMarker Nothing
a <- server
_ <- commentNode $ CommentNodeConfig endMarker Nothing
pure $ pure a
instance (Prerender t m, Monad m) => Prerender t (ReaderT r m) where
type Client (ReaderT r m) = ReaderT r (Client m)
prerender server client = do
r <- ask
lift $ prerender (runReaderT server r) (runReaderT client r)
instance (Prerender t m, Monad m, Reflex t, MonadFix m, Monoid w) => Prerender t (DynamicWriterT t w m) where
type Client (DynamicWriterT t w m) = DynamicWriterT t w (Client m)
prerender server client = do
x <- lift $ prerender (runDynamicWriterT server) (runDynamicWriterT client)
let (a, w') = splitDynPure x
w = join w'
tellDyn w
pure a
instance (Prerender t m, Monad m, Reflex t, Semigroup w) => Prerender t (EventWriterT t w m) where
type Client (EventWriterT t w m) = EventWriterT t w (Client m)
prerender server client = do
x <- lift $ prerender (runEventWriterT server) (runEventWriterT client)
let (a, w') = splitDynPure x
w = switch $ current w'
tellEvent w
pure a
instance (Prerender t m, MonadFix m, Reflex t) => Prerender t (RequesterT t request response m) where
type Client (RequesterT t request response m) = RequesterT t request response (Client m)
prerender server client = mdo
let fannedResponses = fanInt responses
withFannedResponses :: forall m' a. Monad m' => RequesterT t request response m' a -> Int -> m' (a, Event t (IntMap (RequesterData request)))
withFannedResponses w selector = do
(x, e) <- runRequesterT w (selectInt fannedResponses selector)
pure (x, fmapCheap (IntMap.singleton selector) e)
(result, requestsDyn) <- fmap splitDynPure $ lift $ prerender (withFannedResponses server 0) (withFannedResponses client 1)
responses <- fmap (fmapCheap unMultiEntry) $ requesting' $ fmapCheap multiEntry $ switchPromptlyDyn requestsDyn
return result
instance (Prerender t m, Monad m, Reflex t, MonadFix m, Group q, Additive q, Query q, Eq q) => Prerender t (QueryT t q m) where
type Client (QueryT t q m) = QueryT t q (Client m)
prerender server client = mdo
result <- queryDyn query
x <- lift $ prerender (runQueryT server result) (runQueryT client result)
let (a, inc) = splitDynPure x
query = incrementalToDynamic =<< inc -- Can we avoid the incrementalToDynamic?
pure a
instance (Prerender t m, Monad m) => Prerender t (InputDisabledT m) where
type Client (InputDisabledT m) = InputDisabledT (Client m)
prerender (InputDisabledT server) (InputDisabledT client) = InputDisabledT $ prerender server client
instance (Prerender t m, Monad m) => Prerender t (HydratableT m) where
type Client (HydratableT m) = HydratableT (Client m)
prerender (HydratableT server) (HydratableT client) = HydratableT $ prerender server client
instance (Prerender t m, Monad m, ReflexHost t) => Prerender t (PostBuildT t m) where
type Client (PostBuildT t m) = PostBuildT t (Client m)
prerender server client = PostBuildT $ do
pb <- ask
lift $ prerender (runPostBuildT server pb) (runPostBuildT client pb)
startMarker, endMarker :: Text
startMarker = "prerender/start"
endMarker = "prerender/end"
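-- | Starting from the hydrated start marker comment, walk forward through the
-- sibling nodes (keeping count of nested start markers) until the matching end
-- marker comment is found, creating one if it is missing; the nodes in between
-- are deleted and the end comment is returned.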
deleteToPrerenderEnd :: (MonadIO m, MonadJSM m, Reflex t, MonadFix m) => DOM.Document -> HydrationRunnerT t m DOM.Comment
deleteToPrerenderEnd doc = do
startNode <- hydrateComment doc startMarker Nothing
let go (n :: Int) lastNode = Node.getNextSibling lastNode >>= \case
Nothing -> do
c <- Document.createComment doc endMarker
insertAfterPreviousNode c
pure c
Just node -> DOM.castTo DOM.Comment node >>= \case
Nothing -> go n node
Just c -> Node.getTextContentUnchecked c >>= \case
t | t == startMarker -> go (succ n) node
| t == endMarker -> case n of
0 -> pure c
_ -> go (pred n) node
| otherwise -> go n node
endNode <- go 0 $ DOM.toNode startNode
deleteBetweenExclusive startNode endNode
setPreviousNode $ Just $ DOM.toNode endNode
pure endNode
|
reflex-frp/reflex-dom
|
reflex-dom-core/src/Reflex/Dom/Prerender.hs
|
bsd-3-clause
| 12,464 | 0 | 28 | 2,525 | 3,856 | 1,957 | 1,899 | -1 | -1 |
import Distribution.Simple
main = defaultMain
{-
import Distribution.Simple (defaultMainWithHooks)
import Distribution.Simple.UUAGC (uuagcLibUserHook)
import UU.UUAGC (uuagc)
main :: IO ()
main = defaultMainWithHooks (uuagcLibUserHook uuagc)
-}
|
atzedijkstra/delimiter-separated
|
Setup.hs
|
bsd-3-clause
| 247 | 0 | 4 | 27 | 12 | 7 | 5 | 2 | 1 |
{-# LANGUAGE CPP, NamedFieldPuns, RecordWildCards, ViewPatterns #-}
-- | cabal-install CLI command: freeze
--
module Distribution.Client.CmdFreeze (
freezeCommand,
freezeAction,
) where
import Distribution.Client.ProjectOrchestration
import Distribution.Client.ProjectPlanning
import Distribution.Client.ProjectConfig
( ProjectConfig(..), ProjectConfigShared(..)
, writeProjectLocalFreezeConfig )
import Distribution.Client.Targets
( UserQualifier(..), UserConstraintScope(..), UserConstraint(..) )
import Distribution.Solver.Types.PackageConstraint
( PackageProperty(..) )
import Distribution.Solver.Types.ConstraintSource
( ConstraintSource(..) )
import Distribution.Client.DistDirLayout
( DistDirLayout(distProjectFile) )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Package
( PackageName, packageName, packageVersion )
import Distribution.Version
( VersionRange, thisVersion
, unionVersionRanges, simplifyVersionRange )
import Distribution.PackageDescription
( FlagAssignment )
import Distribution.Client.Setup
( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags
, applyFlagDefaults )
import Distribution.Simple.Setup
( HaddockFlags, fromFlagOrDefault )
import Distribution.Simple.Utils
( die', notice, wrapText )
import Distribution.Verbosity
( normal )
import Data.Monoid as Monoid
import qualified Data.Map as Map
import Data.Map (Map)
import Control.Monad (unless)
import Distribution.Simple.Command
( CommandUI(..), usageAlternatives )
import qualified Distribution.Client.Setup as Client
freezeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
freezeCommand = Client.installCommand {
commandName = "new-freeze",
commandSynopsis = "Freeze dependencies.",
commandUsage = usageAlternatives "new-freeze" [ "[FLAGS]" ],
commandDescription = Just $ \_ -> wrapText $
"The project configuration is frozen so that it will be reproducible "
++ "in future.\n\n"
++ "The precise dependency configuration for the project is written to "
++ "the 'cabal.project.freeze' file (or '$project_file.freeze' if "
++ "'--project-file' is specified). This file extends the configuration "
++ "from the 'cabal.project' file and thus is used as the project "
++ "configuration for all other commands (such as 'new-build', "
++ "'new-repl' etc).\n\n"
++ "The freeze file can be kept in source control. To make small "
++ "adjustments it may be edited manually, or to make bigger changes "
++ "you may wish to delete the file and re-freeze. For more control, "
++ "one approach is to try variations using 'new-build --dry-run' with "
++ "solver flags such as '--constraint=\"pkg < 1.2\"' and once you have "
++ "a satisfactory solution to freeze it using the 'new-freeze' command "
++ "with the same set of flags.",
commandNotes = Just $ \pname ->
"Examples:\n"
++ " " ++ pname ++ " new-freeze\n"
++ " Freeze the configuration of the current project\n\n"
++ " " ++ pname ++ " new-build --dry-run --constraint=\"aeson < 1\"\n"
++ " Check what a solution with the given constraints would look like\n"
++ " " ++ pname ++ " new-freeze --constraint=\"aeson < 1\"\n"
++ " Freeze a solution using the given constraints\n\n"
++ "Note: this command is part of the new project-based system (aka "
++ "nix-style\nlocal builds). These features are currently in beta. "
++ "Please see\n"
++ "http://cabal.readthedocs.io/en/latest/nix-local-build-overview.html "
++ "for\ndetails and advice on what you can expect to work. If you "
++ "encounter problems\nplease file issues at "
++ "https://github.com/haskell/cabal/issues and if you\nhave any time "
++ "to get involved and help with testing, fixing bugs etc then\nthat "
++ "is very much appreciated.\n"
}
-- | To a first approximation, the @freeze@ command runs the first phase of
-- the @build@ command where we bring the install plan up to date, and then
-- based on the install plan we write out a @cabal.project.freeze@ config file.
--
-- For more details on how this works, see the module
-- "Distribution.Client.ProjectOrchestration"
--
freezeAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
-> [String] -> GlobalFlags -> IO ()
freezeAction (applyFlagDefaults -> (configFlags, configExFlags, installFlags, haddockFlags))
extraArgs globalFlags = do
unless (null extraArgs) $
die' verbosity $ "'freeze' doesn't take any extra arguments: "
++ unwords extraArgs
ProjectBaseContext {
distDirLayout,
cabalDirLayout,
projectConfig,
localPackages
} <- establishProjectBaseContext verbosity cliConfig
(_, elaboratedPlan, _) <-
rebuildInstallPlan verbosity
distDirLayout cabalDirLayout
projectConfig
localPackages
let freezeConfig = projectFreezeConfig elaboratedPlan
writeProjectLocalFreezeConfig distDirLayout freezeConfig
notice verbosity $
"Wrote freeze file: " ++ distProjectFile distDirLayout "freeze"
where
verbosity = fromFlagOrDefault normal (configVerbosity configFlags)
cliConfig = commandLineFlagsToProjectConfig
globalFlags configFlags configExFlags
installFlags haddockFlags
-- | Given the install plan, produce a config value with constraints that
-- freezes the versions of packages used in the plan.
--
projectFreezeConfig :: ElaboratedInstallPlan -> ProjectConfig
projectFreezeConfig elaboratedPlan =
Monoid.mempty {
projectConfigShared = Monoid.mempty {
projectConfigConstraints =
concat (Map.elems (projectFreezeConstraints elaboratedPlan))
}
}
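-- For illustration: if the plan pins a (hypothetical) package text-1.2.3, the
-- resulting config carries a version constraint that is written to the freeze
-- file roughly as @constraints: text ==1.2.3@.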
-- | Given the install plan, produce solver constraints that will ensure the
-- solver picks the same solution again in future in different environments.
--
projectFreezeConstraints :: ElaboratedInstallPlan
-> Map PackageName [(UserConstraint, ConstraintSource)]
projectFreezeConstraints plan =
--
-- TODO: [required eventually] this is currently an underapproximation
-- since the constraints language is not expressive enough to specify the
-- precise solution. See https://github.com/haskell/cabal/issues/3502.
--
-- For the moment we deal with multiple versions in the solution by using
-- constraints that allow either version. Also, we do not include any
-- /version/ constraints for packages that are local to the project (e.g.
-- if the solution has two instances of Cabal, one from the local project
-- and one pulled in as a setup deps then we exclude all constraints on
-- Cabal, not just the constraint for the local instance since any
-- constraint would apply to both instances). We do however keep flag
-- constraints of local packages.
--
deleteLocalPackagesVersionConstraints
(Map.unionWith (++) versionConstraints flagConstraints)
where
versionConstraints :: Map PackageName [(UserConstraint, ConstraintSource)]
versionConstraints =
Map.mapWithKey
(\p v -> [(UserConstraint (UserQualified UserQualToplevel p) (PackagePropertyVersion v),
ConstraintSourceFreeze)])
versionRanges
versionRanges :: Map PackageName VersionRange
versionRanges =
Map.map simplifyVersionRange $
Map.fromListWith unionVersionRanges $
[ (packageName pkg, thisVersion (packageVersion pkg))
| InstallPlan.PreExisting pkg <- InstallPlan.toList plan
]
++ [ (packageName pkg, thisVersion (packageVersion pkg))
| InstallPlan.Configured pkg <- InstallPlan.toList plan
]
flagConstraints :: Map PackageName [(UserConstraint, ConstraintSource)]
flagConstraints =
Map.mapWithKey
(\p f -> [(UserConstraint (UserQualified UserQualToplevel p) (PackagePropertyFlags f),
ConstraintSourceFreeze)])
flagAssignments
flagAssignments :: Map PackageName FlagAssignment
flagAssignments =
Map.fromList
[ (pkgname, flags)
| InstallPlan.Configured elab <- InstallPlan.toList plan
, let flags = elabFlagAssignment elab
pkgname = packageName elab
, not (null flags) ]
-- As described above, remove the version constraints on local packages,
-- but leave any flag constraints.
deleteLocalPackagesVersionConstraints
:: Map PackageName [(UserConstraint, ConstraintSource)]
-> Map PackageName [(UserConstraint, ConstraintSource)]
deleteLocalPackagesVersionConstraints =
#if MIN_VERSION_containers(0,5,0)
Map.mergeWithKey
(\_pkgname () constraints ->
case filter (not . isVersionConstraint . fst) constraints of
[] -> Nothing
constraints' -> Just constraints')
(const Map.empty) id
localPackages
#else
Map.mapMaybeWithKey
(\pkgname constraints ->
if pkgname `Map.member` localPackages
then case filter (not . isVersionConstraint . fst) constraints of
[] -> Nothing
constraints' -> Just constraints'
else Just constraints)
#endif
isVersionConstraint (UserConstraint _ (PackagePropertyVersion _)) = True
isVersionConstraint _ = False
localPackages :: Map PackageName ()
localPackages =
Map.fromList
[ (packageName elab, ())
| InstallPlan.Configured elab <- InstallPlan.toList plan
, elabLocalToProject elab
]
|
themoritz/cabal
|
cabal-install/Distribution/Client/CmdFreeze.hs
|
bsd-3-clause
| 9,943 | 0 | 29 | 2,370 | 1,443 | 812 | 631 | 163 | 4 |
{-# LANGUAGE OverloadedStrings
, TupleSections
, RecordWildCards
, FlexibleContexts
, PatternGuards
, FlexibleInstances #-}
module System.TaskL.JSON where
import Control.Applicative
import Control.Monad
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as ByteString
import Data.Either
import qualified Data.Map as Map
import Data.Monoid hiding (All)
import Data.Foldable (toList)
import Data.Tree (Tree(..), Forest)
import Data.Aeson
import Data.Aeson.Types
import qualified Data.HashMap.Strict as HashMap
import Data.Text (Text)
import qualified Data.Text.Encoding as Text
import qualified Data.Vector as Vector
import System.TaskL.Task
import System.TaskL.Strings
instance FromJSON Module where
parseJSON = withObject "TaskL.Module" $ \body ->
do names <- scanKeys body
Module "" . Map.fromList <$>
sequence [ (name,) <$> body .: text | (name, text) <- names ]
instance ToJSON Module where
toJSON Module{..} = object [ toStr k .= toJSON v | (k,v) <- Map.toList defs ]
instance FromJSON Task where
parseJSON = withObject "TaskL.Task" template where
template body = Task <$> body .:? "vars" .!= mempty
<*> knot body
knot body = Knot <$> body .:? "cmds" .!= Commands []
<*> body .:? "deps" .!= mempty
instance ToJSON Task where
toJSON (Task vars Knot{..}) = object $ filter nonEmpty
[ "deps" .= array (toJSON <$> deps),
"vars" .= array (toJSON <$> vars), "cmds" .= toJSON code ]
where nonEmpty (_, Array a) = a /= mempty
nonEmpty _ = False
instance FromJSON Use where
parseJSON (Object o) = do
scanned <- scanKeys o
case scanned of [(ref, name)] -> Use ref <$> o .: name
_ -> mzero
parseJSON (String s) = flip Use [] <$> parseJSON (String s)
parseJSON _ = mzero
instance ToJSON Use where
toJSON Use{..} = object [toStr task .= array (toJSON <$> args)]
instance FromJSON (Tree Use) where
parseJSON (Object o) = do
deps <- o .:? "deps" .!= mempty
Node <$> parseJSON (Object o) <*> mapM parseJSON deps
parseJSON (String s) = flip Node [] <$> parseJSON (String s)
parseJSON _ = mzero
instance ToJSON (Tree Use) where
toJSON Node{..} | [] <- subForest = Object o
| otherwise = Object (uncurry HashMap.insert deps o)
where Object o = toJSON rootLabel
deps = "deps" .= array (toJSON <$> subForest)
instance FromJSON (Label, Maybe ByteString) where
parseJSON (Object o) = case [ (unStr k, v) | (k, v) <- HashMap.toList o ] of
[(Right l, String s)] -> return (l, Just (Text.encodeUtf8 s))
[(Right l, Number n)] -> return (l, Just (ByteString.pack $ show n))
_ -> mzero
parseJSON (String s) = (,Nothing) <$> parseJSON (String s)
parseJSON _ = mzero
instance ToJSON (Label, Maybe ByteString) where
toJSON (l, Nothing) = String (toStr l)
toJSON (l, Just b) = object [toStr l .= String (Text.decodeUtf8 b)]
-- Single element hashes are treated as variable references.
instance FromJSON Arg where
parseJSON (Array a) = Scalar <$> mapM varOrLit (toList a)
parseJSON v@(String _) = Scalar . (:[]) <$> varOrLit v
parseJSON v@(Number _) = Scalar . (:[]) <$> varOrLit v
parseJSON v@(Object o) = case HashMap.toList o of
[("...", Null)] -> return Tail
[("~~~", Null)] -> return All
_ -> Scalar . (:[]) <$> varOrLit v
parseJSON _ = mzero
instance ToJSON Arg where
toJSON (Scalar items) = case items of
[ ] -> String ""
[Left l ] -> var l
[Right b] -> val b
_ -> Array . Vector.fromList $ either var val <$> items
where var = object . (:[]) . (.= Null) . toStr
val = String . Text.decodeUtf8
toJSON Tail = object ["..." .= Null]
toJSON All = object ["~~~" .= Null]
instance FromJSON Name where
parseJSON = withText "Name" (either (const mzero) return . unStr)
instance ToJSON Name where toJSON = String . toStr
instance FromJSON Label where
parseJSON = withText "Label" (either (const mzero) return . unStr)
instance ToJSON Label where toJSON = String . toStr
instance FromJSON Code where
parseJSON = withArray "TaskL.Code" ((Commands <$>) . mapM parseOne . toList)
where parseOne = withArray "Task.Code/..." (argv . toList)
where argv (h:t) = (,) <$> cmd h <*> mapM parseJSON t
argv [ ] = mzero
cmd v = (:[]) <$> varOrLit v
<|> withArray "Task.Code/..." (mapM varOrLit . toList) v
instance ToJSON Code where
toJSON (Commands cmds) =
array [ array (toJSON cmd : (toJSON <$> args)) | (cmd, args) <- cmds ]
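-- Parse every key of a JSON object with 'unStr', keeping the keys that parse
-- successfully, each paired with its original text.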
scanKeys :: (Str s Text) => Object -> Parser [(s, Text)]
scanKeys = return . rights . (parse <$>) . HashMap.keys
where parse text = (,text) <$> unStr text
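-- A single-key object whose value is Null is read as a variable reference;
-- strings and numbers become literal bytes, and anything else fails to parse.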
varOrLit :: Value -> Parser (Either Label ByteString)
varOrLit (Object o) = case [ (unStr k, v) | (k, v) <- HashMap.toList o ] of
[(Right l, Null)] -> return . Left $ l
_ -> mzero
varOrLit (String s) = return . Right . Text.encodeUtf8 $ s
varOrLit (Number n) = return . Right . ByteString.pack . show $ n
varOrLit _ = mzero
onlyOne :: [t] -> Parser t
onlyOne [x] = return x
onlyOne _ = mzero
array :: [Value] -> Value
array = Array . Vector.fromList
|
solidsnack/taskl
|
System/TaskL/JSON.hs
|
bsd-3-clause
| 5,582 | 0 | 15 | 1,585 | 2,114 | 1,096 | 1,018 | 123 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -Wno-missing-signatures #-}
module Main
( main
)
where
import AutoApply
import qualified Codec.Picture as JP
import qualified Codec.Picture.Types as JP
import Control.Exception.Safe
import Control.Monad.IO.Class
import Control.Monad.Trans.Maybe ( MaybeT(..) )
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Resource
import Data.Bits
import qualified Data.ByteString.Lazy as BSL
import Data.Foldable
import Data.List ( partition )
import Data.Maybe ( catMaybes )
import Data.Ord ( comparing )
import Data.Text ( Text )
import qualified Data.Text as T
import Data.Text.Encoding ( decodeUtf8 )
import qualified Data.Vector as V
import Data.Word
import Foreign.Ptr
import Foreign.Storable ( peek
, sizeOf
)
import Say
#if defined(RENDERDOC)
import Control.Monad ( when )
import qualified Data.Map.Strict as Map
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Context as C
import qualified Language.C.Types as C
#endif
import Vulkan.CStruct.Extends
import Vulkan.Core10 as Vk
hiding ( withImage )
import Vulkan.Extensions.VK_EXT_debug_utils
import Vulkan.Extensions.VK_EXT_validation_features
import Vulkan.Utils.Debug
import Vulkan.Utils.ShaderQQ.GLSL.Glslang
import Vulkan.Zero
import VulkanMemoryAllocator as VMA
hiding ( getPhysicalDeviceProperties )
#if defined(RENDERDOC)
data RENDERDOC_API_1_1_2
C.context
(C.baseCtx <> mempty
{ C.ctxTypesTable = Map.fromList
[(C.TypeName "RENDERDOC_API_1_1_2", [t|RENDERDOC_API_1_1_2|])]
}
)
C.include "<renderdoc_app.h>"
C.include "<dlfcn.h>"
C.include "<assert.h>"
C.include "<stddef.h>"
#endif
----------------------------------------------------------------
-- Define the monad in which most of the program will run
----------------------------------------------------------------
-- | @V@ keeps track of a bunch of "global" handles and performs resource
-- management.
newtype V a = V { unV :: ReaderT GlobalHandles (ResourceT IO) a }
deriving newtype ( Functor
, Applicative
, Monad
, MonadFail
, MonadThrow
, MonadCatch
, MonadMask
, MonadIO
, MonadResource
)
runV
:: Instance
-> PhysicalDevice
-> Word32
-> Device
-> Allocator
-> V a
-> ResourceT IO a
runV ghInstance ghPhysicalDevice ghGraphicsQueueFamilyIndex ghDevice ghAllocator
= flip runReaderT GlobalHandles { .. } . unV
data GlobalHandles = GlobalHandles
{ ghInstance :: Instance
, ghPhysicalDevice :: PhysicalDevice
, ghDevice :: Device
, ghAllocator :: Allocator
, ghGraphicsQueueFamilyIndex :: Word32
}
-- Getters for global handles
getInstance :: V Instance
getInstance = V (asks ghInstance)
getGraphicsQueueFamilyIndex :: V Word32
getGraphicsQueueFamilyIndex = V (asks ghGraphicsQueueFamilyIndex)
getPhysicalDevice :: V PhysicalDevice
getPhysicalDevice = V (asks ghPhysicalDevice)
getDevice :: V Device
getDevice = V (asks ghDevice)
getAllocator :: V Allocator
getAllocator = V (asks ghAllocator)
noAllocationCallbacks :: Maybe AllocationCallbacks
noAllocationCallbacks = Nothing
--
-- Wrap a bunch of Vulkan commands so that they automatically pull global
-- handles from 'V'
--
-- Wrapped functions are suffixed with "'"
--
autoapplyDecs
(<> "'")
[ 'getDevice
, 'getPhysicalDevice
, 'getInstance
, 'getAllocator
, 'noAllocationCallbacks
]
['allocate]
[ 'invalidateAllocation
, 'withImage
, 'deviceWaitIdle
, 'getDeviceQueue
, 'getImageSubresourceLayout
, 'waitForFences
, 'withCommandBuffers
, 'withCommandPool
, 'withFence
, 'withFramebuffer
, 'withGraphicsPipelines
, 'withImageView
, 'withInstance
, 'withPipelineLayout
, 'withRenderPass
, 'withShaderModule
, 'nameObject
]
----------------------------------------------------------------
-- The program
----------------------------------------------------------------
main :: IO ()
main = runResourceT $ do
-- Create Instance, PhysicalDevice, Device and Allocator
inst <- Main.createInstance
(phys, pdi, dev) <- Main.createDevice inst
(_, allocator) <- withAllocator
zero { flags = zero
, physicalDevice = physicalDeviceHandle phys
, device = deviceHandle dev
, instance' = instanceHandle inst
, vulkanApiVersion = myApiVersion
}
allocate
#if defined(RENDERDOC)
-- We need to mark the beginning and end of the capture explicitly as this
-- application doesn't present frames with a swapchain which is the trigger
-- RenderDoc usually uses.
rdoc_api <- liftIO [C.block| RENDERDOC_API_1_1_2* {
RENDERDOC_API_1_1_2* rdoc_api = NULL;
void* mod = dlopen("librenderdoc.so", RTLD_NOW | RTLD_NOLOAD);
if (mod) {
pRENDERDOC_GetAPI RENDERDOC_GetAPI =
(pRENDERDOC_GetAPI)dlsym(mod, "RENDERDOC_GetAPI");
int ret =
RENDERDOC_GetAPI(eRENDERDOC_API_Version_1_1_2, (void **)&rdoc_api);
assert(ret == 1);
};
return rdoc_api;
}|]
when (rdoc_api /= nullPtr) $
sayErr "Running under RenderDoc"
let rdBegin = liftIO [C.exp| void { if($(RENDERDOC_API_1_1_2* rdoc_api)) $(RENDERDOC_API_1_1_2* rdoc_api)->StartFrameCapture(NULL, NULL); } |]
rdEnd = liftIO [C.exp| void { if($(RENDERDOC_API_1_1_2* rdoc_api)) $(RENDERDOC_API_1_1_2* rdoc_api)->EndFrameCapture(NULL, NULL); } |]
_ <- allocate rdBegin (const rdEnd)
#endif
-- Run our application
runV inst phys (pdiGraphicsQueueFamilyIndex pdi) dev allocator
. (`finally` deviceWaitIdle')
$ do
image <- render
let filename = "triangle.png"
sayErr $ "Writing " <> filename
liftIO $ BSL.writeFile filename (JP.encodePng image)
-- | This function renders a triangle and reads the image on the CPU
--
-- It:
-- - Initializes two images
-- - A GPU image which is used as the framebuffer
-- - A CPU image which is copied to and read on the CPU
-- - Creates a RenderPass with a single subpass
-- - Creates a graphics pipeline
-- - Creates command pool and allocated a single command buffer
-- - Uses the command buffer to
-- - Render into the GPU image
-- - Issue a barrier to make it safe to transfer from the GPU image
-- - Issue a barrier to make it safe to write to the CPU image
-- - Perform an image copy
-- - Issue a barrier to make it safe to read the CPU image on the host
-- - Submits and waits for the command buffer to finish executing
-- - Invalidates the CPU image allocation (if it isn't HOST_COHERENT)
-- - Copies the data from the CPU image and returns it
render :: V (JP.Image JP.PixelRGBA8)
render = do
-- Some things to reuse
let imageFormat = FORMAT_R8G8B8A8_UNORM
width = 256
height = 256
-- Create an image to be our render target
let
imageCreateInfo = zero
{ imageType = IMAGE_TYPE_2D
, format = imageFormat
, extent = Extent3D width height 1
, mipLevels = 1
, arrayLayers = 1
, samples = SAMPLE_COUNT_1_BIT
, tiling = IMAGE_TILING_OPTIMAL
, usage = IMAGE_USAGE_COLOR_ATTACHMENT_BIT
.|. IMAGE_USAGE_TRANSFER_SRC_BIT
, initialLayout = IMAGE_LAYOUT_UNDEFINED
}
allocationCreateInfo :: AllocationCreateInfo
allocationCreateInfo = zero { flags = ALLOCATION_CREATE_MAPPED_BIT
, usage = MEMORY_USAGE_GPU_ONLY
}
-- Allocate the image with VMA
(_, (image, _, _)) <- withImage' imageCreateInfo allocationCreateInfo
nameObject' image "GPU side image"
-- Create an image to read on the CPU
let cpuImageCreateInfo = zero { imageType = IMAGE_TYPE_2D
, format = imageFormat
, extent = Extent3D width height 1
, mipLevels = 1
, arrayLayers = 1
, samples = SAMPLE_COUNT_1_BIT
, tiling = IMAGE_TILING_LINEAR
, usage = IMAGE_USAGE_TRANSFER_DST_BIT
, initialLayout = IMAGE_LAYOUT_UNDEFINED
}
cpuAllocationCreateInfo :: AllocationCreateInfo
cpuAllocationCreateInfo = zero { flags = ALLOCATION_CREATE_MAPPED_BIT
, usage = MEMORY_USAGE_GPU_TO_CPU
}
(_, (cpuImage, cpuImageAllocation, cpuImageAllocationInfo)) <- withImage'
cpuImageCreateInfo
cpuAllocationCreateInfo
nameObject' cpuImage "CPU side image"
-- Create an image view
let imageSubresourceRange = ImageSubresourceRange
{ aspectMask = IMAGE_ASPECT_COLOR_BIT
, baseMipLevel = 0
, levelCount = 1
, baseArrayLayer = 0
, layerCount = 1
}
imageViewCreateInfo = zero
{ image = image
, viewType = IMAGE_VIEW_TYPE_2D
, format = imageFormat
, components = ComponentMapping COMPONENT_SWIZZLE_IDENTITY
COMPONENT_SWIZZLE_IDENTITY
COMPONENT_SWIZZLE_IDENTITY
COMPONENT_SWIZZLE_IDENTITY
, subresourceRange = imageSubresourceRange
}
(_, imageView) <- withImageView' imageViewCreateInfo
-- Create a renderpass with a single subpass
let
attachmentDescription :: AttachmentDescription
attachmentDescription = zero
{ format = imageFormat
, samples = SAMPLE_COUNT_1_BIT
, loadOp = ATTACHMENT_LOAD_OP_CLEAR
, storeOp = ATTACHMENT_STORE_OP_STORE
, stencilLoadOp = ATTACHMENT_LOAD_OP_DONT_CARE
, stencilStoreOp = ATTACHMENT_STORE_OP_DONT_CARE
, initialLayout = IMAGE_LAYOUT_UNDEFINED
, finalLayout = IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
}
subpass :: SubpassDescription
subpass = zero
{ pipelineBindPoint = PIPELINE_BIND_POINT_GRAPHICS
, colorAttachments =
[ zero { attachment = 0
, layout = IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
}
]
}
subpassDependency :: SubpassDependency
subpassDependency = zero
{ srcSubpass = SUBPASS_EXTERNAL
, dstSubpass = 0
, srcStageMask = PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
, srcAccessMask = zero
, dstStageMask = PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
, dstAccessMask = ACCESS_COLOR_ATTACHMENT_READ_BIT
.|. ACCESS_COLOR_ATTACHMENT_WRITE_BIT
}
(_, renderPass) <- withRenderPass' zero
{ attachments = [attachmentDescription]
, subpasses = [subpass]
, dependencies = [subpassDependency]
}
-- Create a framebuffer
let framebufferCreateInfo :: FramebufferCreateInfo '[]
framebufferCreateInfo = zero { renderPass = renderPass
, attachments = [imageView]
, width = width
, height = height
, layers = 1
}
(_, framebuffer) <- withFramebuffer' framebufferCreateInfo
-- Create the most vanilla rendering pipeline
shaderStages <- createShaders
(_, pipelineLayout) <- withPipelineLayout' zero
let
pipelineCreateInfo :: GraphicsPipelineCreateInfo '[]
pipelineCreateInfo = zero
{ stages = shaderStages
, vertexInputState = Just zero
, inputAssemblyState = Just zero
{ topology = PRIMITIVE_TOPOLOGY_TRIANGLE_LIST
, primitiveRestartEnable = False
}
, viewportState = Just . SomeStruct $ zero
{ viewports =
[ Viewport { x = 0
, y = 0
, width = realToFrac (width :: Word32)
, height = realToFrac (height :: Word32)
, minDepth = 0
, maxDepth = 1
}
]
, scissors = [ Rect2D { offset = Offset2D 0 0
, extent = Extent2D width height
}
]
}
, rasterizationState = SomeStruct $ zero
{ depthClampEnable = False
, rasterizerDiscardEnable = False
, lineWidth = 1
, polygonMode = POLYGON_MODE_FILL
, cullMode = CULL_MODE_NONE
, frontFace = FRONT_FACE_CLOCKWISE
, depthBiasEnable = False
}
, multisampleState = Just . SomeStruct $ zero
{ sampleShadingEnable = False
, rasterizationSamples = SAMPLE_COUNT_1_BIT
, minSampleShading = 1
, sampleMask = [maxBound]
}
, depthStencilState = Nothing
, colorBlendState = Just . SomeStruct $ zero
{ logicOpEnable = False
, attachments = [ zero
{ colorWriteMask =
COLOR_COMPONENT_R_BIT
.|. COLOR_COMPONENT_G_BIT
.|. COLOR_COMPONENT_B_BIT
.|. COLOR_COMPONENT_A_BIT
, blendEnable = False
}
]
}
, dynamicState = Nothing
, layout = pipelineLayout
, renderPass = renderPass
, subpass = 0
, basePipelineHandle = zero
}
(_, (_, [graphicsPipeline])) <- withGraphicsPipelines'
zero
[SomeStruct pipelineCreateInfo]
-- Create a command buffer
graphicsQueueFamilyIndex <- getGraphicsQueueFamilyIndex
let commandPoolCreateInfo :: CommandPoolCreateInfo
commandPoolCreateInfo =
zero { queueFamilyIndex = graphicsQueueFamilyIndex }
(_, commandPool) <- withCommandPool' commandPoolCreateInfo
let commandBufferAllocateInfo = zero { commandPool = commandPool
, level = COMMAND_BUFFER_LEVEL_PRIMARY
, commandBufferCount = 1
}
(_, [commandBuffer]) <- withCommandBuffers' commandBufferAllocateInfo
-- Fill command buffer
--
-- - Execute the renderpass
-- - Transition the images to be able to perform the copy
-- - Copy the image to CPU mapped memory
useCommandBuffer commandBuffer
zero { flags = COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT }
$ do
let renderPassBeginInfo = zero
{ renderPass = renderPass
, framebuffer = framebuffer
, renderArea = Rect2D zero (Extent2D width height)
, clearValues = [Color (Float32 0.1 0.1 0.1 1)]
}
cmdUseRenderPass commandBuffer
renderPassBeginInfo
SUBPASS_CONTENTS_INLINE
$ do
cmdBindPipeline commandBuffer
PIPELINE_BIND_POINT_GRAPHICS
graphicsPipeline
cmdDraw commandBuffer 3 1 0 0
-- Transition render target to transfer source
cmdPipelineBarrier
commandBuffer
PIPELINE_STAGE_ALL_GRAPHICS_BIT
PIPELINE_STAGE_TRANSFER_BIT
zero
[]
[]
[ SomeStruct zero { srcAccessMask = ACCESS_COLOR_ATTACHMENT_WRITE_BIT
, dstAccessMask = ACCESS_TRANSFER_READ_BIT
, oldLayout = IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
, newLayout = IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
, image = image
, subresourceRange = imageSubresourceRange
}
]
-- Transition cpu image to transfer dest
cmdPipelineBarrier
commandBuffer
PIPELINE_STAGE_TOP_OF_PIPE_BIT
PIPELINE_STAGE_TRANSFER_BIT
zero
[]
[]
[ SomeStruct zero { srcAccessMask = zero
, dstAccessMask = ACCESS_TRANSFER_WRITE_BIT
, oldLayout = IMAGE_LAYOUT_UNDEFINED
, newLayout = IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
, image = cpuImage
, subresourceRange = imageSubresourceRange
}
]
-- Copy the image
cmdCopyImage
commandBuffer
image
IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
cpuImage
IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
[ ImageCopy
{ srcSubresource = ImageSubresourceLayers
{ aspectMask = IMAGE_ASPECT_COLOR_BIT
, mipLevel = 0
, baseArrayLayer = 0
, layerCount = 1
}
, srcOffset = Offset3D 0 0 0
, dstSubresource = ImageSubresourceLayers
{ aspectMask = IMAGE_ASPECT_COLOR_BIT
, mipLevel = 0
, baseArrayLayer = 0
, layerCount = 1
}
, dstOffset = Offset3D 0 0 0
, extent = Extent3D width height 1
}
]
-- Transition cpu image to LAYOUT_GENERAL for reading
cmdPipelineBarrier
commandBuffer
PIPELINE_STAGE_TRANSFER_BIT
PIPELINE_STAGE_HOST_BIT
zero
[]
[]
[ SomeStruct zero { srcAccessMask = ACCESS_TRANSFER_WRITE_BIT
, dstAccessMask = ACCESS_HOST_READ_BIT
, oldLayout = IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
, newLayout = IMAGE_LAYOUT_GENERAL
, image = cpuImage
, subresourceRange = imageSubresourceRange
}
]
-- Create a fence so we can know when render is finished
(_, fence) <- withFence' zero
-- Submit the command buffer and wait for it to execute
let submitInfo = zero { waitSemaphores = []
, waitDstStageMask = []
, commandBuffers = [commandBufferHandle commandBuffer]
, signalSemaphores = []
}
graphicsQueue <- getDeviceQueue' graphicsQueueFamilyIndex 0
queueSubmit graphicsQueue [SomeStruct submitInfo] fence
let fenceTimeout = 1e9 -- 1 second
waitForFences' [fence] True fenceTimeout >>= \case
TIMEOUT -> throwString "Timed out waiting for image render and copy"
_ -> pure ()
-- If the cpu image allocation is not HOST_COHERENT this will ensure the
-- changes are present on the CPU.
invalidateAllocation' cpuImageAllocation 0 WHOLE_SIZE
-- Find the image layout and read it into a JuicyPixels Image
cpuImageLayout <- getImageSubresourceLayout'
cpuImage
ImageSubresource { aspectMask = IMAGE_ASPECT_COLOR_BIT
, mipLevel = 0
, arrayLayer = 0
}
let pixelAddr :: Int -> Int -> Ptr Word32
pixelAddr x y = plusPtr
(mappedData cpuImageAllocationInfo)
( fromIntegral (offset (cpuImageLayout :: SubresourceLayout))
+ (y * fromIntegral (rowPitch cpuImageLayout))
+ (x * sizeOf (0 :: Word32))
)
liftIO $ JP.withImage
width
height
(\x y -> JP.unpackPixel @JP.PixelRGBA8 <$> peek (pixelAddr x y))
-- | Create a vertex and fragment shader which render a colored triangle
createShaders :: V (V.Vector (SomeStruct PipelineShaderStageCreateInfo))
createShaders = do
let fragCode = [frag|
#version 450
#extension GL_ARB_separate_shader_objects : enable
layout(location = 0) in vec3 fragColor;
layout(location = 0) out vec4 outColor;
void main() {
outColor = vec4(fragColor, 1.0);
}
|]
vertCode = [vert|
#version 450
#extension GL_ARB_separate_shader_objects : enable
layout(location = 0) out vec3 fragColor;
vec2 positions[3] = vec2[](
vec2(0.0, -0.5),
vec2(0.5, 0.5),
vec2(-0.5, 0.5)
);
vec3 colors[3] = vec3[](
vec3(1.0, 1.0, 0.0),
vec3(0.0, 1.0, 1.0),
vec3(1.0, 0.0, 1.0)
);
void main() {
gl_Position = vec4(positions[gl_VertexIndex], 0.0, 1.0);
fragColor = colors[gl_VertexIndex];
}
|]
(_, fragModule) <- withShaderModule' zero { code = fragCode }
(_, vertModule) <- withShaderModule' zero { code = vertCode }
let vertShaderStageCreateInfo = zero { stage = SHADER_STAGE_VERTEX_BIT
, module' = vertModule
, name = "main"
}
fragShaderStageCreateInfo = zero { stage = SHADER_STAGE_FRAGMENT_BIT
, module' = fragModule
, name = "main"
}
pure
[SomeStruct vertShaderStageCreateInfo, SomeStruct fragShaderStageCreateInfo]
----------------------------------------------------------------
-- Initialization
----------------------------------------------------------------
myApiVersion :: Word32
myApiVersion = API_VERSION_1_0
-- | Create an instance with a debug messenger
createInstance :: MonadResource m => m Instance
createInstance = do
availableExtensionNames <-
toList
. fmap extensionName
. snd
<$> enumerateInstanceExtensionProperties Nothing
availableLayerNames <-
toList . fmap layerName . snd <$> enumerateInstanceLayerProperties
let requiredLayers = []
optionalLayers = ["VK_LAYER_KHRONOS_validation"]
requiredExtensions = [EXT_DEBUG_UTILS_EXTENSION_NAME]
optionalExtensions = [EXT_VALIDATION_FEATURES_EXTENSION_NAME]
extensions <- partitionOptReq "extension"
availableExtensionNames
optionalExtensions
requiredExtensions
layers <- partitionOptReq "layer"
availableLayerNames
optionalLayers
requiredLayers
let debugMessengerCreateInfo = zero
{ messageSeverity = DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT
.|. DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
, messageType = DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT
.|. DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT
.|. DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
, pfnUserCallback = debugCallbackPtr
}
instanceCreateInfo =
zero
{ applicationInfo = Just zero { applicationName = Nothing
, apiVersion = myApiVersion
}
, enabledLayerNames = V.fromList layers
, enabledExtensionNames = V.fromList extensions
}
::& debugMessengerCreateInfo
:& ValidationFeaturesEXT
[VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT]
[]
:& ()
(_, inst) <- withInstance' instanceCreateInfo
_ <- withDebugUtilsMessengerEXT inst debugMessengerCreateInfo Nothing allocate
pure inst
createDevice
:: (MonadResource m, MonadThrow m)
=> Instance
-> m (PhysicalDevice, PhysicalDeviceInfo, Device)
createDevice inst = do
(pdi, phys) <- pickPhysicalDevice inst physicalDeviceInfo
sayErr . ("Using device: " <>) =<< physicalDeviceName phys
let deviceCreateInfo = zero
{ queueCreateInfos =
[ SomeStruct zero { queueFamilyIndex = pdiGraphicsQueueFamilyIndex pdi
, queuePriorities = [1]
}
]
}
(_, dev) <- withDevice phys deviceCreateInfo Nothing allocate
pure (phys, pdi, dev)
----------------------------------------------------------------
-- Physical device tools
----------------------------------------------------------------
-- | Get a single PhysicalDevice deciding with a scoring function
pickPhysicalDevice
:: (MonadIO m, MonadThrow m, Ord a)
=> Instance
-> (PhysicalDevice -> m (Maybe a))
-- ^ Some "score" for a PhysicalDevice, Nothing if it is not to be chosen.
-> m (a, PhysicalDevice)
pickPhysicalDevice inst devScore = do
(_, devs) <- enumeratePhysicalDevices inst
scores <- catMaybes
<$> sequence [ fmap (, d) <$> devScore d | d <- toList devs ]
case scores of
[] -> throwString "Unable to find appropriate PhysicalDevice"
_ -> pure (maximumBy (comparing fst) scores)
-- | The Ord instance prioritises devices with more memory
data PhysicalDeviceInfo = PhysicalDeviceInfo
{ pdiTotalMemory :: Word64
, pdiGraphicsQueueFamilyIndex :: Word32
}
deriving (Eq, Ord)
physicalDeviceInfo
:: MonadIO m => PhysicalDevice -> m (Maybe PhysicalDeviceInfo)
physicalDeviceInfo phys = runMaybeT $ do
pdiTotalMemory <- do
heaps <- memoryHeaps <$> getPhysicalDeviceMemoryProperties phys
pure $ sum ((size :: MemoryHeap -> DeviceSize) <$> heaps)
pdiGraphicsQueueFamilyIndex <- do
queueFamilyProperties <- getPhysicalDeviceQueueFamilyProperties phys
let isGraphicsQueue q =
(QUEUE_GRAPHICS_BIT .&&. queueFlags q) && (queueCount q > 0)
graphicsQueueIndices = fromIntegral . fst <$> V.filter
(isGraphicsQueue . snd)
(V.indexed queueFamilyProperties)
MaybeT (pure $ graphicsQueueIndices V.!? 0)
pure PhysicalDeviceInfo { .. }
physicalDeviceName :: MonadIO m => PhysicalDevice -> m Text
physicalDeviceName phys = do
props <- getPhysicalDeviceProperties phys
pure $ decodeUtf8 (deviceName props)
----------------------------------------------------------------
-- Utils
----------------------------------------------------------------
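-- | Check the optional and required names against what is available, log any
-- that are missing, and return the subset that is actually present.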
partitionOptReq
:: (Show a, Eq a, MonadIO m) => Text -> [a] -> [a] -> [a] -> m [a]
partitionOptReq type' available optional required = do
let (optHave, optMissing) = partition (`elem` available) optional
(reqHave, reqMissing) = partition (`elem` available) required
tShow = T.pack . show
for_ optMissing
$ \n -> sayErr $ "Missing optional " <> type' <> ": " <> tShow n
case reqMissing of
[] -> pure ()
[x] -> sayErr $ "Missing required " <> type' <> ": " <> tShow x
xs -> sayErr $ "Missing required " <> type' <> "s: " <> tShow xs
pure (reqHave <> optHave)
----------------------------------------------------------------
-- Bit utils
----------------------------------------------------------------
(.&&.) :: Bits a => a -> a -> Bool
x .&&. y = (/= zeroBits) (x .&. y)
|
expipiplus1/vulkan
|
examples/offscreen/Main.hs
|
bsd-3-clause
| 29,433 | 0 | 20 | 10,930 | 4,508 | 2,544 | 1,964 | -1 | -1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
module Data.Arib.PSI.Internal.Common where
import Control.Applicative
import Control.Monad
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import Data.Bits
import Data.Word
import Data.Binary.Get
import Data.Typeable
import Data.Tagged
import Data.Arib.CRC
class HasPSIHeader a where
header :: a -> Word64
tableId :: a -> Word8
tableId a = fromIntegral $ shiftR (header a) 56
sectionLength :: a -> Int
sectionLength a = fromIntegral $ shiftR (header a) 40 .&. 0xFFF
versionNumber :: a -> Word8
versionNumber a = fromIntegral $ shiftR (header a) 17 .&. 0x1F
currentNextIndicator :: a -> Bool
currentNextIndicator a = testBit (header a) 16
sectionNumber :: a -> Word8
sectionNumber a = fromIntegral $ shiftR (header a) 8
lastSectionNumber :: a -> Word8
lastSectionNumber a = fromIntegral $ header a
{-# INLINE tableId #-}
{-# INLINE sectionLength #-}
{-# INLINE versionNumber #-}
{-# INLINE currentNextIndicator #-}
{-# INLINE sectionNumber #-}
{-# INLINE lastSectionNumber #-}
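-- Verify the section's CRC32 (computed over the table id, the length bytes and
-- the section body) via a lookahead, then decode the fixed 8-byte PSI header.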
checkAndGetHeader :: Get PSIHeader
checkAndGetHeader = {-# SCC "checkAndGetHeader" #-} do
{-# SCC "checkAndGetHeader[check]" #-} lookAhead $ do
[h,u,l] <- S.unpack <$> getByteString 3
let len = shiftL (fromIntegral u .&. 0xf) 8 .|. fromIntegral l
s <- getLazyByteString len
unless (crc32 (h `L.cons` u `L.cons` l `L.cons` s) == 0) $ fail "CRC32 check failed."
{-# SCC "checkAndGetHeader[get]" #-} PSIHeader <$> getWord64be
{-# INLINE checkAndGetHeader #-}
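-- Decode consecutive PSI sections from the buffer, skipping the trailing
-- 4-byte CRC of each one, until the input is exhausted or 0xFF stuffing bytes
-- begin.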
runPsi :: (PSIHeader -> Get a) -> L.ByteString -> [a]
runPsi get = {-# SCC "runPsi" #-} go
where
go s = case runGetOrFail ((checkAndGetHeader >>= get) <* skip 4) s of
Left _ -> []
Right (s',_,p) | L.null s' -> [p]
| L.head s' == 0xFF -> [p]
| otherwise -> p : go s'
{-# INLINE runPsi #-}
newtype PSIHeader = PSIHeader Word64
deriving (Eq, Typeable)
instance Show PSIHeader where
show h = "PSIHeader {tableId = " ++ show (tableId h) ++
", sectionLength = " ++ show (sectionLength h) ++
", versionNumber = " ++ show (versionNumber h) ++
", currentNextIndicator = " ++ show (currentNextIndicator h) ++
", sectionNumber = " ++ show (sectionNumber h) ++
", lastSectionNumber = " ++ show (lastSectionNumber h) ++
"}"
instance HasPSIHeader PSIHeader where
header (PSIHeader h) = h
{-# INLINE header #-}
instance HasPSIHeader Word64 where
header = id
{-# INLINE header #-}
type PSITag a = Tagged a (Int -> Bool)
class (Typeable a, Show a) => PSI a where
getPSI :: PSITag a -> L.ByteString -> [a]
raw :: PSITag L.ByteString
raw = Tagged (const True)
{-# INLINE raw #-}
instance PSI L.ByteString where
getPSI _ = (:[])
{-# INLINE getPSI #-}
class Pretty a where
pretty :: a -> String
class FromBinary a where
type BinaryRep a
fromBinary :: BinaryRep a -> a
|
philopon/arib
|
src/Data/Arib/PSI/Internal/Common.hs
|
bsd-3-clause
| 3,330 | 0 | 20 | 823 | 920 | 485 | 435 | 86 | 2 |
{-# LANGUAGE OverloadedStrings #-}
import Data.Attoparsec.Text
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import HEP.Parser.LHCOAnalysis.Parse
main = do
str <- TIO.readFile "test.lhco"
let r = parseOnly lhco str
case r of
Left err -> print err
Right lst -> print (length lst)
|
wavewave/LHCOAnalysis
|
test/newtest.hs
|
bsd-3-clause
| 353 | 0 | 12 | 84 | 109 | 57 | 52 | 12 | 2 |