code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---
stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses 15 values | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k |
module Language.Lua.AST where
import Text.Parsec (SourcePos)
import Language.Lua.Symbol (Intrinsic)
data LuaProgram = LuaProgram FilePath Block deriving(Eq, Ord, Show)
data Block = Block [Statement] (Maybe LastStatement) deriving(Eq, Ord, Show)
data Function = Function SourcePos [Name] Block SourcePos deriving(Eq, Ord, Show)
data Statement =
Assign SourcePos [Exp] [Exp]
|LocalDef SourcePos [Name] [Exp]
|Do SourcePos Block
|If SourcePos Exp Block (Maybe Statement)
|For SourcePos Name Exp Exp (Maybe Exp) Block
|ForEach SourcePos [Name] [Exp] Block
|While SourcePos Exp Block
|Repeat SourcePos Block Exp
|StatementCall FunctionCall
|LocalFunctionDef SourcePos Name Function
|FunctionDef SourcePos Exp Function
|FunctionDefSelf SourcePos Exp Name Function
|EmptyStatement SourcePos
deriving(Eq, Ord, Show)
type Name = (SourcePos, String)
data LastStatement =
Return SourcePos [Exp]
|Break SourcePos
deriving(Eq, Ord, Show)
data FunctionCall =
Call SourcePos Exp [Exp]
|CallSelf SourcePos Exp Name [Exp]
deriving(Eq, Ord, Show)
data Exp =
Literal SourcePos TypedValue
|Fetch Name
|Index SourcePos Exp Exp
|Parens Exp
|Dot SourcePos Exp Name
|ExpCall FunctionCall
|Binop SourcePos Intrinsic Exp Exp
|Unop SourcePos Intrinsic Exp
|Nop
deriving(Eq, Ord, Show)
data TypedValue =
TypedString { typedStringSyntax :: StringSyntax, typedStringValue :: String }
|TypedInt Integer
|TypedReal Double
|TypedBool Bool
|TypedTable [TableItem]
|TypedVararg
|TypedFunction Function
|TypedUserdata
|TypedThread
|TypedNil
deriving(Eq, Ord, Show)
data StringSyntax = DoubleQuoted | SingleQuoted | MultiLined deriving (Eq, Ord, Show)
data TableItem =
TableItemValue SourcePos Exp
|TableItemKeyValue SourcePos String Exp
|TableItemValueValue SourcePos Exp Exp
deriving(Eq, Ord, Show)
data TableItemSyntax = TableItemSyntaxValue | TableItemSyntaxKeyValue | TableItemSyntaxValueValue deriving(Eq, Ord, Show)
-- XXX: uncomfortable
getTableItemSyntax :: TableItem -> TableItemSyntax
getTableItemSyntax (TableItemValue _ _) = TableItemSyntaxValue
getTableItemSyntax (TableItemKeyValue _ _ _) = TableItemSyntaxKeyValue
getTableItemSyntax (TableItemValueValue _ _ _) = TableItemSyntaxValueValue
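-- An illustrative sketch (not part of the original module): the Lua statement
-- `local x = 1`, with positions built via 'Text.Parsec.Pos.newPos', could be
-- represented roughly as
--
-- > let pos = newPos "example.lua" 1 1
-- > in  LocalDef pos [(pos, "x")] [Literal pos (TypedInt 1)]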
| ykst/llint | Language/Lua/AST.hs | mit | 2,282 | 0 | 8 | 421 | 692 | 390 | 302 | 63 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
-- | This module enables debugging all 'ByteString' to 'Text' to 'String' conversions.
-- This is an internal module.
--
-- @since 0.5.67
module B9.Text
( Text,
LazyText,
ByteString,
LazyByteString,
Textual (..),
writeTextFile,
unsafeRenderToText,
unsafeParseFromText,
parseFromTextWithErrorMessage,
encodeAsUtf8LazyByteString,
)
where
import Control.Exception (displayException)
-- import qualified Data.ByteString as Strict
-- import qualified Data.Text.Encoding.Error as Text
import Control.Monad.IO.Class
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as LazyByteString
import qualified Data.Text as Text
import Data.Text (Text)
import qualified Data.Text.Encoding as Text
import qualified Data.Text.IO as Text
import qualified Data.Text.Lazy as LazyText
import qualified Data.Text.Lazy.Encoding as LazyText
import GHC.Stack
-- | Lazy byte strings.
--
-- A type alias to 'Data.ByteString.Lazy.ByteString' that can be used everywhere such that
-- references don't need to be qualified with the complete module name everywhere.
--
-- @since 0.5.67
type LazyByteString = LazyByteString.ByteString
-- | Lazy texts.
--
-- A type alias to 'LazyText.Text' that can be used everywhere such that
-- references don't need to be qualified with the complete module name everywhere.
--
-- @since 0.5.67
type LazyText = LazyText.Text
-- | A class for values that can be converted to/from 'Text'.
--
-- @since 0.5.67
class Textual a where
-- | Convert a value to 'Text'.
-- If an error occurred, return 'Left' with the error message.
--
-- @since 0.5.67
renderToText :: HasCallStack => a -> Either String Text
-- | Convert a 'Text' to a value, or return 'Left' with an error message.
--
-- @since 0.5.67
parseFromText :: HasCallStack => Text -> Either String a
instance Textual Text where
renderToText = Right
parseFromText = Right
instance Textual String where
renderToText = Right . Text.pack
parseFromText = Right . Text.unpack
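-- A usage sketch (not part of the original module) exercising the instances
-- above; 'renderToText' and 'parseFromText' never fail for these two types:
--
-- >>> renderToText ("hello" :: String)
-- Right "hello"
-- >>> parseFromText (Text.pack "hello") :: Either String ByteString
-- Right "hello"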
-- | Convert a 'ByteString' with UTF-8 encoded string to 'Text'
--
-- @since 0.5.67
instance Textual ByteString where
renderToText x = case Text.decodeUtf8' x of
Left u ->
Left
( "renderToText of the ByteString failed: "
++ displayException u
++ " "
++ show x
++ "\nat:\n"
++ prettyCallStack callStack
)
Right t -> Right t
parseFromText = Right . Text.encodeUtf8
-- | Convert a 'LazyByteString' with UTF-8 encoded string to 'Text'
--
-- @since 0.5.67
instance Textual LazyByteString where
renderToText x = case LazyText.decodeUtf8' x of
Left u ->
Left
( "renderToText of the LazyByteString failed: "
++ displayException u
++ " "
++ show x
++ "\nat:\n"
++ prettyCallStack callStack
)
Right t -> Right (LazyText.toStrict t)
parseFromText = Right . LazyByteString.fromStrict . Text.encodeUtf8
-- | Render a 'Text' to a file.
--
-- @since 0.5.67
writeTextFile :: (HasCallStack, MonadIO m) => FilePath -> Text -> m ()
writeTextFile f = liftIO . Text.writeFile f
-- | Render a value to 'Text' via 'renderToText' and throw a runtime exception when rendering fails.
--
-- @since 0.5.67
unsafeRenderToText :: (Textual a, HasCallStack) => a -> Text
unsafeRenderToText = either error id . renderToText
-- | Parse a 'Text' via 'parseFromText' and throw a runtime exception when parsing fails.
--
-- @since 0.5.67
unsafeParseFromText :: (Textual a, HasCallStack) => Text -> a
unsafeParseFromText = either error id . parseFromText
-- | Encode a 'String' as a UTF-8 encoded 'LazyByteString'.
--
-- @since 0.5.67
encodeAsUtf8LazyByteString :: HasCallStack => String -> LazyByteString
encodeAsUtf8LazyByteString =
LazyByteString.fromStrict . Text.encodeUtf8 . Text.pack
-- | Parse the given 'Text'.
-- Return @Left errorMessage@ or @Right a@.
-- The first parameter is included in the
-- error message.
--
-- @since 0.5.67
parseFromTextWithErrorMessage ::
(HasCallStack, Textual a) =>
-- | An arbitrary string for error messages
String ->
Text ->
Either String a
parseFromTextWithErrorMessage errorMessage b = case parseFromText b of
Left e -> Left (unwords [errorMessage, e])
Right a -> Right a
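-- A usage sketch (not part of the original module; @someText@ is a
-- placeholder 'Text' value). The first argument is prepended to whatever
-- 'parseFromText' reports:
--
-- > parseFromTextWithErrorMessage "while reading the config" someText
-- >   :: Either String ByteString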
| sheyll/b9-vm-image-builder | src/lib/B9/Text.hs | mit | 4,306 | 0 | 16 | 901 | 740 | 423 | 317 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE EmptyCase #-}
module Unison.Codebase.Editor.HandleInput
( loop
, loopState0
, LoopState(..)
, currentPath
, parseSearchType
)
where
import Unison.Prelude
-- TODO: Don't import backend
import qualified Unison.Server.Backend as Backend
import Unison.Server.QueryResult
import Unison.Server.Backend (ShallowListEntry(..), TermEntry(..), TypeEntry(..))
import qualified Unison.Codebase.MainTerm as MainTerm
import Unison.Codebase.Editor.Command as Command
import Unison.Codebase.Editor.Input
import Unison.Codebase.Editor.Output
import Unison.Codebase.Editor.DisplayObject
import qualified Unison.Codebase.Editor.Output as Output
import Unison.Codebase.Editor.SlurpResult (SlurpResult(..))
import qualified Unison.Codebase.Editor.SlurpResult as Slurp
import Unison.Codebase.Editor.SlurpComponent (SlurpComponent(..))
import qualified Unison.Codebase.Editor.SlurpComponent as SC
import Unison.Codebase.Editor.RemoteRepo (printNamespace, WriteRemotePath, writeToRead, writePathToRead)
import qualified Unison.CommandLine.InputPattern as InputPattern
import qualified Unison.CommandLine.InputPatterns as InputPatterns
import Control.Lens
import Control.Monad.State ( StateT )
import qualified Control.Monad.State as State
import Control.Monad.Except ( ExceptT(..), runExceptT, withExceptT)
import Data.Bifunctor ( second, first )
import Data.Configurator ()
import qualified Data.Foldable as Foldable
import qualified Data.List as List
import Data.List.Extra ( nubOrd )
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Text.Megaparsec as P
import qualified Data.Set as Set
import Data.Sequence ( Seq(..) )
import qualified Unison.ABT as ABT
import qualified Unison.Codebase.BranchDiff as BranchDiff
import qualified Unison.Codebase.Editor.Output.BranchDiff as OBranchDiff
import Unison.Codebase.Branch ( Branch(..)
, Branch0(..)
)
import qualified Unison.Codebase.Branch as Branch
import qualified Unison.Codebase.BranchUtil as BranchUtil
import qualified Unison.Codebase.Causal as Causal
import qualified Unison.Codebase.Editor.Output.DumpNamespace as Output.DN
import qualified Unison.Codebase.Metadata as Metadata
import Unison.Codebase.Patch ( Patch(..) )
import qualified Unison.Codebase.Patch as Patch
import Unison.Codebase.Path ( Path
, Path'(..) )
import qualified Unison.Codebase.Path as Path
import qualified Unison.Codebase.Reflog as Reflog
import Unison.Server.SearchResult ( SearchResult )
import qualified Unison.Server.SearchResult as SR
import qualified Unison.Server.SearchResult' as SR'
import qualified Unison.Codebase.ShortBranchHash as SBH
import qualified Unison.Codebase.SyncMode as SyncMode
import qualified Unison.Builtin.Decls as DD
import qualified Unison.Runtime.IOSource as DD
import qualified Unison.DataDeclaration as DD
import qualified Unison.HashQualified as HQ
import qualified Unison.HashQualified' as HQ'
import qualified Unison.Name as Name
import Unison.Name ( Name )
import Unison.Names3 ( Names(..), Names0
, pattern Names0 )
import qualified Unison.Names2 as Names
import qualified Unison.Names3 as Names3
import Unison.Parser ( Ann(..) )
import Unison.Reference ( Reference(..) )
import qualified Unison.Reference as Reference
import Unison.Referent ( Referent )
import qualified Unison.Referent as Referent
import Unison.Result ( pattern Result )
import qualified Unison.ShortHash as SH
import Unison.Term (Term)
import qualified Unison.Term as Term
import qualified Unison.Type as Type
import qualified Unison.Result as Result
import qualified Unison.UnisonFile as UF
import qualified Unison.Util.Find as Find
import Unison.Util.Free ( Free )
import qualified Unison.Util.Free as Free
import Unison.Util.List ( uniqueBy )
import qualified Unison.Util.Relation as R
import qualified Unison.Util.Relation4 as R4
import U.Util.Timing (unsafeTime)
import Unison.Util.TransitiveClosure (transitiveClosure)
import Unison.Var ( Var )
import qualified Unison.Var as Var
import qualified Unison.Codebase.TypeEdit as TypeEdit
import Unison.Codebase.TermEdit (TermEdit(..))
import qualified Unison.Codebase.TermEdit as TermEdit
import qualified Unison.Typechecker as Typechecker
import qualified Unison.PrettyPrintEnv as PPE
import Unison.Runtime.IOSource ( isTest )
import qualified Unison.Runtime.IOSource as IOSource
import qualified Unison.Util.Monoid as Monoid
import Unison.UnisonFile (TypecheckedUnisonFile)
import qualified Unison.Codebase.Editor.TodoOutput as TO
import qualified Unison.Lexer as L
import qualified Unison.LabeledDependency as LD
import Unison.LabeledDependency (LabeledDependency)
import Unison.Type (Type)
import qualified Unison.Builtin as Builtin
import qualified Unison.Builtin.Terms as Builtin
import Unison.NameSegment (NameSegment(..))
import qualified Unison.NameSegment as NameSegment
import Unison.Codebase.ShortBranchHash (ShortBranchHash)
import qualified Unison.Codebase.Editor.Propagate as Propagate
import qualified Unison.Codebase.Editor.UriParser as UriParser
import Data.Tuple.Extra (uncurry3)
import qualified Unison.CommandLine.DisplayValues as DisplayValues
import qualified Control.Error.Util as ErrorUtil
import Unison.Util.Monoid (intercalateMap)
import qualified Unison.Util.Star3 as Star3
import qualified Unison.Util.Pretty as P
import qualified Unison.Util.Relation as Relation
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as Nel
import Unison.Codebase.Editor.AuthorInfo (AuthorInfo(..))
type F m i v = Free (Command m i v)
-- type (Action m i v) a
type Action m i v = MaybeT (StateT (LoopState m v) (F m i v))
data LoopState m v
= LoopState
{ _root :: Branch m
, _lastSavedRoot :: Branch m
-- the current position in the namespace
, _currentPathStack :: NonEmpty Path.Absolute
-- TBD
-- , _activeEdits :: Set Branch.EditGuid
-- The file name last modified, and whether to skip the next file
-- change event for that path (we skip file changes if the file has
-- just been modified programmatically)
, _latestFile :: Maybe (FilePath, SkipNextUpdate)
, _latestTypecheckedFile :: Maybe (UF.TypecheckedUnisonFile v Ann)
-- The previous user input. Used to request confirmation of
-- questionable user commands.
, _lastInput :: Maybe Input
-- A 1-indexed list of strings that can be referenced by index at the
-- CLI prompt. e.g. Given ["Foo.bat", "Foo.cat"],
-- `rename 2 Foo.foo` will rename `Foo.cat` to `Foo.foo`.
, _numberedArgs :: NumberedArgs
}
type SkipNextUpdate = Bool
type InputDescription = Text
makeLenses ''LoopState
-- replacing the old read/write scalar Lens with "peek" Getter for the NonEmpty
currentPath :: Getter (LoopState m v) Path.Absolute
currentPath = currentPathStack . to Nel.head
loopState0 :: Branch m -> Path.Absolute -> LoopState m v
loopState0 b p = LoopState b b (pure p) Nothing Nothing Nothing []
type Action' m v = Action m (Either Event Input) v
defaultPatchNameSegment :: NameSegment
defaultPatchNameSegment = "patch"
prettyPrintEnvDecl :: Names -> Action' m v PPE.PrettyPrintEnvDecl
prettyPrintEnvDecl ns = eval CodebaseHashLength <&> (`PPE.fromNamesDecl` ns)
loop :: forall m v . (Monad m, Var v) => Action m (Either Event Input) v ()
loop = do
uf <- use latestTypecheckedFile
root' <- use root
currentPath' <- use currentPath
latestFile' <- use latestFile
currentBranch' <- getAt currentPath'
e <- eval Input
hqLength <- eval CodebaseHashLength
sbhLength <- eval BranchHashLength
let
currentPath'' = Path.unabsolute currentPath'
hqNameQuery q = eval $ HQNameQuery (Just currentPath'') root' q
sbh = SBH.fromHash sbhLength
root0 = Branch.head root'
currentBranch0 = Branch.head currentBranch'
defaultPatchPath :: PatchPath
defaultPatchPath = (Path' $ Left currentPath', defaultPatchNameSegment)
resolveSplit' :: (Path', a) -> (Path, a)
resolveSplit' = Path.fromAbsoluteSplit . Path.toAbsoluteSplit currentPath'
resolveToAbsolute :: Path' -> Path.Absolute
resolveToAbsolute = Path.resolve currentPath'
getAtSplit :: Path.Split -> Maybe (Branch m)
getAtSplit p = BranchUtil.getBranch p root0
getAtSplit' :: Path.Split' -> Maybe (Branch m)
getAtSplit' = getAtSplit . resolveSplit'
getPatchAtSplit' :: Path.Split' -> Action' m v (Maybe Patch)
getPatchAtSplit' s = do
let (p, seg) = Path.toAbsoluteSplit currentPath' s
b <- getAt p
eval . Eval $ Branch.getMaybePatch seg (Branch.head b)
getHQ'TermsIncludingHistorical p =
getTermsIncludingHistorical (resolveSplit' p) root0
getHQ'Terms :: Path.HQSplit' -> Set Referent
getHQ'Terms p = BranchUtil.getTerm (resolveSplit' p) root0
getHQ'Types :: Path.HQSplit' -> Set Reference
getHQ'Types p = BranchUtil.getType (resolveSplit' p) root0
getHQTerms :: HQ.HashQualified Name -> Action' m v (Set Referent)
getHQTerms hq = case hq of
HQ.NameOnly n -> let
-- absolute-ify the name, then lookup in deepTerms of root
path :: Path.Path'
path = Path.fromName' n
Path.Absolute absPath = resolveToAbsolute path
in pure $ R.lookupRan (Path.toName absPath) (Branch.deepTerms root0)
HQ.HashOnly sh -> hashOnly sh
HQ.HashQualified _ sh -> hashOnly sh
where
hashOnly sh = eval $ TermReferentsByShortHash sh
basicPrettyPrintNames0 =
Backend.basicPrettyPrintNames0 root' (Path.unabsolute currentPath')
resolveHHQS'Types :: HashOrHQSplit' -> Action' m v (Set Reference)
resolveHHQS'Types = either
(eval . TypeReferencesByShortHash)
(pure . getHQ'Types)
-- Term Refs and Cons
resolveHHQS'Referents = either
(eval . TermReferentsByShortHash)
(pure . getHQ'Terms)
getTypes :: Path.Split' -> Set Reference
getTypes = getHQ'Types . fmap HQ'.NameOnly
getTerms :: Path.Split' -> Set Referent
getTerms = getHQ'Terms . fmap HQ'.NameOnly
getPatchAt :: Path.Split' -> Action' m v Patch
getPatchAt patchPath' = do
let (p, seg) = Path.toAbsoluteSplit currentPath' patchPath'
b <- getAt p
eval . Eval $ Branch.getPatch seg (Branch.head b)
withFile ambient sourceName lexed@(text, tokens) k = do
let
getHQ = \case
L.Backticks s (Just sh) ->
Just (HQ.HashQualified (Name.unsafeFromString s) sh)
L.WordyId s (Just sh) ->
Just (HQ.HashQualified (Name.unsafeFromString s) sh)
L.SymbolyId s (Just sh) ->
Just (HQ.HashQualified (Name.unsafeFromString s) sh)
L.Hash sh -> Just (HQ.HashOnly sh)
_ -> Nothing
hqs = Set.fromList . mapMaybe (getHQ . L.payload) $ tokens
let parseNames = Backend.getCurrentParseNames currentPath'' root'
latestFile .= Just (Text.unpack sourceName, False)
latestTypecheckedFile .= Nothing
Result notes r <- eval $ Typecheck ambient parseNames sourceName lexed
case r of
-- Parsing failed
Nothing -> respond $
ParseErrors text [ err | Result.Parsing err <- toList notes ]
Just (Left errNames) -> do
ns <- makeShadowedPrintNamesFromHQ hqs errNames
ppe <- suffixifiedPPE ns
let tes = [ err | Result.TypeError err <- toList notes ]
cbs = [ bug
| Result.CompilerBug (Result.TypecheckerBug bug)
<- toList notes
]
when (not $ null tes) . respond $ TypeErrors text ppe tes
when (not $ null cbs) . respond $ CompilerBugs text ppe cbs
Just (Right uf) -> k uf
loadUnisonFile sourceName text = do
let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text)
withFile [] sourceName (text, lexed) $ \unisonFile -> do
sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames0
names <- displayNames unisonFile
pped <- prettyPrintEnvDecl names
let ppe = PPE.suffixifiedPPE pped
eval . Notify $ Typechecked sourceName ppe sr unisonFile
unlessError' EvaluationFailure do
(bindings, e) <- ExceptT . eval . Evaluate ppe $ unisonFile
lift do
let e' = Map.map go e
go (ann, kind, _hash, _uneval, eval, isHit) = (ann, kind, eval, isHit)
unless (null e') $
eval . Notify $ Evaluated text ppe bindings e'
latestTypecheckedFile .= Just unisonFile
case e of
Left (IncomingRootBranch hashes) ->
eval . Notify $ WarnIncomingRootBranch
(SBH.fromHash sbhLength $ Branch.headHash root')
(Set.map (SBH.fromHash sbhLength) hashes)
Left (UnisonFileChanged sourceName text) ->
-- We skip this update if it was programmatically generated
if maybe False snd latestFile'
then modifying latestFile (fmap (const False) <$>)
else loadUnisonFile sourceName text
Right input ->
let
ifConfirmed = ifM (confirmedCommand input)
branchNotFound = respond . BranchNotFound
branchNotFound' = respond . BranchNotFound . Path.unsplit'
patchNotFound :: Path.Split' -> Action' m v ()
patchNotFound s = respond $ PatchNotFound s
patchExists :: Path.Split' -> Action' m v ()
patchExists s = respond $ PatchAlreadyExists s
typeNotFound = respond . TypeNotFound
typeNotFound' = respond . TypeNotFound'
termNotFound = respond . TermNotFound
termNotFound' = respond . TermNotFound'
nameConflicted src tms tys = respond (DeleteNameAmbiguous hqLength src tms tys)
typeConflicted src = nameConflicted src Set.empty
termConflicted src tms = nameConflicted src tms Set.empty
hashConflicted src = respond . HashAmbiguous src
typeReferences :: [SearchResult] -> [Reference]
typeReferences rs
= [ r | SR.Tp (SR.TypeResult _ r _) <- rs ]
termReferences :: [SearchResult] -> [Reference]
termReferences rs =
[ r | SR.Tm (SR.TermResult _ (Referent.Ref r) _) <- rs ]
termResults rs = [ r | SR.Tm r <- rs ]
typeResults rs = [ r | SR.Tp r <- rs ]
doRemoveReplacement from patchPath isTerm = do
let patchPath' = fromMaybe defaultPatchPath patchPath
patch <- getPatchAt patchPath'
QueryResult misses' hits <- hqNameQuery [from]
let tpRefs = Set.fromList $ typeReferences hits
tmRefs = Set.fromList $ termReferences hits
misses = Set.difference (Set.fromList misses') if isTerm
then Set.fromList $ HQ'.toHQ . SR.termName <$> termResults hits
else Set.fromList $ HQ'.toHQ . SR.typeName <$> typeResults hits
go :: Reference -> Action m (Either Event Input) v ()
go fr = do
let termPatch =
over Patch.termEdits (R.deleteDom fr) patch
typePatch =
over Patch.typeEdits (R.deleteDom fr) patch
(patchPath'', patchName) = resolveSplit' patchPath'
-- Save the modified patch
stepAtM inputDescription
(patchPath'',
Branch.modifyPatches
patchName
(const (if isTerm then termPatch else typePatch)))
-- Say something
success
unless (Set.null misses) $
respond $ SearchTermsNotFound (Set.toList misses)
traverse_ go (if isTerm then tmRefs else tpRefs)
branchExists dest _x = respond $ BranchAlreadyExists dest
branchExistsSplit = branchExists . Path.unsplit'
typeExists dest = respond . TypeAlreadyExists dest
termExists dest = respond . TermAlreadyExists dest
-- | try to get these as close as possible to the command that caused the change
inputDescription :: InputDescription
inputDescription = case input of
ForkLocalBranchI src dest -> "fork " <> hp' src <> " " <> p' dest
MergeLocalBranchI src dest mode -> case mode of
Branch.RegularMerge -> "merge " <> p' src <> " " <> p' dest
Branch.SquashMerge -> "merge.squash " <> p' src <> " " <> p' dest
ResetRootI src -> "reset-root " <> hp' src
AliasTermI src dest -> "alias.term " <> hhqs' src <> " " <> ps' dest
AliasTypeI src dest -> "alias.type " <> hhqs' src <> " " <> ps' dest
AliasManyI srcs dest ->
"alias.many " <> intercalateMap " " hqs srcs <> " " <> p' dest
MoveTermI src dest -> "move.term " <> hqs' src <> " " <> ps' dest
MoveTypeI src dest -> "move.type " <> hqs' src <> " " <> ps' dest
MoveBranchI src dest -> "move.namespace " <> ops' src <> " " <> ps' dest
MovePatchI src dest -> "move.patch " <> ps' src <> " " <> ps' dest
CopyPatchI src dest -> "copy.patch " <> ps' src <> " " <> ps' dest
DeleteI thing -> "delete " <> hqs' thing
DeleteTermI def -> "delete.term " <> hqs' def
DeleteTypeI def -> "delete.type " <> hqs' def
DeleteBranchI opath -> "delete.namespace " <> ops' opath
DeletePatchI path -> "delete.patch " <> ps' path
ReplaceI src target p ->
"replace " <> HQ.toText src <> " "
<> HQ.toText target <> " "
<> opatch p
ResolveTermNameI path -> "resolve.termName " <> hqs' path
ResolveTypeNameI path -> "resolve.typeName " <> hqs' path
AddI _selection -> "add"
UpdateI p _selection -> "update " <> opatch p
PropagatePatchI p scope -> "patch " <> ps' p <> " " <> p' scope
UndoI{} -> "undo"
UiI -> "ui"
ExecuteI s -> "execute " <> Text.pack s
IOTestI hq -> "io.test " <> HQ.toText hq
LinkI md defs ->
"link " <> HQ.toText md <> " " <> intercalateMap " " hqs' defs
UnlinkI md defs ->
"unlink " <> HQ.toText md <> " " <> intercalateMap " " hqs' defs
UpdateBuiltinsI -> "builtins.update"
MergeBuiltinsI -> "builtins.merge"
MergeIOBuiltinsI -> "builtins.mergeio"
PullRemoteBranchI orepo dest _syncMode ->
(Text.pack . InputPattern.patternName
$ InputPatterns.patternFromInput input)
<> " "
-- todo: show the actual config-loaded namespace
<> maybe "(remote namespace from .unisonConfig)"
(uncurry3 printNamespace) orepo
<> " "
<> p' dest
LoadI{} -> wat
PreviewAddI{} -> wat
PreviewUpdateI{} -> wat
CreateAuthorI (NameSegment id) name -> "create.author " <> id <> " " <> name
CreatePullRequestI{} -> wat
LoadPullRequestI base head dest ->
"pr.load "
<> uncurry3 printNamespace base
<> " "
<> uncurry3 printNamespace head
<> " "
<> p' dest
PushRemoteBranchI{} -> wat
PreviewMergeLocalBranchI{} -> wat
DiffNamespaceI{} -> wat
SwitchBranchI{} -> wat
PopBranchI{} -> wat
NamesI{} -> wat
TodoI{} -> wat
ListEditsI{} -> wat
ListDependenciesI{} -> wat
ListDependentsI{} -> wat
HistoryI{} -> wat
TestI{} -> wat
LinksI{} -> wat
SearchByNameI{} -> wat
FindShallowI{} -> wat
FindPatchI{} -> wat
ShowDefinitionI{} -> wat
DisplayI{} -> wat
DocsI{} -> wat
ShowDefinitionByPrefixI{} -> wat
ShowReflogI{} -> wat
DebugNumberedArgsI{} -> wat
DebugBranchHistoryI{} -> wat
DebugTypecheckedUnisonFileI{} -> wat
DebugDumpNamespacesI{} -> wat
DebugDumpNamespaceSimpleI{} -> wat
DebugClearWatchI {} -> wat
QuitI{} -> wat
DeprecateTermI{} -> undefined
DeprecateTypeI{} -> undefined
RemoveTermReplacementI src p ->
"delete.term-replacement" <> HQ.toText src <> " " <> opatch p
RemoveTypeReplacementI src p ->
"delete.type-replacement" <> HQ.toText src <> " " <> opatch p
where
hp' = either (Text.pack . show) p'
p' = Text.pack . show . resolveToAbsolute
ops' = maybe "." ps'
opatch = ps' . fromMaybe defaultPatchPath
wat = error $ show input ++ " is not expected to alter the branch"
hhqs' (Left sh) = SH.toText sh
hhqs' (Right x) = hqs' x
hqs' (p, hq) =
Monoid.unlessM (Path.isRoot' p) (p' p) <> "." <> Text.pack (show hq)
hqs (p, hq) = hqs' (Path' . Right . Path.Relative $ p, hq)
ps' = p' . Path.unsplit'
stepAt = Unison.Codebase.Editor.HandleInput.stepAt inputDescription
stepManyAt = Unison.Codebase.Editor.HandleInput.stepManyAt inputDescription
stepManyAtNoSync =
Unison.Codebase.Editor.HandleInput.stepManyAtNoSync
updateRoot = flip Unison.Codebase.Editor.HandleInput.updateRoot inputDescription
syncRoot = use root >>= updateRoot
updateAtM = Unison.Codebase.Editor.HandleInput.updateAtM inputDescription
unlessGitError = unlessError' (Output.GitError input)
importRemoteBranch ns mode = ExceptT . eval $ ImportRemoteBranch ns mode
viewRemoteBranch ns = ExceptT . eval $ ViewRemoteBranch ns
syncRemoteRootBranch repo b mode =
ExceptT . eval $ SyncRemoteRootBranch repo b mode
loadSearchResults = eval . LoadSearchResults
handleFailedDelete failed failedDependents = do
failed <- loadSearchResults $ SR.fromNames failed
failedDependents <- loadSearchResults $ SR.fromNames failedDependents
ppe <- fqnPPE =<< makePrintNamesFromLabeled'
(foldMap SR'.labeledDependencies $ failed <> failedDependents)
respond $ CantDelete ppe failed failedDependents
saveAndApplyPatch patchPath'' patchName patch' = do
stepAtM (inputDescription <> " (1/2)")
(patchPath'',
Branch.modifyPatches patchName (const patch'))
-- Apply the modified patch to the current path
-- since we might be able to propagate further.
void $ propagatePatch inputDescription patch' currentPath'
-- Say something
success
previewResponse sourceName sr uf = do
names <- displayNames uf
ppe <- PPE.suffixifiedPPE <$> prettyPrintEnvDecl names
respond $ Typechecked (Text.pack sourceName) ppe sr uf
addDefaultMetadata
:: SlurpComponent v
-> Action m (Either Event Input) v ()
addDefaultMetadata adds = do
let addedVs = Set.toList $ SC.types adds <> SC.terms adds
addedNs = traverse (Path.hqSplitFromName' . Name.fromVar) addedVs
case addedNs of
Nothing ->
error $ "I couldn't parse a name I just added to the codebase! "
<> "-- Added names: " <> show addedVs
Just addedNames -> do
dm <- resolveDefaultMetadata currentPath'
case toList dm of
[] -> pure ()
dm' -> do
let hqs = traverse InputPatterns.parseHashQualifiedName dm'
case hqs of
Left e -> respond $ ConfiguredMetadataParseError
(Path.absoluteToPath' currentPath')
(show dm')
e
Right defaultMeta ->
manageLinks True addedNames defaultMeta Metadata.insert
-- Add/remove links between definitions and metadata.
-- `silent` controls whether this produces any output to the user.
-- `srcs` is (names of the) definitions to pass to `op`
-- `mdValues` is (names of the) metadata to pass to `op`
-- `op` is the operation to add/remove/alter metadata mappings.
-- e.g. `Metadata.insert` is passed to add metadata links.
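-- A usage sketch (not from the original source; `someDefn` and `someDoc` are
-- placeholder names): `manageLinks False [someDefn] [someDoc] Metadata.insert`
-- would link the metadata value named `someDoc` to the definition(s) named
-- `someDefn` and report the resulting namespace diff to the user.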
manageLinks :: Bool
-> [(Path', HQ'.HQSegment)]
-> [HQ.HashQualified Name]
-> (forall r. Ord r
=> (r, Metadata.Type, Metadata.Value)
-> Branch.Star r NameSegment
-> Branch.Star r NameSegment)
-> Action m (Either Event Input) v ()
manageLinks silent srcs mdValues op = do
mdValuels <- fmap (first toList) <$>
traverse (\x -> fmap (,x) (getHQTerms x)) mdValues
before <- Branch.head <$> use root
traverse_ go mdValuels
after <- Branch.head <$> use root
(ppe, outputDiff) <- diffHelper before after
if not silent then
if OBranchDiff.isEmpty outputDiff
then respond NoOp
else respondNumbered $ ShowDiffNamespace Path.absoluteEmpty
Path.absoluteEmpty
ppe
outputDiff
else unless (OBranchDiff.isEmpty outputDiff) $
respond DefaultMetadataNotification
where
go (mdl, hqn) = do
newRoot <- use root
let r0 = Branch.head newRoot
getTerms p = BranchUtil.getTerm (resolveSplit' p) r0
getTypes p = BranchUtil.getType (resolveSplit' p) r0
!srcle = toList . getTerms =<< srcs
!srclt = toList . getTypes =<< srcs
ppe = Backend.basicSuffixifiedNames
sbhLength
newRoot
(Path.unabsolute currentPath')
case mdl of
[r@(Referent.Ref mdValue)] -> do
mdType <- eval $ LoadTypeOfTerm mdValue
case mdType of
Nothing -> respond $ MetadataMissingType ppe r
Just ty -> do
let steps =
bimap (Path.unabsolute . resolveToAbsolute)
(const . step $ Type.toReference ty)
<$> srcs
stepManyAtNoSync steps
where
step mdType b0 =
let tmUpdates terms = foldl' go terms srcle
where go terms src = op (src, mdType, mdValue) terms
tyUpdates types = foldl' go types srclt
where go types src = op (src, mdType, mdValue) types
in over Branch.terms tmUpdates . over Branch.types tyUpdates $ b0
mdValues -> respond $ MetadataAmbiguous hqn ppe mdValues
delete
:: (Path.HQSplit' -> Set Referent) -- compute matching terms
-> (Path.HQSplit' -> Set Reference) -- compute matching types
-> Path.HQSplit'
-> Action' m v ()
delete getHQ'Terms getHQ'Types hq = do
let matchingTerms = toList (getHQ'Terms hq)
let matchingTypes = toList (getHQ'Types hq)
case (matchingTerms, matchingTypes) of
([], []) -> respond (NameNotFound hq)
(Set.fromList -> tms, Set.fromList -> tys) -> goMany tms tys
where
resolvedPath = resolveSplit' (HQ'.toName <$> hq)
goMany tms tys = do
let rootNames = Branch.toNames0 root0
name = Path.toName (Path.unsplit resolvedPath)
toRel :: Ord ref => Set ref -> R.Relation Name ref
toRel = R.fromList . fmap (name,) . toList
-- these names are relative to the root
toDelete = Names0 (toRel tms) (toRel tys)
(failed, failedDependents) <-
getEndangeredDependents (eval . GetDependents) toDelete rootNames
if failed == mempty then do
let makeDeleteTermNames = fmap (BranchUtil.makeDeleteTermName resolvedPath) . toList $ tms
let makeDeleteTypeNames = fmap (BranchUtil.makeDeleteTypeName resolvedPath) . toList $ tys
stepManyAt (makeDeleteTermNames ++ makeDeleteTypeNames)
root'' <- use root
diffHelper (Branch.head root') (Branch.head root'') >>=
respondNumbered . uncurry ShowDiffAfterDeleteDefinitions
else handleFailedDelete failed failedDependents
displayI outputLoc hq = do
uf <- use latestTypecheckedFile >>= addWatch (HQ.toString hq)
case uf of
Nothing -> do
let parseNames0 = (`Names3.Names` mempty) basicPrettyPrintNames0
results = Names3.lookupHQTerm hq parseNames0
if Set.null results then
respond $ SearchTermsNotFound [hq]
else if Set.size results > 1 then
respond $ TermAmbiguous hq results
-- ... but use the unsuffixed names for display
else do
let tm = Term.fromReferent External $ Set.findMin results
pped <- prettyPrintEnvDecl parseNames0
tm <- eval $ Evaluate1 (PPE.suffixifiedPPE pped) True tm
case tm of
Left e -> respond (EvaluationFailure e)
Right tm -> doDisplay outputLoc parseNames0 (Term.unannotate tm)
Just (toDisplay, unisonFile) -> do
ppe <- executePPE unisonFile
unlessError' EvaluationFailure do
evalResult <- ExceptT . eval . Evaluate ppe $ unisonFile
case Command.lookupEvalResult toDisplay evalResult of
Nothing -> error $ "Evaluation dropped a watch expression: " <> HQ.toString hq
Just tm -> lift do
ns <- displayNames unisonFile
doDisplay outputLoc ns tm
in case input of
ShowReflogI -> do
entries <- convertEntries Nothing [] <$> eval LoadReflog
numberedArgs .=
fmap (('#':) . SBH.toString . Output.hash) entries
respond $ ShowReflog entries
where
-- reverses & formats entries, adds synthetic entries when there is a
-- discontinuity in the reflog.
convertEntries :: Maybe Branch.Hash
-> [Output.ReflogEntry]
-> [Reflog.Entry]
-> [Output.ReflogEntry]
convertEntries _ acc [] = acc
convertEntries Nothing acc entries@(Reflog.Entry old _ _ : _) =
convertEntries
(Just old)
(Output.ReflogEntry (SBH.fromHash sbhLength old) "(initial reflogged namespace)" : acc)
entries
convertEntries (Just lastHash) acc entries@(Reflog.Entry old new reason : rest) =
if lastHash /= old then
convertEntries
(Just old)
(Output.ReflogEntry (SBH.fromHash sbhLength old) "(external change)" : acc)
entries
else
convertEntries
(Just new)
(Output.ReflogEntry (SBH.fromHash sbhLength new) reason : acc)
rest
ResetRootI src0 ->
case src0 of
Left hash -> unlessError do
newRoot <- resolveShortBranchHash hash
lift do
updateRoot newRoot
success
Right path' -> do
newRoot <- getAt $ resolveToAbsolute path'
if Branch.isEmpty newRoot then respond $ BranchNotFound path'
else do
updateRoot newRoot
success
ForkLocalBranchI src0 dest0 -> do
let tryUpdateDest srcb dest0 = do
let dest = resolveToAbsolute dest0
-- if dest isn't empty: leave dest unchanged, and complain.
destb <- getAt dest
if Branch.isEmpty destb then do
ok <- updateAtM dest (const $ pure srcb)
if ok then success else respond $ BranchEmpty src0
else respond $ BranchAlreadyExists dest0
case src0 of
Left hash -> unlessError do
srcb <- resolveShortBranchHash hash
lift $ tryUpdateDest srcb dest0
Right path' -> do
srcb <- getAt $ resolveToAbsolute path'
if Branch.isEmpty srcb then respond $ BranchNotFound path'
else tryUpdateDest srcb dest0
MergeLocalBranchI src0 dest0 mergeMode -> do
let [src, dest] = resolveToAbsolute <$> [src0, dest0]
srcb <- getAt src
if Branch.isEmpty srcb then branchNotFound src0
else do
let err = Just $ MergeAlreadyUpToDate src0 dest0
mergeBranchAndPropagateDefaultPatch mergeMode inputDescription err srcb (Just dest0) dest
PreviewMergeLocalBranchI src0 dest0 -> do
let [src, dest] = resolveToAbsolute <$> [src0, dest0]
srcb <- getAt src
if Branch.isEmpty srcb then branchNotFound src0
else do
destb <- getAt dest
merged <- eval $ Merge Branch.RegularMerge srcb destb
if merged == destb
then respond (PreviewMergeAlreadyUpToDate src0 dest0)
else
diffHelper (Branch.head destb) (Branch.head merged) >>=
respondNumbered . uncurry (ShowDiffAfterMergePreview dest0 dest)
DiffNamespaceI before0 after0 -> do
let [beforep, afterp] =
resolveToAbsolute <$> [before0, after0]
before <- Branch.head <$> getAt beforep
after <- Branch.head <$> getAt afterp
(ppe, outputDiff) <- diffHelper before after
respondNumbered $ ShowDiffNamespace beforep afterp ppe outputDiff
CreatePullRequestI baseRepo headRepo -> unlessGitError do
(cleanupBase, baseBranch) <- viewRemoteBranch baseRepo
(cleanupHead, headBranch) <- viewRemoteBranch headRepo
lift do
merged <- eval $ Merge Branch.RegularMerge baseBranch headBranch
(ppe, diff) <- diffHelper (Branch.head baseBranch) (Branch.head merged)
respondNumbered $ ShowDiffAfterCreatePR baseRepo headRepo ppe diff
eval . Eval $ do
cleanupBase
cleanupHead
LoadPullRequestI baseRepo headRepo dest0 -> do
let desta = resolveToAbsolute dest0
let dest = Path.unabsolute desta
destb <- getAt desta
if Branch.isEmpty0 (Branch.head destb) then unlessGitError do
baseb <- importRemoteBranch baseRepo SyncMode.ShortCircuit
headb <- importRemoteBranch headRepo SyncMode.ShortCircuit
lift $ do
mergedb <- eval $ Merge Branch.RegularMerge baseb headb
squashedb <- eval $ Merge Branch.SquashMerge headb baseb
stepManyAt
[BranchUtil.makeSetBranch (dest, "base") baseb
,BranchUtil.makeSetBranch (dest, "head") headb
,BranchUtil.makeSetBranch (dest, "merged") mergedb
,BranchUtil.makeSetBranch (dest, "squashed") squashedb]
let base = snoc dest0 "base"
head = snoc dest0 "head"
merged = snoc dest0 "merged"
squashed = snoc dest0 "squashed"
respond $ LoadPullRequest baseRepo headRepo base head merged squashed
loadPropagateDiffDefaultPatch
inputDescription
(Just merged)
(snoc desta "merged")
else
respond . BranchNotEmpty . Path.Path' . Left $ currentPath'
-- move the root to a sub-branch
MoveBranchI Nothing dest -> do
b <- use root
stepManyAt [ (Path.empty, const Branch.empty0)
, BranchUtil.makeSetBranch (resolveSplit' dest) b ]
success
MoveBranchI (Just src) dest ->
maybe (branchNotFound' src) srcOk (getAtSplit' src)
where
srcOk b = maybe (destOk b) (branchExistsSplit dest) (getAtSplit' dest)
destOk b = do
stepManyAt
[ BranchUtil.makeSetBranch (resolveSplit' src) Branch.empty
, BranchUtil.makeSetBranch (resolveSplit' dest) b ]
success -- could give rando stats about new defns
MovePatchI src dest -> do
psrc <- getPatchAtSplit' src
pdest <- getPatchAtSplit' dest
case (psrc, pdest) of
(Nothing, _) -> patchNotFound src
(_, Just _) -> patchExists dest
(Just p, Nothing) -> do
stepManyAt [
BranchUtil.makeDeletePatch (resolveSplit' src),
BranchUtil.makeReplacePatch (resolveSplit' dest) p ]
success
CopyPatchI src dest -> do
psrc <- getPatchAtSplit' src
pdest <- getPatchAtSplit' dest
case (psrc, pdest) of
(Nothing, _) -> patchNotFound src
(_, Just _) -> patchExists dest
(Just p, Nothing) -> do
stepAt (BranchUtil.makeReplacePatch (resolveSplit' dest) p)
success
DeletePatchI src -> do
psrc <- getPatchAtSplit' src
case psrc of
Nothing -> patchNotFound src
Just _ -> do
stepAt (BranchUtil.makeDeletePatch (resolveSplit' src))
success
DeleteBranchI Nothing ->
ifConfirmed
(do
stepAt (Path.empty, const Branch.empty0)
respond DeletedEverything)
(respond DeleteEverythingConfirmation)
DeleteBranchI (Just p) ->
maybe (branchNotFound' p) go $ getAtSplit' p
where
go (Branch.head -> b) = do
(failed, failedDependents) <-
let rootNames = Branch.toNames0 root0
toDelete = Names.prefix0
(Path.toName . Path.unsplit . resolveSplit' $ p) -- resolveSplit' incorporates currentPath
(Branch.toNames0 b)
in getEndangeredDependents (eval . GetDependents) toDelete rootNames
if failed == mempty then do
stepAt $ BranchUtil.makeSetBranch (resolveSplit' p) Branch.empty
-- Looks similar to the 'toDelete' above... investigate me! ;)
diffHelper b Branch.empty0 >>=
respondNumbered
. uncurry (ShowDiffAfterDeleteBranch
$ resolveToAbsolute (Path.unsplit' p))
else handleFailedDelete failed failedDependents
SwitchBranchI path' -> do
let path = resolveToAbsolute path'
currentPathStack %= Nel.cons path
branch' <- getAt path
when (Branch.isEmpty branch') (respond $ CreatedNewBranch path)
PopBranchI -> use (currentPathStack . to Nel.uncons) >>= \case
(_, Nothing) -> respond StartOfCurrentPathHistory
(_, Just t) -> currentPathStack .= t
HistoryI resultsCap diffCap from -> case from of
Left hash -> unlessError do
b <- resolveShortBranchHash hash
lift $ doHistory 0 b []
Right path' -> do
let path = resolveToAbsolute path'
branch' <- getAt path
if Branch.isEmpty branch' then respond $ CreatedNewBranch path
else doHistory 0 branch' []
where
doHistory !n b acc =
if maybe False (n >=) resultsCap then
respond $ History diffCap acc (PageEnd (sbh $ Branch.headHash b) n)
else case Branch._history b of
Causal.One{} ->
respond $ History diffCap acc (EndOfLog . sbh $ Branch.headHash b)
Causal.Merge{..} ->
respond $ History diffCap acc (MergeTail (sbh $ Branch.headHash b) . map sbh $ Map.keys tails)
Causal.Cons{..} -> do
b' <- fmap Branch.Branch . eval . Eval $ snd tail
let elem = (sbh $ Branch.headHash b, Branch.namesDiff b' b)
doHistory (n+1) b' (elem : acc)
UndoI -> do
prev <- eval . Eval $ Branch.uncons root'
case prev of
Nothing ->
respond . CantUndo $ if Branch.isOne root' then CantUndoPastStart
else CantUndoPastMerge
Just (_, prev) -> do
updateRoot prev
diffHelper (Branch.head prev) (Branch.head root') >>=
respondNumbered . uncurry Output.ShowDiffAfterUndo
UiI -> eval UI
AliasTermI src dest -> do
referents <- resolveHHQS'Referents src
case (toList referents, toList (getTerms dest)) of
([r], []) -> do
stepAt (BranchUtil.makeAddTermName (resolveSplit' dest) r (oldMD r))
success
([_], rs@(_:_)) -> termExists dest (Set.fromList rs)
([], _) -> either termNotFound' termNotFound src
(rs, _) ->
either hashConflicted termConflicted src (Set.fromList rs)
where
oldMD r = either (const mempty)
(\src ->
let p = resolveSplit' src in
BranchUtil.getTermMetadataAt p r root0)
src
AliasTypeI src dest -> do
refs <- resolveHHQS'Types src
case (toList refs, toList (getTypes dest)) of
([r], []) -> do
stepAt (BranchUtil.makeAddTypeName (resolveSplit' dest) r (oldMD r))
success
([_], rs@(_:_)) -> typeExists dest (Set.fromList rs)
([], _) -> either typeNotFound' typeNotFound src
(rs, _) ->
either
(\src -> hashConflicted src . Set.map Referent.Ref)
typeConflicted
src
(Set.fromList rs)
where
oldMD r =
either (const mempty)
(\src ->
let p = resolveSplit' src in
BranchUtil.getTypeMetadataAt p r root0)
src
-- this implementation will happily produce name conflicts,
-- but will surface them in a normal diff at the end of the operation.
AliasManyI srcs dest' -> do
let destAbs = resolveToAbsolute dest'
old <- getAt destAbs
let (unknown, actions) = foldl' go mempty srcs
stepManyAt actions
new <- getAt destAbs
diffHelper (Branch.head old) (Branch.head new) >>=
respondNumbered . uncurry (ShowDiffAfterModifyBranch dest' destAbs)
unless (null unknown) $
respond . SearchTermsNotFound . fmap fixupOutput $ unknown
where
-- a list of missing sources (if any) and the actions that do the work
go :: ([Path.HQSplit], [(Path, Branch0 m -> Branch0 m)])
-> Path.HQSplit
-> ([Path.HQSplit], [(Path, Branch0 m -> Branch0 m)])
go (missingSrcs, actions) hqsrc =
let
src :: Path.Split
src = second HQ'.toName hqsrc
proposedDest :: Path.Split
proposedDest = second HQ'.toName hqProposedDest
hqProposedDest :: Path.HQSplit
hqProposedDest = first Path.unabsolute $
Path.resolve (resolveToAbsolute dest') hqsrc
-- `Nothing` if src doesn't exist
doType :: Maybe [(Path, Branch0 m -> Branch0 m)]
doType = case ( BranchUtil.getType hqsrc currentBranch0
, BranchUtil.getType hqProposedDest root0
) of
(null -> True, _) -> Nothing -- missing src
(rsrcs, existing) -> -- happy path
Just . map addAlias . toList $ Set.difference rsrcs existing
where
addAlias r = BranchUtil.makeAddTypeName proposedDest r (oldMD r)
oldMD r = BranchUtil.getTypeMetadataAt src r currentBranch0
doTerm :: Maybe [(Path, Branch0 m -> Branch0 m)]
doTerm = case ( BranchUtil.getTerm hqsrc currentBranch0
, BranchUtil.getTerm hqProposedDest root0
) of
(null -> True, _) -> Nothing -- missing src
(rsrcs, existing) ->
Just . map addAlias . toList $ Set.difference rsrcs existing
where
addAlias r = BranchUtil.makeAddTermName proposedDest r (oldMD r)
oldMD r = BranchUtil.getTermMetadataAt src r currentBranch0
in case (doType, doTerm) of
(Nothing, Nothing) -> (missingSrcs :> hqsrc, actions)
(Just as, Nothing) -> (missingSrcs, actions ++ as)
(Nothing, Just as) -> (missingSrcs, actions ++ as)
(Just as1, Just as2) -> (missingSrcs, actions ++ as1 ++ as2)
fixupOutput :: Path.HQSplit -> HQ.HashQualified Name
fixupOutput = fmap Path.toName . HQ'.toHQ . Path.unsplitHQ
NamesI thing -> do
ns0 <- basicParseNames0
let ns = Names ns0 mempty
terms = Names3.lookupHQTerm thing ns
types = Names3.lookupHQType thing ns
printNames = Names basicPrettyPrintNames0 mempty
terms' :: Set (Referent, Set (HQ'.HashQualified Name))
terms' = Set.map go terms where
go r = (r, Names3.termName hqLength r printNames)
types' :: Set (Reference, Set (HQ'.HashQualified Name))
types' = Set.map go types where
go r = (r, Names3.typeName hqLength r printNames)
respond $ ListNames hqLength (toList types') (toList terms')
LinkI mdValue srcs -> do
manageLinks False srcs [mdValue] Metadata.insert
syncRoot
UnlinkI mdValue srcs -> do
manageLinks False srcs [mdValue] Metadata.delete
syncRoot
-- > links List.map (.Docs .English)
-- > links List.map -- give me all the
-- > links Optional License
LinksI src mdTypeStr -> unlessError do
(ppe, out) <- getLinks input src (Right mdTypeStr)
lift do
numberedArgs .= fmap (HQ.toString . view _1) out
respond $ ListOfLinks ppe out
DocsI src -> fileByName where
{- Given `docs foo`, we look for docs in 3 places, in this order:
(fileByName) First check the file for `foo.doc`, and if found do `display foo.doc`
(codebaseByMetadata) Next check for doc metadata linked to `foo` in the codebase
(codebaseByName) Lastly check for `foo.doc` in the codebase and if found do `display foo.doc`
-}
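-- A concrete sketch of the lookup order above (not from the original source;
-- `List.frobnicate` is a placeholder name): `docs List.frobnicate` first tries
-- `List.frobnicate.doc` in the scratch file, then any doc metadata linked to
-- `List.frobnicate` in the codebase, and finally `List.frobnicate.doc` in the
-- codebase.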
hq :: HQ.HashQualified Name
hq = let
hq' :: HQ'.HashQualified Name
hq' = Name.convert @Path.Path' @Name <$> Name.convert src
in Name.convert hq'
dotDoc :: HQ.HashQualified Name
dotDoc = hq <&> \n -> Name.joinDot n "doc"
fileByName = do
ns <- maybe mempty UF.typecheckedToNames0 <$> use latestTypecheckedFile
fnames <- pure $ Names3.Names ns mempty
case Names3.lookupHQTerm dotDoc fnames of
s | Set.size s == 1 -> do
-- the displayI command expects full term names, so we resolve
-- the hash back to its full name in the file
fname' <- pure $ Names3.longestTermName 10 (Set.findMin s) fnames
displayI ConsoleLocation fname'
_ -> codebaseByMetadata
codebaseByMetadata = unlessError do
(ppe, out) <- getLinks input src (Left $ Set.fromList [DD.docRef, DD.doc2Ref])
lift case out of
[] -> codebaseByName
[(_name, ref, _tm)] -> do
len <- eval BranchHashLength
let names = Names3.Names basicPrettyPrintNames0 mempty
let tm = Term.ref External ref
tm <- eval $ Evaluate1 (PPE.fromNames len names) True tm
case tm of
Left e -> respond (EvaluationFailure e)
Right tm -> doDisplay ConsoleLocation names (Term.unannotate tm)
out -> do
numberedArgs .= fmap (HQ.toString . view _1) out
respond $ ListOfLinks ppe out
codebaseByName = do
parseNames <- basicParseNames0
case Names3.lookupHQTerm dotDoc (Names3.Names parseNames mempty) of
s | Set.size s == 1 -> displayI ConsoleLocation dotDoc
| Set.size s == 0 -> respond $ ListOfLinks mempty []
| otherwise -> -- todo: return a list of links here too
respond $ ListOfLinks mempty []
CreateAuthorI authorNameSegment authorFullName -> do
initialBranch <- getAt currentPath'
AuthorInfo
guid@(guidRef, _, _)
author@(authorRef, _, _)
copyrightHolder@(copyrightHolderRef, _, _) <-
eval $ CreateAuthorInfo authorFullName
-- add the new definitions to the codebase and to the namespace
traverse_ (eval . uncurry3 PutTerm) [guid, author, copyrightHolder]
stepManyAt
[ BranchUtil.makeAddTermName (resolveSplit' authorPath) (d authorRef) mempty
, BranchUtil.makeAddTermName (resolveSplit' copyrightHolderPath) (d copyrightHolderRef) mempty
, BranchUtil.makeAddTermName (resolveSplit' guidPath) (d guidRef) mempty
]
finalBranch <- getAt currentPath'
-- print some output
diffHelper (Branch.head initialBranch) (Branch.head finalBranch) >>=
respondNumbered
. uncurry (ShowDiffAfterCreateAuthor
authorNameSegment
(Path.unsplit' base)
currentPath')
where
d :: Reference.Id -> Referent
d = Referent.Ref . Reference.DerivedId
base :: Path.Split' = (Path.relativeEmpty', "metadata")
authorPath = base |> "authors" |> authorNameSegment
copyrightHolderPath = base |> "copyrightHolders" |> authorNameSegment
guidPath = authorPath |> "guid"
MoveTermI src dest ->
case (toList (getHQ'Terms src), toList (getTerms dest)) of
([r], []) -> do
stepManyAt
[ BranchUtil.makeDeleteTermName p r
, BranchUtil.makeAddTermName (resolveSplit' dest) r (mdSrc r)]
success
([_], rs) -> termExists dest (Set.fromList rs)
([], _) -> termNotFound src
(rs, _) -> termConflicted src (Set.fromList rs)
where p = resolveSplit' (HQ'.toName <$> src)
mdSrc r = BranchUtil.getTermMetadataAt p r root0
MoveTypeI src dest ->
case (toList (getHQ'Types src), toList (getTypes dest)) of
([r], []) -> do
stepManyAt
[ BranchUtil.makeDeleteTypeName p r
, BranchUtil.makeAddTypeName (resolveSplit' dest) r (mdSrc r) ]
success
([_], rs) -> typeExists dest (Set.fromList rs)
([], _) -> typeNotFound src
(rs, _) -> typeConflicted src (Set.fromList rs)
where
p = resolveSplit' (HQ'.toName <$> src)
mdSrc r = BranchUtil.getTypeMetadataAt p r root0
DeleteI hq -> delete getHQ'Terms getHQ'Types hq
DeleteTypeI hq -> delete (const Set.empty) getHQ'Types hq
DeleteTermI hq -> delete getHQ'Terms (const Set.empty) hq
DisplayI outputLoc hq -> displayI outputLoc hq
ShowDefinitionI outputLoc query -> do
res <- eval $ GetDefinitionsBySuffixes (Just currentPath'') root' query
case res of
Left e -> handleBackendError e
Right (Backend.DefinitionResults terms types misses) -> do
let loc = case outputLoc of
ConsoleLocation -> Nothing
FileLocation path -> Just path
LatestFileLocation ->
fmap fst latestFile' <|> Just "scratch.u"
printNames =
Backend.getCurrentPrettyNames currentPath'' root'
ppe = PPE.fromNamesDecl hqLength printNames
unless (null types && null terms) $
eval . Notify $
DisplayDefinitions loc ppe types terms
unless (null misses) $
eval . Notify $ SearchTermsNotFound misses
-- We set latestFile to be programmatically generated, if we
-- are writing these definitions to a file - this will skip the
-- next update for that file (which will happen immediately)
latestFile .= ((, True) <$> loc)
FindPatchI -> do
let patches =
[ Path.toName $ Path.snoc p seg
| (p, b) <- Branch.toList0 currentBranch0
, (seg, _) <- Map.toList (Branch._edits b) ]
respond $ ListOfPatches $ Set.fromList patches
numberedArgs .= fmap Name.toString patches
FindShallowI pathArg -> do
let pathArgAbs = resolveToAbsolute pathArg
ppe = Backend.basicSuffixifiedNames
sbhLength
root'
(Path.fromPath' pathArg)
res <- eval $ FindShallow pathArgAbs
case res of
Left e -> handleBackendError e
Right entries -> do
-- caching the result as an absolute path, for easier jumping around
numberedArgs .= fmap entryToHQString entries
respond $ ListShallow ppe entries
where
entryToHQString :: ShallowListEntry v Ann -> String
entryToHQString e =
fixup $ case e of
ShallowTypeEntry (TypeEntry _ hq _) -> HQ'.toString hq
ShallowTermEntry (TermEntry _ hq _ _) -> HQ'.toString hq
ShallowBranchEntry ns _ _ -> NameSegment.toString ns
ShallowPatchEntry ns -> NameSegment.toString ns
where
fixup s = case pathArgStr of
"" -> s
p | last p == '.' -> p ++ s
p -> p ++ "." ++ s
pathArgStr = show pathArg
SearchByNameI isVerbose _showAll ws -> do
let prettyPrintNames0 = basicPrettyPrintNames0
unlessError do
results <- case ws of
-- no query, list everything
[] -> pure . listBranch $ Branch.head currentBranch'
-- type query
":" : ws ->
ExceptT (parseSearchType input (unwords ws)) >>= \typ ->
ExceptT $ do
let named = Branch.deepReferents root0
matches <- fmap toList . eval $ GetTermsOfType typ
matches <- filter (`Set.member` named) <$>
if null matches then do
respond NoExactTypeMatches
fmap toList . eval $ GetTermsMentioningType typ
else pure matches
let results =
-- in verbose mode, aliases are shown, so we collapse all
-- aliases to a single search result; in non-verbose mode,
-- a separate result may be shown for each alias
(if isVerbose then uniqueBy SR.toReferent else id) $
searchResultsFor prettyPrintNames0 matches []
pure . pure $ results
-- name query
(map HQ.unsafeFromString -> qs) -> do
let ns = basicPrettyPrintNames0
let srs = searchBranchScored ns fuzzyNameDistance qs
pure $ uniqueBy SR.toReferent srs
lift do
numberedArgs .= fmap searchResultToHQString results
results' <- loadSearchResults results
ppe <- suffixifiedPPE =<<
makePrintNamesFromLabeled'
(foldMap SR'.labeledDependencies results')
respond $ ListOfDefinitions ppe isVerbose results'
ResolveTypeNameI hq ->
zeroOneOrMore (getHQ'Types hq) (typeNotFound hq) go (typeConflicted hq)
where
conflicted = getHQ'Types (fmap HQ'.toNameOnly hq)
makeDelete =
BranchUtil.makeDeleteTypeName (resolveSplit' (HQ'.toName <$> hq))
go r = stepManyAt . fmap makeDelete . toList . Set.delete r $ conflicted
ResolveTermNameI hq -> do
refs <- getHQ'TermsIncludingHistorical hq
zeroOneOrMore refs (termNotFound hq) go (termConflicted hq)
where
conflicted = getHQ'Terms (fmap HQ'.toNameOnly hq)
makeDelete =
BranchUtil.makeDeleteTermName (resolveSplit' (HQ'.toName <$> hq))
go r = stepManyAt . fmap makeDelete . toList . Set.delete r $ conflicted
ReplaceI from to patchPath -> do
let patchPath' = fromMaybe defaultPatchPath patchPath
patch <- getPatchAt patchPath'
QueryResult fromMisses' fromHits <- hqNameQuery [from]
QueryResult toMisses' toHits <- hqNameQuery [to]
let termsFromRefs = termReferences fromHits
termsToRefs = termReferences toHits
typesFromRefs = typeReferences fromHits
typesToRefs = typeReferences toHits
--- Here are all the kinds of misses
--- [X] [X]
--- [Type] [Term]
--- [Term] [Type]
--- [Type] [X]
--- [Term] [X]
--- [X] [Type]
--- [X] [Term]
-- Type hits are term misses
termFromMisses = fromMisses'
<> (HQ'.toHQ . SR.typeName <$> typeResults fromHits)
termToMisses = toMisses'
<> (HQ'.toHQ . SR.typeName <$> typeResults toHits)
-- Term hits are type misses
typeFromMisses = fromMisses'
<> (HQ'.toHQ . SR.termName <$> termResults fromHits)
typeToMisses = toMisses'
<> (HQ'.toHQ . SR.termName <$> termResults toHits)
termMisses = termFromMisses <> termToMisses
typeMisses = typeFromMisses <> typeToMisses
replaceTerms :: Reference
-> Reference
-> Action m (Either Event Input) v ()
replaceTerms fr tr = do
mft <- eval $ LoadTypeOfTerm fr
mtt <- eval $ LoadTypeOfTerm tr
let termNotFound = respond . TermNotFound'
. SH.take hqLength
. Reference.toShortHash
case (mft, mtt) of
(Nothing, _) -> termNotFound fr
(_, Nothing) -> termNotFound tr
(Just ft, Just tt) -> do
let
patch' =
-- The modified patch
over Patch.termEdits
(R.insert fr (Replace tr (TermEdit.typing tt ft))
. R.deleteDom fr)
patch
(patchPath'', patchName) = resolveSplit' patchPath'
saveAndApplyPatch patchPath'' patchName patch'
replaceTypes :: Reference
-> Reference
-> Action m (Either Event Input) v ()
replaceTypes fr tr = do
let patch' =
-- The modified patch
over Patch.typeEdits
(R.insert fr (TypeEdit.Replace tr) . R.deleteDom fr) patch
(patchPath'', patchName) = resolveSplit' patchPath'
saveAndApplyPatch patchPath'' patchName patch'
ambiguous t rs =
let rs' = Set.map Referent.Ref $ Set.fromList rs
in case t of
HQ.HashOnly h ->
hashConflicted h rs'
(Path.parseHQSplit' . HQ.toString -> Right n) ->
termConflicted n rs'
_ -> respond . BadName $ HQ.toString t
mismatch typeName termName = respond $ TypeTermMismatch typeName termName
case (termsFromRefs, termsToRefs, typesFromRefs, typesToRefs) of
([], [], [], []) -> respond $ SearchTermsNotFound termMisses
([_], [], [], [_]) -> mismatch to from
([], [_], [_], []) -> mismatch from to
([_], [], _, _) -> respond $ SearchTermsNotFound termMisses
([], [_], _, _) -> respond $ SearchTermsNotFound termMisses
(_, _, [_], []) -> respond $ SearchTermsNotFound typeMisses
(_, _, [], [_]) -> respond $ SearchTermsNotFound typeMisses
([fr], [tr], [], []) -> replaceTerms fr tr
([], [], [fr], [tr]) -> replaceTypes fr tr
(froms, [_], [], []) -> ambiguous from froms
([], [], froms, [_]) -> ambiguous from froms
([_], tos, [], []) -> ambiguous to tos
([], [], [_], tos) -> ambiguous to tos
(_, _, _, _) -> error "unpossible"
LoadI maybePath ->
case maybePath <|> (fst <$> latestFile') of
Nothing -> respond NoUnisonFile
Just path -> do
res <- eval . LoadSource . Text.pack $ path
case res of
InvalidSourceNameError -> respond $ InvalidSourceName path
LoadError -> respond $ SourceLoadFailed path
LoadSuccess contents -> loadUnisonFile (Text.pack path) contents
AddI hqs -> case uf of
Nothing -> respond NoUnisonFile
Just uf -> do
sr <- Slurp.disallowUpdates
. applySelection hqs uf
. toSlurpResult currentPath' uf
<$> slurpResultNames0
let adds = Slurp.adds sr
when (Slurp.isNonempty sr) $ do
stepAtNoSync ( Path.unabsolute currentPath'
, doSlurpAdds adds uf)
eval . AddDefsToCodebase . filterBySlurpResult sr $ uf
ppe <- prettyPrintEnvDecl =<< displayNames uf
respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr
addDefaultMetadata adds
syncRoot
PreviewAddI hqs -> case (latestFile', uf) of
(Just (sourceName, _), Just uf) -> do
sr <- Slurp.disallowUpdates
. applySelection hqs uf
. toSlurpResult currentPath' uf
<$> slurpResultNames0
previewResponse sourceName sr uf
_ -> respond NoUnisonFile
UpdateI maybePatchPath hqs -> case uf of
Nothing -> respond NoUnisonFile
Just uf -> do
let patchPath = fromMaybe defaultPatchPath maybePatchPath
slurpCheckNames0 <- slurpResultNames0
currentPathNames0 <- currentPathNames0
let sr = applySelection hqs uf
. toSlurpResult currentPath' uf
$ slurpCheckNames0
addsAndUpdates = Slurp.updates sr <> Slurp.adds sr
fileNames0 = UF.typecheckedToNames0 uf
-- todo: display some error if typeEdits or termEdits itself contains a loop
typeEdits :: Map Name (Reference, Reference)
typeEdits = Map.fromList $ map f (toList $ SC.types (updates sr)) where
f v = case (toList (Names.typesNamed slurpCheckNames0 n)
,toList (Names.typesNamed fileNames0 n)) of
([old],[new]) -> (n, (old, new))
_ -> error $ "Expected unique matches for "
++ Var.nameStr v ++ " but got: "
++ show otherwise
where n = Name.fromVar v
hashTerms :: Map Reference (Type v Ann)
hashTerms = Map.fromList (toList hashTerms0) where
hashTerms0 = (\(r, _, typ) -> (r, typ)) <$> UF.hashTerms uf
termEdits :: Map Name (Reference, Reference)
termEdits = Map.fromList $ map g (toList $ SC.terms (updates sr)) where
g v = case ( toList (Names.refTermsNamed slurpCheckNames0 n)
, toList (Names.refTermsNamed fileNames0 n)) of
([old], [new]) -> (n, (old, new))
                  actual -> error $ "Expected unique matches for "
                       ++ Var.nameStr v ++ " but got: "
                       ++ show actual
where n = Name.fromVar v
termDeprecations :: [(Name, Referent)]
termDeprecations =
[ (n, r) | (oldTypeRef,_) <- Map.elems typeEdits
, (n, r) <- Names3.constructorsForType0 oldTypeRef currentPathNames0 ]
ye'ol'Patch <- getPatchAt patchPath
-- If `uf` updates a -> a', we want to replace all (a0 -> a) in patch
-- with (a0 -> a') in patch'.
-- So for all (a0 -> a) in patch, for all (a -> a') in `uf`,
-- we must know the type of a0, a, a'.
let
-- we need:
-- all of the `old` references from the `new` edits,
-- plus all of the `old` references for edits from patch we're replacing
collectOldForTyping :: [(Reference, Reference)] -> Patch -> Set Reference
collectOldForTyping new old = foldl' f mempty (new ++ fromOld) where
f acc (r, _r') = Set.insert r acc
newLHS = Set.fromList . fmap fst $ new
fromOld :: [(Reference, Reference)]
fromOld = [ (r,r') | (r, TermEdit.Replace r' _) <- R.toList . Patch._termEdits $ old
, Set.member r' newLHS ]
neededTypes = collectOldForTyping (toList termEdits) ye'ol'Patch
allTypes :: Map Reference (Type v Ann) <-
fmap Map.fromList . for (toList neededTypes) $ \r ->
(r,) . fromMaybe (Type.builtin External "unknown type")
<$> (eval . LoadTypeOfTerm) r
let typing r1 r2 = case (Map.lookup r1 allTypes, Map.lookup r2 hashTerms) of
(Just t1, Just t2)
| Typechecker.isEqual t1 t2 -> TermEdit.Same
| Typechecker.isSubtype t1 t2 -> TermEdit.Subtype
| otherwise -> TermEdit.Different
e -> error $ "compiler bug: typing map not constructed properly\n" <>
"typing " <> show r1 <> " " <> show r2 <> " : " <> show e
let updatePatch :: Patch -> Patch
updatePatch p = foldl' step2 p' termEdits
where
p' = foldl' step1 p typeEdits
step1 p (r,r') = Patch.updateType r (TypeEdit.Replace r') p
step2 p (r,r') = Patch.updateTerm typing r (TermEdit.Replace r' (typing r r')) p
(p, seg) = Path.toAbsoluteSplit currentPath' patchPath
updatePatches :: Branch0 m -> m (Branch0 m)
updatePatches = Branch.modifyPatches seg updatePatch
when (Slurp.isNonempty sr) $ do
-- take a look at the `updates` from the SlurpResult
-- and make a patch diff to record a replacement from the old to new references
stepManyAtMNoSync
[( Path.unabsolute currentPath'
, pure . doSlurpUpdates typeEdits termEdits termDeprecations)
,( Path.unabsolute currentPath'
, pure . doSlurpAdds addsAndUpdates uf)
,( Path.unabsolute p, updatePatches )]
eval . AddDefsToCodebase . filterBySlurpResult sr $ uf
ppe <- prettyPrintEnvDecl =<< displayNames uf
respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr
-- propagatePatch prints TodoOutput
void $ propagatePatchNoSync (updatePatch ye'ol'Patch) currentPath'
addDefaultMetadata addsAndUpdates
syncRoot
PreviewUpdateI hqs -> case (latestFile', uf) of
(Just (sourceName, _), Just uf) -> do
sr <- applySelection hqs uf
. toSlurpResult currentPath' uf
<$> slurpResultNames0
previewResponse sourceName sr uf
_ -> respond NoUnisonFile
TodoI patchPath branchPath' -> do
patch <- getPatchAt (fromMaybe defaultPatchPath patchPath)
doShowTodoOutput patch $ resolveToAbsolute branchPath'
TestI showOk showFail -> do
let
testTerms = Map.keys . R4.d1 . uncurry R4.selectD34 isTest
. Branch.deepTermMetadata $ currentBranch0
testRefs = Set.fromList [ r | Referent.Ref r <- toList testTerms ]
oks results =
[ (r, msg)
| (r, Term.List' ts) <- Map.toList results
, Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts
, cid == DD.okConstructorId && ref == DD.testResultRef ]
fails results =
[ (r, msg)
| (r, Term.List' ts) <- Map.toList results
, Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts
, cid == DD.failConstructorId && ref == DD.testResultRef ]
cachedTests <- fmap Map.fromList . eval $ LoadWatches UF.TestWatch testRefs
let stats = Output.CachedTests (Set.size testRefs) (Map.size cachedTests)
names <- makePrintNamesFromLabeled' $
LD.referents testTerms <>
LD.referents [ DD.okConstructorReferent, DD.failConstructorReferent ]
ppe <- fqnPPE names
respond $ TestResults stats ppe showOk showFail
(oks cachedTests) (fails cachedTests)
let toCompute = Set.difference testRefs (Map.keysSet cachedTests)
unless (Set.null toCompute) $ do
let total = Set.size toCompute
computedTests <- fmap join . for (toList toCompute `zip` [1..]) $ \(r,n) ->
case r of
Reference.DerivedId rid -> do
tm <- eval $ LoadTerm rid
case tm of
Nothing -> [] <$ respond (TermNotFound' . SH.take hqLength . Reference.toShortHash $ Reference.DerivedId rid)
Just tm -> do
respond $ TestIncrementalOutputStart ppe (n,total) r tm
-- v don't cache; test cache populated below
tm' <- eval $ Evaluate1 ppe False tm
case tm' of
Left e -> respond (EvaluationFailure e) $> []
Right tm' -> do
-- After evaluation, cache the result of the test
eval $ PutWatch UF.TestWatch rid tm'
respond $ TestIncrementalOutputEnd ppe (n,total) r tm'
pure [(r, tm')]
r -> error $ "unpossible, tests can't be builtins: " <> show r
let m = Map.fromList computedTests
respond $ TestResults Output.NewlyComputed ppe showOk showFail (oks m) (fails m)
-- ListBranchesI ->
-- eval ListBranches >>= respond . ListOfBranches currentBranchName'
-- DeleteBranchI branchNames -> withBranches branchNames $ \bnbs -> do
-- uniqueToDelete <- prettyUniqueDefinitions bnbs
-- let deleteBranches b =
-- traverse (eval . DeleteBranch) b >> respond (Success input)
-- if (currentBranchName' `elem` branchNames)
-- then respond DeletingCurrentBranch
-- else if null uniqueToDelete
-- then deleteBranches branchNames
-- else ifM (confirmedCommand input)
-- (deleteBranches branchNames)
-- (respond . DeleteBranchConfirmation $ uniqueToDelete)
PropagatePatchI patchPath scopePath -> do
patch <- getPatchAt patchPath
updated <- propagatePatch inputDescription patch (resolveToAbsolute scopePath)
unless updated (respond $ NothingToPatch patchPath scopePath)
ExecuteI main -> addRunMain main uf >>= \case
NoTermWithThatName -> do
ppe <- suffixifiedPPE (Names3.Names basicPrettyPrintNames0 mempty)
mainType <- eval RuntimeMain
respond $ NoMainFunction main ppe [mainType]
TermHasBadType ty -> do
ppe <- suffixifiedPPE (Names3.Names basicPrettyPrintNames0 mempty)
mainType <- eval RuntimeMain
respond $ BadMainFunction main ty ppe [mainType]
RunMainSuccess unisonFile -> do
ppe <- executePPE unisonFile
e <- eval $ Execute ppe unisonFile
case e of
Left e -> respond $ EvaluationFailure e
Right _ -> pure () -- TODO
IOTestI main -> do
-- todo - allow this to run tests from scratch file, using addRunMain
testType <- eval RuntimeTest
parseNames <- (`Names3.Names` mempty) <$> basicPrettyPrintNames0A
ppe <- suffixifiedPPE parseNames
-- use suffixed names for resolving the argument to display
let
oks results =
[ (r, msg)
| (r, Term.List' ts) <- results
, Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts
, cid == DD.okConstructorId && ref == DD.testResultRef ]
fails results =
[ (r, msg)
| (r, Term.List' ts) <- results
, Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts
, cid == DD.failConstructorId && ref == DD.testResultRef ]
results = Names3.lookupHQTerm main parseNames in
case toList results of
[Referent.Ref ref] -> do
typ <- loadTypeOfTerm (Referent.Ref ref)
case typ of
Just typ | Typechecker.isSubtype typ testType -> do
let a = ABT.annotation tm
tm = DD.forceTerm a a (Term.ref a ref) in do
-- v Don't cache IO tests
tm' <- eval $ Evaluate1 ppe False tm
case tm' of
Left e -> respond (EvaluationFailure e)
Right tm' ->
respond $ TestResults Output.NewlyComputed ppe True True (oks [(ref, tm')]) (fails [(ref, tm')])
_ -> respond $ NoMainFunction (HQ.toString main) ppe [testType]
_ -> respond $ NoMainFunction (HQ.toString main) ppe [testType]
-- UpdateBuiltinsI -> do
-- stepAt updateBuiltins
-- checkTodo
MergeBuiltinsI -> do
-- these were added once, but maybe they've changed and need to be
-- added again.
let uf = UF.typecheckedUnisonFile (Map.fromList Builtin.builtinDataDecls)
(Map.fromList Builtin.builtinEffectDecls)
[Builtin.builtinTermsSrc Intrinsic]
mempty
eval $ AddDefsToCodebase uf
-- add the names; note, there are more names than definitions
-- due to builtin terms; so we don't just reuse `uf` above.
let srcb = BranchUtil.fromNames0 Builtin.names0
_ <- updateAtM (currentPath' `snoc` "builtin") $ \destb ->
eval $ Merge Branch.RegularMerge srcb destb
success
MergeIOBuiltinsI -> do
-- these were added once, but maybe they've changed and need to be
-- added again.
let uf = UF.typecheckedUnisonFile (Map.fromList Builtin.builtinDataDecls)
(Map.fromList Builtin.builtinEffectDecls)
[Builtin.builtinTermsSrc Intrinsic]
mempty
eval $ AddDefsToCodebase uf
-- these have not necessarily been added yet
eval $ AddDefsToCodebase IOSource.typecheckedFile'
-- add the names; note, there are more names than definitions
-- due to builtin terms; so we don't just reuse `uf` above.
let names0 = Builtin.names0
<> UF.typecheckedToNames0 @v IOSource.typecheckedFile'
let srcb = BranchUtil.fromNames0 names0
_ <- updateAtM (currentPath' `snoc` "builtin") $ \destb ->
eval $ Merge Branch.RegularMerge srcb destb
success
ListEditsI maybePath -> do
let (p, seg) =
maybe (Path.toAbsoluteSplit currentPath' defaultPatchPath)
(Path.toAbsoluteSplit currentPath')
maybePath
patch <- eval . Eval . Branch.getPatch seg . Branch.head =<< getAt p
ppe <- suffixifiedPPE =<<
makePrintNamesFromLabeled' (Patch.labeledDependencies patch)
respond $ ListEdits patch ppe
PullRemoteBranchI mayRepo path syncMode -> unlessError do
ns <- maybe (writePathToRead <$> resolveConfiguredGitUrl Pull path) pure mayRepo
lift $ unlessGitError do
b <- importRemoteBranch ns syncMode
let msg = Just $ PullAlreadyUpToDate ns path
let destAbs = resolveToAbsolute path
lift $ mergeBranchAndPropagateDefaultPatch Branch.RegularMerge inputDescription msg b (Just path) destAbs
PushRemoteBranchI mayRepo path syncMode -> do
let srcAbs = resolveToAbsolute path
srcb <- getAt srcAbs
unlessError do
(repo, remotePath) <- maybe (resolveConfiguredGitUrl Push path) pure mayRepo
lift $ unlessGitError do
(cleanup, remoteRoot) <- unsafeTime "Push viewRemoteBranch" $
viewRemoteBranch (writeToRead repo, Nothing, Path.empty)
-- We don't merge `srcb` with the remote namespace, `r`, we just
-- replace it. The push will be rejected if this rewinds time
-- or misses any new updates in `r` that aren't in `srcb` already.
let newRemoteRoot = Branch.modifyAt remotePath (const srcb) remoteRoot
unsafeTime "Push syncRemoteRootBranch" $
syncRemoteRootBranch repo newRemoteRoot syncMode
lift . eval $ Eval cleanup
lift $ respond Success
ListDependentsI hq -> -- todo: add flag to handle transitive efficiently
resolveHQToLabeledDependencies hq >>= \lds ->
if null lds
then respond $ LabeledReferenceNotFound hq
else for_ lds $ \ld -> do
dependents <- let
tp r = eval $ GetDependents r
tm (Referent.Ref r) = eval $ GetDependents r
tm (Referent.Con r _i _ct) = eval $ GetDependents r
in LD.fold tp tm ld
(missing, names0) <- eval . Eval $ Branch.findHistoricalRefs' dependents root'
let types = R.toList $ Names3.types0 names0
let terms = fmap (second Referent.toReference) $ R.toList $ Names.terms names0
let names = types <> terms
numberedArgs .= fmap (Text.unpack . Reference.toText) ((fmap snd names) <> toList missing)
respond $ ListDependents hqLength ld names missing
ListDependenciesI hq -> -- todo: add flag to handle transitive efficiently
resolveHQToLabeledDependencies hq >>= \lds ->
if null lds
then respond $ LabeledReferenceNotFound hq
else for_ lds $ \ld -> do
dependencies :: Set Reference <- let
tp r@(Reference.DerivedId i) = eval (LoadType i) <&> \case
Nothing -> error $ "What happened to " ++ show i ++ "?"
Just decl -> Set.delete r . DD.dependencies $ DD.asDataDecl decl
tp _ = pure mempty
tm (Referent.Ref r@(Reference.DerivedId i)) = eval (LoadTerm i) <&> \case
Nothing -> error $ "What happened to " ++ show i ++ "?"
Just tm -> Set.delete r $ Term.dependencies tm
tm con@(Referent.Con (Reference.DerivedId i) cid _ct) = eval (LoadType i) <&> \case
Nothing -> error $ "What happened to " ++ show i ++ "?"
Just decl -> case DD.typeOfConstructor (DD.asDataDecl decl) cid of
Nothing -> error $ "What happened to " ++ show con ++ "?"
Just tp -> Type.dependencies tp
tm _ = pure mempty
in LD.fold tp tm ld
(missing, names0) <- eval . Eval $ Branch.findHistoricalRefs' dependencies root'
let types = R.toList $ Names3.types0 names0
let terms = fmap (second Referent.toReference) $ R.toList $ Names.terms names0
let names = types <> terms
numberedArgs .= fmap (Text.unpack . Reference.toText) ((fmap snd names) <> toList missing)
respond $ ListDependencies hqLength ld names missing
DebugNumberedArgsI -> use numberedArgs >>= respond . DumpNumberedArgs
DebugBranchHistoryI ->
eval . Notify . DumpBitBooster (Branch.headHash currentBranch') =<<
(eval . Eval $ Causal.hashToRaw (Branch._history currentBranch'))
DebugTypecheckedUnisonFileI -> case uf of
Nothing -> respond NoUnisonFile
Just uf -> let
datas, effects, terms :: [(Name, Reference.Id)]
datas = [ (Name.fromVar v, r) | (v, (r, _d)) <- Map.toList $ UF.dataDeclarationsId' uf ]
effects = [ (Name.fromVar v, r) | (v, (r, _e)) <- Map.toList $ UF.effectDeclarationsId' uf ]
terms = [ (Name.fromVar v, r) | (v, (r, _tm, _tp)) <- Map.toList $ UF.hashTermsId uf ]
in eval . Notify $ DumpUnisonFileHashes hqLength datas effects terms
DebugDumpNamespacesI -> do
let seen h = State.gets (Set.member h)
set h = State.modify (Set.insert h)
getCausal b = (Branch.headHash b, pure $ Branch._history b)
goCausal :: forall m. Monad m => [(Branch.Hash, m (Branch.UnwrappedBranch m))] -> StateT (Set Branch.Hash) m ()
goCausal [] = pure ()
goCausal ((h, mc) : queue) = do
ifM (seen h) (goCausal queue) do
lift mc >>= \case
Causal.One h b -> goBranch h b mempty queue
Causal.Cons h b tail -> goBranch h b [fst tail] (tail : queue)
Causal.Merge h b (Map.toList -> tails) -> goBranch h b (map fst tails) (tails ++ queue)
goBranch :: forall m. Monad m => Branch.Hash -> Branch0 m -> [Branch.Hash] -> [(Branch.Hash, m (Branch.UnwrappedBranch m))] -> StateT (Set Branch.Hash) m ()
goBranch h b (Set.fromList -> causalParents) queue = case b of
Branch0 terms0 types0 children0 patches0 _ _ _ _ _ _ -> let
wrangleMetadata :: (Ord r, Ord n) => Metadata.Star r n -> r -> (r, (Set n, Set Metadata.Value))
wrangleMetadata s r =
(r, (R.lookupDom r $ Star3.d1 s, Set.map snd . R.lookupDom r $ Star3.d3 s))
terms = Map.fromList . map (wrangleMetadata terms0) . Foldable.toList $ Star3.fact terms0
types = Map.fromList . map (wrangleMetadata types0) . Foldable.toList $ Star3.fact types0
patches = fmap fst patches0
children = fmap Branch.headHash children0
in do
let d = Output.DN.DumpNamespace terms types patches children causalParents
-- the alternate implementation that doesn't rely on `traceM` blows up
traceM $ P.toPlain 200 (prettyDump (h, d))
set h
goCausal (map getCausal (Foldable.toList children0) ++ queue)
prettyDump (h, Output.DN.DumpNamespace terms types patches children causalParents) =
P.lit "Namespace " <> P.shown h <> P.newline <> (P.indentN 2 $ P.linesNonEmpty [
Monoid.unlessM (null causalParents) $ P.lit "Causal Parents:" <> P.newline <> P.indentN 2 (P.lines (map P.shown $ Set.toList causalParents))
, Monoid.unlessM (null terms) $ P.lit "Terms:" <> P.newline <> P.indentN 2 (P.lines (map (prettyDefn Referent.toText) $ Map.toList terms))
, Monoid.unlessM (null types) $ P.lit "Types:" <> P.newline <> P.indentN 2 (P.lines (map (prettyDefn Reference.toText) $ Map.toList types))
, Monoid.unlessM (null patches) $ P.lit "Patches:" <> P.newline <> P.indentN 2 (P.column2 (map (bimap P.shown P.shown) $ Map.toList patches))
, Monoid.unlessM (null children) $ P.lit "Children:" <> P.newline <> P.indentN 2 (P.column2 (map (bimap P.shown P.shown) $ Map.toList children))
])
where
prettyLinks renderR r [] = P.indentN 2 $ P.text (renderR r)
prettyLinks renderR r links = P.indentN 2 (P.lines (P.text (renderR r) : (links <&> \r -> "+ " <> P.text (Reference.toText r))))
prettyDefn renderR (r, (Foldable.toList -> names, Foldable.toList -> links)) =
P.lines (P.shown <$> if null names then [NameSegment "<unnamed>"] else names) <> P.newline <> prettyLinks renderR r links
void . eval . Eval . flip State.execStateT mempty $ goCausal [getCausal root']
DebugDumpNamespaceSimpleI -> do
for_ (Relation.toList . Branch.deepTypes . Branch.head $ root') \(r, name) ->
traceM $ show name ++ ",Type," ++ Text.unpack (Reference.toText r)
for_ (Relation.toList . Branch.deepTerms . Branch.head $ root') \(r, name) ->
traceM $ show name ++ ",Term," ++ Text.unpack (Referent.toText r)
DebugClearWatchI {} -> eval ClearWatchCache
DeprecateTermI {} -> notImplemented
DeprecateTypeI {} -> notImplemented
RemoveTermReplacementI from patchPath ->
doRemoveReplacement from patchPath True
RemoveTypeReplacementI from patchPath ->
doRemoveReplacement from patchPath False
ShowDefinitionByPrefixI {} -> notImplemented
UpdateBuiltinsI -> notImplemented
QuitI -> MaybeT $ pure Nothing
where
notImplemented = eval $ Notify NotImplemented
success = respond Success
resolveDefaultMetadata :: Path.Absolute -> Action' m v [String]
resolveDefaultMetadata path = do
let superpaths = Path.ancestors path
xs <- for
superpaths
(\path -> do
mayNames <-
eval . ConfigLookup @[String] $ configKey "DefaultMetadata" path
pure . join $ toList mayNames
)
pure . join $ toList xs
configKey k p =
Text.intercalate "." . toList $ k :<| fmap
NameSegment.toText
(Path.toSeq $ Path.unabsolute p)
-- Takes a maybe (namespace address triple); returns it as-is if `Just`;
-- otherwise, tries to load a value from .unisonConfig, and complains
-- if needed.
resolveConfiguredGitUrl
:: PushPull
-> Path'
-> ExceptT (Output v) (Action' m v) WriteRemotePath
resolveConfiguredGitUrl pushPull destPath' = ExceptT do
let destPath = resolveToAbsolute destPath'
let configKey = gitUrlKey destPath
(eval . ConfigLookup) configKey >>= \case
Just url ->
case P.parse UriParser.writeRepoPath (Text.unpack configKey) url of
Left e ->
pure . Left $
ConfiguredGitUrlParseError pushPull destPath' url (show e)
Right ns ->
pure . Right $ ns
Nothing ->
pure . Left $ NoConfiguredGitUrl pushPull destPath'
gitUrlKey = configKey "GitUrl"
case e of
Right input -> lastInput .= Just input
_ -> pure ()
-- todo: compare to `getHQTerms` / `getHQTypes`. Is one universally better?
resolveHQToLabeledDependencies :: Functor m => HQ.HashQualified Name -> Action' m v (Set LabeledDependency)
resolveHQToLabeledDependencies = \case
HQ.NameOnly n -> do
parseNames <- basicParseNames0
let terms, types :: Set LabeledDependency
terms = Set.map LD.referent . Name.searchBySuffix n $ Names3.terms0 parseNames
types = Set.map LD.typeRef . Name.searchBySuffix n $ Names3.types0 parseNames
pure $ terms <> types
-- rationale: the hash should be unique enough that the name never helps
HQ.HashQualified _n sh -> resolveHashOnly sh
HQ.HashOnly sh -> resolveHashOnly sh
where
resolveHashOnly sh = do
terms <- eval $ TermReferentsByShortHash sh
types <- eval $ TypeReferencesByShortHash sh
pure $ Set.map LD.referent terms <> Set.map LD.typeRef types
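-- | Evaluate and render a term, preferring definitions from the latest
-- typechecked file and falling back to the codebase, then send the rendering
-- to the console or to a scratch file depending on the output location.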
doDisplay :: Var v => OutputLocation -> Names -> Term v () -> Action' m v ()
doDisplay outputLoc names tm = do
ppe <- prettyPrintEnvDecl names
tf <- use latestTypecheckedFile
let (tms, typs) = maybe mempty UF.indexByReference tf
latestFile' <- use latestFile
let
loc = case outputLoc of
ConsoleLocation -> Nothing
FileLocation path -> Just path
LatestFileLocation -> fmap fst latestFile' <|> Just "scratch.u"
useCache = True
evalTerm tm = fmap ErrorUtil.hush . fmap (fmap Term.unannotate) . eval $
Evaluate1 (PPE.suffixifiedPPE ppe) useCache (Term.amap (const External) tm)
loadTerm (Reference.DerivedId r) = case Map.lookup r tms of
Nothing -> fmap (fmap Term.unannotate) . eval $ LoadTerm r
Just (tm,_) -> pure (Just $ Term.unannotate tm)
loadTerm _ = pure Nothing
loadDecl (Reference.DerivedId r) = case Map.lookup r typs of
Nothing -> fmap (fmap $ DD.amap (const ())) . eval $ LoadType r
Just decl -> pure (Just $ DD.amap (const ()) decl)
loadDecl _ = pure Nothing
loadTypeOfTerm' (Referent.Ref (Reference.DerivedId r))
| Just (_,ty) <- Map.lookup r tms = pure $ Just (void ty)
loadTypeOfTerm' r = fmap (fmap void) . loadTypeOfTerm $ r
rendered <- DisplayValues.displayTerm ppe loadTerm loadTypeOfTerm' evalTerm loadDecl tm
respond $ DisplayRendered loc rendered
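-- | Look up the metadata attached to a definition, optionally restricted to a
-- metadata type given either as a set of type references or as a string to be
-- parsed as a type.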
getLinks :: (Var v, Monad m)
=> Input
-> Path.HQSplit'
-> Either (Set Reference) (Maybe String)
-> ExceptT (Output v)
(Action' m v)
(PPE.PrettyPrintEnv,
              -- e.g. ("Foo.doc", #foodoc, Just (#builtin.Doc))
[(HQ.HashQualified Name, Reference, Maybe (Type v Ann))])
getLinks input src mdTypeStr = ExceptT $ do
let go = fmap Right . getLinks' src
case mdTypeStr of
Left s -> go (Just s)
Right Nothing -> go Nothing
Right (Just mdTypeStr) -> parseType input mdTypeStr >>= \case
Left e -> pure $ Left e
Right typ -> go . Just . Set.singleton $ Type.toReference typ
getLinks' :: (Var v, Monad m)
=> Path.HQSplit' -- definition to print metadata of
-> Maybe (Set Reference) -- return all metadata if empty
-> Action' m v (PPE.PrettyPrintEnv,
       -- e.g. ("Foo.doc", #foodoc, Just (#builtin.Doc))
[(HQ.HashQualified Name, Reference, Maybe (Type v Ann))])
getLinks' src selection0 = do
root0 <- Branch.head <$> use root
currentPath' <- use currentPath
let resolveSplit' = Path.fromAbsoluteSplit . Path.toAbsoluteSplit currentPath'
p = resolveSplit' src -- ex: the (parent,hqsegment) of `List.map` - `List`
-- all metadata (type+value) associated with name `src`
allMd = R4.d34 (BranchUtil.getTermMetadataHQNamed p root0)
<> R4.d34 (BranchUtil.getTypeMetadataHQNamed p root0)
allMd' = maybe allMd (`R.restrictDom` allMd) selection0
-- then list the values after filtering by type
allRefs :: Set Reference = R.ran allMd'
sigs <- for (toList allRefs) (loadTypeOfTerm . Referent.Ref)
let deps = Set.map LD.termRef allRefs <>
Set.unions [ Set.map LD.typeRef . Type.dependencies $ t | Just t <- sigs ]
ppe <- prettyPrintEnvDecl =<< makePrintNamesFromLabeled' deps
let ppeDecl = PPE.unsuffixifiedPPE ppe
let sortedSigs = sortOn snd (toList allRefs `zip` sigs)
let out = [(PPE.termName ppeDecl (Referent.Ref r), r, t) | (r, t) <- sortedSigs ]
pure (PPE.suffixifiedPPE ppe, out)
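-- | Resolve a short branch hash to a branch, failing if no branch has that
-- hash or if the prefix is ambiguous.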
resolveShortBranchHash ::
ShortBranchHash -> ExceptT (Output v) (Action' m v) (Branch m)
resolveShortBranchHash hash = ExceptT do
hashSet <- eval $ BranchHashesByPrefix hash
len <- eval BranchHashLength
case Set.toList hashSet of
[] -> pure . Left $ NoBranchWithHash hash
[h] -> fmap Right . eval $ LoadLocalBranch h
_ -> pure . Left $ BranchHashAmbiguous hash (Set.map (SBH.fromHash len) hashSet)
-- Returns True if the operation changed the namespace, False otherwise.
propagatePatchNoSync
:: (Monad m, Var v)
=> Patch
-> Path.Absolute
-> Action' m v Bool
propagatePatchNoSync patch scopePath = do
r <- use root
let nroot = Branch.toNames0 (Branch.head r)
stepAtMNoSync' (Path.unabsolute scopePath,
lift . lift . Propagate.propagateAndApply nroot patch)
-- Returns True if the operation changed the namespace, False otherwise.
propagatePatch :: (Monad m, Var v) =>
InputDescription -> Patch -> Path.Absolute -> Action' m v Bool
propagatePatch inputDescription patch scopePath = do
r <- use root
let nroot = Branch.toNames0 (Branch.head r)
stepAtM' (inputDescription <> " (applying patch)")
(Path.unabsolute scopePath,
lift . lift . Propagate.propagateAndApply nroot patch)
-- | Create the args needed for showTodoOutput and call it
doShowTodoOutput :: Monad m => Patch -> Path.Absolute -> Action' m v ()
doShowTodoOutput patch scopePath = do
scope <- getAt scopePath
let names0 = Branch.toNames0 (Branch.head scope)
-- only needs the local references to check for obsolete defs
let getPpe = do
names <- makePrintNamesFromLabeled' (Patch.labeledDependencies patch)
prettyPrintEnvDecl names
showTodoOutput getPpe patch names0
-- | Show todo output if there are any conflicts or edits.
showTodoOutput
:: Action' m v PPE.PrettyPrintEnvDecl
-- ^ Action that fetches the pretty print env. It's expensive because it
-- involves looking up historical names, so only call it if necessary.
-> Patch
-> Names0
-> Action' m v ()
showTodoOutput getPpe patch names0 = do
todo <- checkTodo patch names0
if TO.noConflicts todo && TO.noEdits todo
then respond NoConflictsOrEdits
else do
numberedArgs .=
(Text.unpack . Reference.toText . view _2 <$>
fst (TO.todoFrontierDependents todo))
ppe <- getPpe
respond $ TodoOutput ppe todo
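-- | Compute the todo output for a patch: the number of remaining transitive
-- dependents, the frontier and (scored) dirty definitions, and any name or
-- patch conflicts.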
checkTodo :: Patch -> Names0 -> Action m i v (TO.TodoOutput v Ann)
checkTodo patch names0 = do
f <- computeFrontier (eval . GetDependents) patch names0
let dirty = R.dom f
frontier = R.ran f
(frontierTerms, frontierTypes) <- loadDisplayInfo frontier
(dirtyTerms, dirtyTypes) <- loadDisplayInfo dirty
-- todo: something more intelligent here?
let scoreFn = const 1
remainingTransitive <-
frontierTransitiveDependents (eval . GetDependents) names0 frontier
let
scoredDirtyTerms =
List.sortOn (view _1) [ (scoreFn r, r, t) | (r,t) <- dirtyTerms ]
scoredDirtyTypes =
List.sortOn (view _1) [ (scoreFn r, r, t) | (r,t) <- dirtyTypes ]
pure $
TO.TodoOutput
(Set.size remainingTransitive)
(frontierTerms, frontierTypes)
(scoredDirtyTerms, scoredDirtyTypes)
(Names.conflicts names0)
(Patch.conflicts patch)
where
frontierTransitiveDependents ::
Monad m => (Reference -> m (Set Reference)) -> Names0 -> Set Reference -> m (Set Reference)
frontierTransitiveDependents dependents names0 rs = do
let branchDependents r = Set.filter (Names.contains names0) <$> dependents r
tdeps <- transitiveClosure branchDependents rs
-- we don't want the frontier in the result
pure $ tdeps `Set.difference` rs
-- (d, f) when d is "dirty" (needs update),
-- f is in the frontier (an edited dependency of d),
-- and d depends on f
-- a ⋖ b = a depends directly on b
-- dirty(d) ∧ frontier(f) <=> not(edited(d)) ∧ edited(f) ∧ d ⋖ f
--
-- The range of this relation is the frontier, and the domain is
-- the set of dirty references.
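-- For example, if the patch edits `f`, and `g` depends on `f` but is not
-- itself edited, then (g, f) is in the resulting relation: `g` is dirty and
-- `f` is in the frontier.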
computeFrontier :: forall m . Monad m
=> (Reference -> m (Set Reference)) -- eg Codebase.dependents codebase
-> Patch
-> Names0
-> m (R.Relation Reference Reference)
computeFrontier getDependents patch names = let
edited :: Set Reference
edited = R.dom (Patch._termEdits patch) <> R.dom (Patch._typeEdits patch)
addDependents :: R.Relation Reference Reference -> Reference -> m (R.Relation Reference Reference)
addDependents dependents ref =
(\ds -> R.insertManyDom ds ref dependents) . Set.filter (Names.contains names)
<$> getDependents ref
in do
-- (r,r2) ∈ dependsOn if r depends on r2
dependsOn <- foldM addDependents R.empty edited
-- Dirty is everything that `dependsOn` Frontier, minus already edited defns
pure $ R.filterDom (not . flip Set.member edited) dependsOn
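-- | Interpret a single command inside the Action monad.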
eval :: Command m i v a -> Action m i v a
eval = lift . lift . Free.eval
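-- | True if the given input is identical to the previous input.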
confirmedCommand :: Input -> Action m i v Bool
confirmedCommand i = do
i0 <- use lastInput
pure $ Just i == i0
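-- | All definitions in a branch, as search results sorted by name.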
listBranch :: Branch0 m -> [SearchResult]
listBranch (Branch.toNames0 -> b) =
List.sortOn (\s -> (SR.name s, s)) (SR.fromNames b)
-- | restores the full hash to these search results, for _numberedArgs purposes
searchResultToHQString :: SearchResult -> String
searchResultToHQString = \case
SR.Tm' n r _ -> HQ'.toString $ HQ'.requalify n r
SR.Tp' n r _ -> HQ'.toString $ HQ'.requalify n (Referent.Ref r)
_ -> error "unpossible match failure"
-- Fuzzy-match score of a query name against a candidate name; Nothing if no match.
fuzzyNameDistance :: Name -> Name -> Maybe Int
fuzzyNameDistance (Name.toString -> q) (Name.toString -> n) =
Find.simpleFuzzyScore q n
-- return `name` and `name.<everything>...`
_searchBranchPrefix :: Branch m -> Name -> [SearchResult]
_searchBranchPrefix b n = case Path.unsnoc (Path.fromName n) of
Nothing -> []
Just (init, last) -> case Branch.getAt init b of
Nothing -> []
Just b -> SR.fromNames . Names.prefix0 n $ names0
where
lastName = Path.toName (Path.singleton last)
subnames = Branch.toNames0 . Branch.head $
Branch.getAt' (Path.singleton last) b
rootnames =
Names.filter (== lastName) .
Branch.toNames0 . set Branch.children mempty $ Branch.head b
names0 = rootnames <> Names.prefix0 lastName subnames
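-- | Build search results for the given term and type references, one result
-- per name each reference has in `ns`.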
searchResultsFor :: Names0 -> [Referent] -> [Reference] -> [SearchResult]
searchResultsFor ns terms types =
[ SR.termSearchResult ns name ref
| ref <- terms
, name <- toList (Names.namesForReferent ns ref)
] <>
[ SR.typeSearchResult ns name ref
| ref <- types
, name <- toList (Names.namesForReference ns ref)
]
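-- | Score every term and type name in `names0` against the queries (names via
-- the scoring function, hashes by prefix match) and return the deduplicated
-- matches, ordered by score.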
searchBranchScored :: forall score. (Ord score)
=> Names0
-> (Name -> Name -> Maybe score)
-> [HQ.HashQualified Name]
-> [SearchResult]
searchBranchScored names0 score queries =
nubOrd . fmap snd . toList $ searchTermNamespace <> searchTypeNamespace
where
searchTermNamespace = foldMap do1query queries
where
do1query :: HQ.HashQualified Name -> Set (Maybe score, SearchResult)
do1query q = foldMap (score1hq q) (R.toList . Names.terms $ names0)
score1hq :: HQ.HashQualified Name -> (Name, Referent) -> Set (Maybe score, SearchResult)
score1hq query (name, ref) = case query of
HQ.NameOnly qn ->
pair qn
HQ.HashQualified qn h | h `SH.isPrefixOf` Referent.toShortHash ref ->
pair qn
HQ.HashOnly h | h `SH.isPrefixOf` Referent.toShortHash ref ->
Set.singleton (Nothing, result)
_ -> mempty
where
result = SR.termSearchResult names0 name ref
pair qn = case score qn name of
Just score -> Set.singleton (Just score, result)
Nothing -> mempty
searchTypeNamespace = foldMap do1query queries
where
do1query :: HQ.HashQualified Name -> Set (Maybe score, SearchResult)
do1query q = foldMap (score1hq q) (R.toList . Names.types $ names0)
score1hq :: HQ.HashQualified Name -> (Name, Reference) -> Set (Maybe score, SearchResult)
score1hq query (name, ref) = case query of
HQ.NameOnly qn ->
pair qn
HQ.HashQualified qn h | h `SH.isPrefixOf` Reference.toShortHash ref ->
pair qn
HQ.HashOnly h | h `SH.isPrefixOf` Reference.toShortHash ref ->
Set.singleton (Nothing, result)
_ -> mempty
where
result = SR.typeSearchResult names0 name ref
pair qn = case score qn name of
Just score -> Set.singleton (Just score, result)
Nothing -> mempty
handleBackendError :: Backend.BackendError -> Action m i v ()
handleBackendError = \case
Backend.NoSuchNamespace path ->
respond . BranchNotFound $ Path.absoluteToPath' path
Backend.BadRootBranch e -> respond $ BadRootBranch e
Backend.NoBranchForHash h -> do
sbhLength <- eval BranchHashLength
respond . NoBranchWithHash $ SBH.fromHash sbhLength h
Backend.CouldntLoadBranch h -> do
respond . CouldntLoadBranch $ h
Backend.CouldntExpandBranchHash sbh -> respond $ NoBranchWithHash sbh
Backend.AmbiguousBranchHash h hashes ->
respond $ BranchHashAmbiguous h hashes
Backend.MissingSignatureForTerm r ->
respond $ TermMissingType r
respond :: Output v -> Action m i v ()
respond output = eval $ Notify output
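-- | Emit a numbered output message and record its arguments in `numberedArgs`.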
respondNumbered :: NumberedOutput v -> Action m i v ()
respondNumbered output = do
args <- eval $ NotifyNumbered output
unless (null args) $
numberedArgs .= toList args
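-- | Run an action that may fail, reporting the error output instead of
-- propagating it.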
unlessError :: ExceptT (Output v) (Action' m v) () -> Action' m v ()
unlessError ma = runExceptT ma >>= either (eval . Notify) pure
unlessError' :: (e -> Output v) -> ExceptT e (Action' m v) () -> Action' m v ()
unlessError' f ma = unlessError $ withExceptT f ma
-- | Supply `dest0` if you want to print diff messages.
-- Supply `unchangedMessage` if you want it displayed when the merge had no effect.
mergeBranchAndPropagateDefaultPatch :: (Monad m, Var v) => Branch.MergeMode ->
InputDescription -> Maybe (Output v) -> Branch m -> Maybe Path.Path' -> Path.Absolute -> Action' m v ()
mergeBranchAndPropagateDefaultPatch mode inputDescription unchangedMessage srcb dest0 dest =
ifM (mergeBranch mode inputDescription srcb dest0 dest)
(loadPropagateDiffDefaultPatch inputDescription dest0 dest)
(for_ unchangedMessage respond)
where
mergeBranch :: (Monad m, Var v) =>
Branch.MergeMode -> InputDescription -> Branch m -> Maybe Path.Path' -> Path.Absolute -> Action' m v Bool
mergeBranch mode inputDescription srcb dest0 dest = unsafeTime "Merge Branch" $ do
destb <- getAt dest
merged <- eval $ Merge mode srcb destb
b <- updateAtM inputDescription dest (const $ pure merged)
for_ dest0 $ \dest0 ->
diffHelper (Branch.head destb) (Branch.head merged) >>=
respondNumbered . uncurry (ShowDiffAfterMerge dest0 dest)
pure b
loadPropagateDiffDefaultPatch :: (Monad m, Var v) =>
InputDescription -> Maybe Path.Path' -> Path.Absolute -> Action' m v ()
loadPropagateDiffDefaultPatch inputDescription dest0 dest = unsafeTime "Propagate Default Patch" $ do
original <- getAt dest
patch <- eval . Eval $ Branch.getPatch defaultPatchNameSegment (Branch.head original)
patchDidChange <- propagatePatch inputDescription patch dest
when patchDidChange . for_ dest0 $ \dest0 -> do
patched <- getAt dest
let patchPath = snoc dest0 defaultPatchNameSegment
diffHelper (Branch.head original) (Branch.head patched) >>=
respondNumbered . uncurry (ShowDiffAfterMergePropagate dest0 dest patchPath)
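-- | The branch at the given absolute path, or the empty branch if none exists.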
getAt :: Functor m => Path.Absolute -> Action m i v (Branch m)
getAt (Path.Absolute p) =
use root <&> fromMaybe Branch.empty . Branch.getAt p
-- Update a branch at the given path, returning `True` if
-- an update occurred and false otherwise
updateAtM :: Applicative m
=> InputDescription
-> Path.Absolute
-> (Branch m -> Action m i v (Branch m))
-> Action m i v Bool
updateAtM reason (Path.Absolute p) f = do
b <- use lastSavedRoot
b' <- Branch.modifyAtM p f b
updateRoot b' reason
pure $ b /= b'
stepAt
:: forall m i v
. Monad m
=> InputDescription
-> (Path, Branch0 m -> Branch0 m)
-> Action m i v ()
stepAt cause = stepManyAt @m @[] cause . pure
stepAtNoSync :: forall m i v. Monad m
=> (Path, Branch0 m -> Branch0 m)
-> Action m i v ()
stepAtNoSync = stepManyAtNoSync @m @[] . pure
stepAtM :: forall m i v. Monad m
=> InputDescription
-> (Path, Branch0 m -> m (Branch0 m))
-> Action m i v ()
stepAtM cause = stepManyAtM @m @[] cause . pure
stepAtM'
:: forall m i v
. Monad m
=> InputDescription
-> (Path, Branch0 m -> Action m i v (Branch0 m))
-> Action m i v Bool
stepAtM' cause = stepManyAtM' @m @[] cause . pure
stepAtMNoSync'
:: forall m i v
. Monad m
=> (Path, Branch0 m -> Action m i v (Branch0 m))
-> Action m i v Bool
stepAtMNoSync' = stepManyAtMNoSync' @m @[] . pure
stepManyAt
:: (Monad m, Foldable f)
=> InputDescription
-> f (Path, Branch0 m -> Branch0 m)
-> Action m i v ()
stepManyAt reason actions = do
stepManyAtNoSync actions
b <- use root
updateRoot b reason
-- Like stepManyAt, but doesn't update the root
stepManyAtNoSync
:: (Monad m, Foldable f)
=> f (Path, Branch0 m -> Branch0 m)
-> Action m i v ()
stepManyAtNoSync actions = do
b <- use root
let new = Branch.stepManyAt actions b
root .= new
stepManyAtM :: (Monad m, Foldable f)
=> InputDescription
-> f (Path, Branch0 m -> m (Branch0 m))
-> Action m i v ()
stepManyAtM reason actions = do
stepManyAtMNoSync actions
b <- use root
updateRoot b reason
stepManyAtMNoSync :: (Monad m, Foldable f)
=> f (Path, Branch0 m -> m (Branch0 m))
-> Action m i v ()
stepManyAtMNoSync actions = do
b <- use root
b' <- eval . Eval $ Branch.stepManyAtM actions b
root .= b'
stepManyAtM' :: (Monad m, Foldable f)
=> InputDescription
-> f (Path, Branch0 m -> Action m i v (Branch0 m))
-> Action m i v Bool
stepManyAtM' reason actions = do
b <- use root
b' <- Branch.stepManyAtM actions b
updateRoot b' reason
pure (b /= b')
stepManyAtMNoSync' :: (Monad m, Foldable f)
=> f (Path, Branch0 m -> Action m i v (Branch0 m))
-> Action m i v Bool
stepManyAtMNoSync' actions = do
b <- use root
b' <- Branch.stepManyAtM actions b
root .= b'
pure (b /= b')
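-- | Set a new root branch, sync it and append to the reflog, but only when it
-- differs from the last saved root.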
updateRoot :: Branch m -> InputDescription -> Action m i v ()
updateRoot new reason = do
old <- use lastSavedRoot
when (old /= new) $ do
root .= new
eval $ SyncLocalRootBranch new
eval $ AppendToReflog reason old new
lastSavedRoot .= new
-- cata for 0, 1, or more elements of a Foldable
-- tries to match as lazily as possible
zeroOneOrMore :: Foldable f => f a -> b -> (a -> b) -> (f a -> b) -> b
zeroOneOrMore f zero one more = case toList f of
_ : _ : _ -> more f
a : _ -> one a
_ -> zero
-- Goal: If `remaining = root - toBeDeleted` contains definitions X which
-- depend on definitions Y not in `remaining` (which should also be in
-- `toBeDeleted`), then complain by returning (Y, X).
getEndangeredDependents :: forall m. Monad m
=> (Reference -> m (Set Reference))
-> Names0
-> Names0
-> m (Names0, Names0)
getEndangeredDependents getDependents toDelete root = do
let remaining = root `Names.difference` toDelete
toDelete', remaining', extinct :: Set Reference
toDelete' = Names.allReferences toDelete
remaining' = Names.allReferences remaining -- left over after delete
extinct = toDelete' `Set.difference` remaining' -- deleting and not left over
accumulateDependents m r = getDependents r <&> \ds -> Map.insert r ds m
dependentsOfExtinct :: Map Reference (Set Reference) <-
foldM accumulateDependents mempty extinct
let orphaned, endangered, failed :: Set Reference
orphaned = fold dependentsOfExtinct
endangered = orphaned `Set.intersection` remaining'
failed = Set.filter hasEndangeredDependent extinct
hasEndangeredDependent r = any (`Set.member` endangered)
(dependentsOfExtinct Map.! r)
pure ( Names.restrictReferences failed toDelete
, Names.restrictReferences endangered root `Names.difference` toDelete)
-- Applies the selection filter to the adds/updates of a slurp result,
-- meaning that adds/updates should only contain the selection or its transitive
-- dependencies; any unselected transitive dependencies of the selection will
-- be added to `extraDefinitions`.
applySelection
:: forall v a
. Var v
=> [HQ'.HashQualified Name]
-> UF.TypecheckedUnisonFile v a
-> SlurpResult v
-> SlurpResult v
applySelection [] _ = id
applySelection hqs file = \sr@SlurpResult{..} ->
sr { adds = adds `SC.intersection` closed
, updates = updates `SC.intersection` closed
, extraDefinitions = closed `SC.difference` selection
}
where
selectedNames0 =
Names.filterByHQs (Set.fromList hqs) (UF.typecheckedToNames0 file)
selection, closed :: SlurpComponent v
selection = SlurpComponent selectedTypes selectedTerms
closed = SC.closeWithDependencies file selection
selectedTypes, selectedTerms :: Set v
selectedTypes = Set.map var $ R.dom (Names.types selectedNames0)
selectedTerms = Set.map var $ R.dom (Names.terms selectedNames0)
var :: Var v => Name -> v
var name = Var.named (Name.toText name)
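-- | Classify the definitions of a typechecked file against the existing names:
-- adds, duplicates, updates, conflicts, constructor/term collisions and aliases.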
toSlurpResult
:: forall v
. Var v
=> Path.Absolute
-> UF.TypecheckedUnisonFile v Ann
-> Names0
-> SlurpResult v
toSlurpResult currentPath uf existingNames =
Slurp.subtractComponent (conflicts <> ctorCollisions) $ SlurpResult
uf
mempty
adds
dups
mempty
conflicts
updates
termCtorCollisions
ctorTermCollisions
termAliases
typeAliases
mempty
where
fileNames0 = UF.typecheckedToNames0 uf
sc :: R.Relation Name Referent -> R.Relation Name Reference -> SlurpComponent v
sc terms types = SlurpComponent { terms = Set.map var (R.dom terms)
, types = Set.map var (R.dom types) }
-- conflict (n,r) if n is conflicted in names0
conflicts :: SlurpComponent v
conflicts = sc terms types where
terms = R.filterDom (conflicted . Names.termsNamed existingNames)
(Names.terms fileNames0)
types = R.filterDom (conflicted . Names.typesNamed existingNames)
(Names.types fileNames0)
conflicted s = Set.size s > 1
ctorCollisions :: SlurpComponent v
ctorCollisions =
mempty { SC.terms = termCtorCollisions <> ctorTermCollisions }
-- termCtorCollision (n,r) if (n, r' /= r) exists in existingNames and
-- r is Ref and r' is Con
termCtorCollisions :: Set v
termCtorCollisions = Set.fromList
[ var n
| (n, Referent.Ref{}) <- R.toList (Names.terms fileNames0)
, [[email protected]{}] <- [toList $ Names.termsNamed existingNames n]
-- ignore collisions w/ ctors of types being updated
, Set.notMember (Referent.toReference r) typesToUpdate
]
-- the set of typerefs that are being updated by this file
typesToUpdate :: Set Reference
typesToUpdate = Set.fromList
[ r
| (n, r') <- R.toList (Names.types fileNames0)
, r <- toList (Names.typesNamed existingNames n)
, r /= r'
]
-- ctorTermCollisions (n,r) if (n, r' /= r) exists in names0 and r is Con
-- and r' is Ref except we relaxed it to where r' can be Con or Ref
-- what if (n,r) and (n,r' /= r) exists in names and r, r' are Con
ctorTermCollisions :: Set v
ctorTermCollisions = Set.fromList
[ var n
| (n, Referent.Con{}) <- R.toList (Names.terms fileNames0)
, r <- toList $ Names.termsNamed existingNames n
-- ignore collisions w/ ctors of types being updated
, Set.notMember (Referent.toReference r) typesToUpdate
, Set.notMember (var n) (terms dups)
]
-- duplicate (n,r) if (n,r) exists in names0
dups :: SlurpComponent v
dups = sc terms types where
terms = R.intersection (Names.terms existingNames) (Names.terms fileNames0)
types = R.intersection (Names.types existingNames) (Names.types fileNames0)
-- update (n,r) if (n,r' /= r) exists in existingNames and r, r' are Ref
updates :: SlurpComponent v
updates = SlurpComponent (Set.fromList types) (Set.fromList terms) where
terms =
[ var n
| (n, r'@Referent.Ref{}) <- R.toList (Names.terms fileNames0)
, [[email protected]{}] <- [toList $ Names.termsNamed existingNames n]
, r' /= r
]
types =
[ var n
| (n, r') <- R.toList (Names.types fileNames0)
, [r] <- [toList $ Names.typesNamed existingNames n]
, r' /= r
]
buildAliases
:: R.Relation Name Referent
-> R.Relation Name Referent
-> Set v
-> Map v Slurp.Aliases
buildAliases existingNames namesFromFile duplicates = Map.fromList
[ ( var n
, if null aliasesOfOld
then Slurp.AddAliases aliasesOfNew
else Slurp.UpdateAliases aliasesOfOld aliasesOfNew
)
| (n, [email protected]{}) <- R.toList namesFromFile
-- All the refs whose names include `n`, and are not `r`
, let
refs = Set.delete r $ R.lookupDom n existingNames
aliasesOfNew =
Set.map (Path.unprefixName currentPath) . Set.delete n $
R.lookupRan r existingNames
aliasesOfOld =
Set.map (Path.unprefixName currentPath) . Set.delete n . R.dom $
R.restrictRan existingNames refs
, not (null aliasesOfNew && null aliasesOfOld)
, Set.notMember (var n) duplicates
]
termAliases :: Map v Slurp.Aliases
termAliases = buildAliases (Names.terms existingNames)
(Names.terms fileNames0)
(SC.terms dups)
typeAliases :: Map v Slurp.Aliases
typeAliases = buildAliases (R.mapRan Referent.Ref $ Names.types existingNames)
(R.mapRan Referent.Ref $ Names.types fileNames0)
(SC.types dups)
-- (n,r) is in `adds` if n isn't in existingNames
adds = sc terms types where
terms = addTerms (Names.terms existingNames) (Names.terms fileNames0)
types = addTypes (Names.types existingNames) (Names.types fileNames0)
addTerms existingNames = R.filter go where
go (n, Referent.Ref{}) = (not . R.memberDom n) existingNames
go _ = False
addTypes existingNames = R.filter go where
go (n, _) = (not . R.memberDom n) existingNames
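-- | Restrict a typechecked file to just the definitions that the slurp result
-- will add or update.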
filterBySlurpResult :: Ord v
=> SlurpResult v
-> UF.TypecheckedUnisonFile v Ann
-> UF.TypecheckedUnisonFile v Ann
filterBySlurpResult SlurpResult{..}
(UF.TypecheckedUnisonFileId
dataDeclarations'
effectDeclarations'
topLevelComponents'
watchComponents
hashTerms) =
UF.TypecheckedUnisonFileId datas effects tlcs watches hashTerms'
where
keep = updates <> adds
keepTerms = SC.terms keep
keepTypes = SC.types keep
hashTerms' = Map.restrictKeys hashTerms keepTerms
datas = Map.restrictKeys dataDeclarations' keepTypes
effects = Map.restrictKeys effectDeclarations' keepTypes
tlcs = filter (not.null) $ fmap (List.filter filterTLC) topLevelComponents'
watches = filter (not.null.snd) $ fmap (second (List.filter filterTLC)) watchComponents
filterTLC (v,_,_) = Set.member v keepTerms
-- updates the namespace for adding `slurp`
doSlurpAdds :: forall m v. (Monad m, Var v)
=> SlurpComponent v
-> UF.TypecheckedUnisonFile v Ann
-> (Branch0 m -> Branch0 m)
doSlurpAdds slurp uf = Branch.stepManyAt0 (typeActions <> termActions)
where
typeActions = map doType . toList $ SC.types slurp
termActions = map doTerm . toList $
SC.terms slurp <> Slurp.constructorsFor (SC.types slurp) uf
names = UF.typecheckedToNames0 uf
tests = Set.fromList $ fst <$> UF.watchesOfKind UF.TestWatch (UF.discardTypes uf)
(isTestType, isTestValue) = isTest
md v =
if Set.member v tests then Metadata.singleton isTestType isTestValue
else Metadata.empty
doTerm :: v -> (Path, Branch0 m -> Branch0 m)
doTerm v = case toList (Names.termsNamed names (Name.fromVar v)) of
[] -> errorMissingVar v
[r] -> case Path.splitFromName (Name.fromVar v) of
Nothing -> errorEmptyVar
Just split -> BranchUtil.makeAddTermName split r (md v)
wha -> error $ "Unison bug, typechecked file w/ multiple terms named "
<> Var.nameStr v <> ": " <> show wha
doType :: v -> (Path, Branch0 m -> Branch0 m)
doType v = case toList (Names.typesNamed names (Name.fromVar v)) of
[] -> errorMissingVar v
[r] -> case Path.splitFromName (Name.fromVar v) of
Nothing -> errorEmptyVar
Just split -> BranchUtil.makeAddTypeName split r Metadata.empty
wha -> error $ "Unison bug, typechecked file w/ multiple types named "
<> Var.nameStr v <> ": " <> show wha
errorEmptyVar = error "encountered an empty var name"
errorMissingVar v = error $ "expected to find " ++ show v ++ " in " ++ show uf
doSlurpUpdates :: Monad m
=> Map Name (Reference, Reference)
-> Map Name (Reference, Reference)
-> [(Name, Referent)]
-> (Branch0 m -> Branch0 m)
doSlurpUpdates typeEdits termEdits deprecated b0 =
Branch.stepManyAt0 (typeActions <> termActions <> deprecateActions) b0
where
typeActions = join . map doType . Map.toList $ typeEdits
termActions = join . map doTerm . Map.toList $ termEdits
deprecateActions = join . map doDeprecate $ deprecated where
doDeprecate (n, r) = case Path.splitFromName n of
Nothing -> errorEmptyVar
Just split -> [BranchUtil.makeDeleteTermName split r]
-- we copy over the metadata on the old thing
-- todo: if the thing being updated, m, is metadata for something x in b0
-- update x's md to reference `m`
doType, doTerm ::
(Name, (Reference, Reference)) -> [(Path, Branch0 m -> Branch0 m)]
doType (n, (old, new)) = case Path.splitFromName n of
Nothing -> errorEmptyVar
Just split -> [ BranchUtil.makeDeleteTypeName split old
, BranchUtil.makeAddTypeName split new oldMd ]
where
oldMd = BranchUtil.getTypeMetadataAt split old b0
doTerm (n, (old, new)) = case Path.splitFromName n of
Nothing -> errorEmptyVar
Just split -> [ BranchUtil.makeDeleteTermName split (Referent.Ref old)
, BranchUtil.makeAddTermName split (Referent.Ref new) oldMd ]
where
-- oldMd is the metadata linked to the old definition
-- we relink it to the new definition
oldMd = BranchUtil.getTermMetadataAt split (Referent.Ref old) b0
errorEmptyVar = error "encountered an empty var name"
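-- | For a set of references, load the type of each term reference and the
-- display object (user decl, builtin, or missing) for each type reference.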
loadDisplayInfo ::
Set Reference -> Action m i v ([(Reference, Maybe (Type v Ann))]
,[(Reference, DisplayObject () (DD.Decl v Ann))])
loadDisplayInfo refs = do
termRefs <- filterM (eval . IsTerm) (toList refs)
typeRefs <- filterM (eval . IsType) (toList refs)
terms <- forM termRefs $ \r -> (r,) <$> eval (LoadTypeOfTerm r)
types <- forM typeRefs $ \r -> (r,) <$> loadTypeDisplayObject r
pure (terms, types)
-- Any absolute names in the input which have `currentPath` as a prefix
-- are converted to names relative to current path. all other names are
-- converted to absolute names. For example:
--
-- e.g. if currentPath = .foo.bar
-- then name foo.bar.baz becomes baz
-- name cat.dog becomes .cat.dog
fixupNamesRelative :: Path.Absolute -> Names0 -> Names0
fixupNamesRelative currentPath' = Names3.map0 fixName where
prefix = Path.toName (Path.unabsolute currentPath')
fixName n = if currentPath' == Path.absoluteEmpty then n else
fromMaybe (Name.makeAbsolute n) (Name.stripNamePrefix prefix n)
makeHistoricalParsingNames ::
Monad m => Set (HQ.HashQualified Name) -> Action' m v Names
makeHistoricalParsingNames lexedHQs = do
rawHistoricalNames <- findHistoricalHQs lexedHQs
basicNames0 <- basicParseNames0
currentPath <- use currentPath
pure $ Names basicNames0
(Names3.makeAbsolute0 rawHistoricalNames <>
fixupNamesRelative currentPath rawHistoricalNames)
loadTypeDisplayObject
:: Reference -> Action m i v (DisplayObject () (DD.Decl v Ann))
loadTypeDisplayObject = \case
Reference.Builtin _ -> pure (BuiltinObject ())
Reference.DerivedId id ->
maybe (MissingObject $ Reference.idToShortHash id) UserObject
<$> eval (LoadType id)
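-- | Lex a source text and build the historical parsing names needed to resolve
-- any hash-qualified identifiers appearing in it.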
lexedSource :: Monad m => SourceName -> Source -> Action' m v (Names, LexedSource)
lexedSource name src = do
let tokens = L.lexer (Text.unpack name) (Text.unpack src)
getHQ = \case
L.Backticks s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh)
L.WordyId s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh)
L.SymbolyId s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh)
L.Hash sh -> Just (HQ.HashOnly sh)
_ -> Nothing
hqs = Set.fromList . mapMaybe (getHQ . L.payload) $ tokens
parseNames <- makeHistoricalParsingNames hqs
pure (parseNames, (src, tokens))
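-- | A pretty-print environment using the shortest unambiguous suffix of each
-- name; `fqnPPE` below uses fully qualified names instead.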
suffixifiedPPE :: Names -> Action' m v PPE.PrettyPrintEnv
suffixifiedPPE ns = eval CodebaseHashLength <&> (`PPE.fromSuffixNames` ns)
fqnPPE :: Names -> Action' m v PPE.PrettyPrintEnv
fqnPPE ns = eval CodebaseHashLength <&> (`PPE.fromNames` ns)
parseSearchType :: (Monad m, Var v)
=> Input -> String -> Action' m v (Either (Output v) (Type v Ann))
parseSearchType input typ = fmap Type.removeAllEffectVars <$> parseType input typ
parseType :: (Monad m, Var v)
=> Input -> String -> Action' m v (Either (Output v) (Type v Ann))
parseType input src = do
-- `show Input` is the name of the "file" being lexed
(names0, lexed) <- lexedSource (Text.pack $ show input) (Text.pack src)
parseNames <- basicParseNames0
let names = Names3.push (Names3.currentNames names0)
(Names3.Names parseNames (Names3.oldNames names0))
e <- eval $ ParseType names lexed
pure $ case e of
Left err -> Left $ TypeParseError src err
Right typ -> case Type.bindNames mempty (Names3.currentNames names)
$ Type.generalizeLowercase mempty typ of
Left es -> Left $ ParseResolutionFailures src (toList es)
Right typ -> Right typ
makeShadowedPrintNamesFromLabeled
:: Monad m => Set LabeledDependency -> Names0 -> Action' m v Names
makeShadowedPrintNamesFromLabeled deps shadowing =
Names3.shadowing shadowing <$> makePrintNamesFromLabeled' deps
makePrintNamesFromLabeled'
:: Monad m => Set LabeledDependency -> Action' m v Names
makePrintNamesFromLabeled' deps = do
root <- use root
currentPath <- use currentPath
(_missing, rawHistoricalNames) <- eval . Eval $ Branch.findHistoricalRefs
deps
root
basicNames0 <- basicPrettyPrintNames0A
pure $ Names basicNames0 (fixupNamesRelative currentPath rawHistoricalNames)
getTermsIncludingHistorical
:: Monad m => Path.HQSplit -> Branch0 m -> Action' m v (Set Referent)
getTermsIncludingHistorical (p, hq) b = case Set.toList refs of
[] -> case hq of
HQ'.HashQualified n hs -> do
names <- findHistoricalHQs
$ Set.fromList [HQ.HashQualified (Name.unsafeFromText (NameSegment.toText n)) hs]
pure . R.ran $ Names.terms names
_ -> pure Set.empty
_ -> pure refs
where refs = BranchUtil.getTerm (p, hq) b
-- discards inputs that aren't hashqualified;
-- I'd enforce it with finer-grained types if we had them.
findHistoricalHQs :: Monad m => Set (HQ.HashQualified Name) -> Action' m v Names0
findHistoricalHQs lexedHQs0 = do
root <- use root
currentPath <- use currentPath
let
-- omg this nightmare name-to-path parsing code is littered everywhere.
-- We need to refactor so that the absolute-ness of a name isn't represented
-- by magical text combinations.
-- Anyway, this function takes a name, tries to determine whether it is
-- relative or absolute, and tries to return the corresponding name that is
-- /relative/ to the root.
preprocess n = case Name.toString n of
-- some absolute name that isn't just "."
'.' : t@(_:_) -> Name.unsafeFromString t
-- something in current path
_ -> if Path.isRoot currentPath then n
else Name.joinDot (Path.toName . Path.unabsolute $ currentPath) n
lexedHQs = Set.map (fmap preprocess) . Set.filter HQ.hasHash $ lexedHQs0
(_missing, rawHistoricalNames) <- eval . Eval $ Branch.findHistoricalHQs lexedHQs root
pure rawHistoricalNames
basicPrettyPrintNames0A :: Functor m => Action' m v Names0
basicPrettyPrintNames0A = snd <$> basicNames0'
makeShadowedPrintNamesFromHQ :: Monad m => Set (HQ.HashQualified Name) -> Names0 -> Action' m v Names
makeShadowedPrintNamesFromHQ lexedHQs shadowing = do
rawHistoricalNames <- findHistoricalHQs lexedHQs
basicNames0 <- basicPrettyPrintNames0A
currentPath <- use currentPath
-- The basic names go into "current", but are shadowed by "shadowing".
-- They go again into "historical" as a hack that makes them available HQ-ed.
pure $
Names3.shadowing
shadowing
(Names basicNames0 (fixupNamesRelative currentPath rawHistoricalNames))
basicParseNames0, slurpResultNames0 :: Functor m => Action' m v Names0
basicParseNames0 = fst <$> basicNames0'
-- we check the file against everything in the current path
slurpResultNames0 = currentPathNames0
currentPathNames0 :: Functor m => Action' m v Names0
currentPathNames0 = do
currentPath' <- use currentPath
currentBranch' <- getAt currentPath'
pure $ Branch.toNames0 (Branch.head currentBranch')
-- implementation detail of basicParseNames0 and basicPrettyPrintNames0
basicNames0' :: Functor m => Action' m v (Names0, Names0)
basicNames0' = do
root' <- use root
currentPath' <- use currentPath
pure $ Backend.basicNames0' root' (Path.unabsolute currentPath')
data AddRunMainResult v
= NoTermWithThatName
| TermHasBadType (Type v Ann)
| RunMainSuccess (TypecheckedUnisonFile v Ann)
-- Adds a watch expression of the given name to the file, if
-- it would resolve to a TLD in the file. Returns the freshened
-- variable name and the new typechecked file.
--
-- Otherwise, returns `Nothing`.
addWatch
:: (Monad m, Var v)
=> String
-> Maybe (TypecheckedUnisonFile v Ann)
-> Action' m v (Maybe (v, TypecheckedUnisonFile v Ann))
addWatch _watchName Nothing = pure Nothing
addWatch watchName (Just uf) = do
let components = join $ UF.topLevelComponents uf
let mainComponent = filter ((\v -> Var.nameStr v == watchName) . view _1) components
case mainComponent of
[(v, tm, ty)] -> pure . pure $ let
v2 = Var.freshIn (Set.fromList [v]) v
a = ABT.annotation tm
in (v2, UF.typecheckedUnisonFile
(UF.dataDeclarationsId' uf)
(UF.effectDeclarationsId' uf)
(UF.topLevelComponents' uf)
(UF.watchComponents uf <> [(UF.RegularWatch, [(v2, Term.var a v, ty)])]))
_ -> addWatch watchName Nothing
-- Given a typechecked file with a main function called `mainName`
-- of the type `'{IO} ()`, adds an extra binding which
-- forces the `main` function.
--
-- If that function doesn't exist in the typechecked file, the
-- codebase is consulted.
addRunMain
:: (Monad m, Var v)
=> String
-> Maybe (TypecheckedUnisonFile v Ann)
-> Action' m v (AddRunMainResult v)
addRunMain mainName Nothing = do
parseNames0 <- basicParseNames0
let loadTypeOfTerm ref = eval $ LoadTypeOfTerm ref
mainType <- eval RuntimeMain
mainToFile <$>
MainTerm.getMainTerm loadTypeOfTerm parseNames0 mainName mainType
where
mainToFile (MainTerm.NotAFunctionName _) = NoTermWithThatName
mainToFile (MainTerm.NotFound _) = NoTermWithThatName
mainToFile (MainTerm.BadType _ ty) = maybe NoTermWithThatName TermHasBadType ty
mainToFile (MainTerm.Success hq tm typ) = RunMainSuccess $
let v = Var.named (HQ.toText hq) in
UF.typecheckedUnisonFile mempty mempty mempty [("main",[(v, tm, typ)])] -- mempty
addRunMain mainName (Just uf) = do
let components = join $ UF.topLevelComponents uf
let mainComponent = filter ((\v -> Var.nameStr v == mainName) . view _1) components
mainType <- eval RuntimeMain
case mainComponent of
[(v, tm, ty)] -> pure $ let
v2 = Var.freshIn (Set.fromList [v]) v
a = ABT.annotation tm
in
if Typechecker.isSubtype ty mainType then RunMainSuccess $ let
runMain = DD.forceTerm a a (Term.var a v)
in UF.typecheckedUnisonFile
(UF.dataDeclarationsId' uf)
(UF.effectDeclarationsId' uf)
(UF.topLevelComponents' uf)
(UF.watchComponents uf <> [("main", [(v2, runMain, mainType)])])
else TermHasBadType ty
_ -> addRunMain mainName Nothing
executePPE
:: (Var v, Monad m)
=> TypecheckedUnisonFile v a
-> Action' m v PPE.PrettyPrintEnv
executePPE unisonFile =
suffixifiedPPE =<< displayNames unisonFile
-- Produce a `Names` needed to display all the hashes used in the given file.
displayNames :: (Var v, Monad m)
=> TypecheckedUnisonFile v a
-> Action' m v Names
displayNames unisonFile =
-- voodoo
makeShadowedPrintNamesFromLabeled
(UF.termSignatureExternalLabeledDependencies unisonFile)
(UF.typecheckedToNames0 unisonFile)
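-- | Diff two branches and pair the result with a pretty-print environment
-- suitable for rendering the diff.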
diffHelper :: Monad m
=> Branch0 m
-> Branch0 m
-> Action' m v (PPE.PrettyPrintEnv, OBranchDiff.BranchDiffOutput v Ann)
diffHelper before after = do
hqLength <- eval CodebaseHashLength
diff <- eval . Eval $ BranchDiff.diff0 before after
names0 <- basicPrettyPrintNames0A
ppe <- PPE.suffixifiedPPE <$> prettyPrintEnvDecl (Names names0 mempty)
(ppe,) <$>
OBranchDiff.toOutput
loadTypeOfTerm
declOrBuiltin
hqLength
(Branch.toNames0 before)
(Branch.toNames0 after)
ppe
diff
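-- | The type of a term referent: loaded from the codebase for plain references,
-- or read off the data declaration for constructors.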
loadTypeOfTerm :: Referent -> Action m i v (Maybe (Type v Ann))
loadTypeOfTerm (Referent.Ref r) = eval $ LoadTypeOfTerm r
loadTypeOfTerm (Referent.Con (Reference.DerivedId r) cid _) = do
decl <- eval $ LoadType r
case decl of
Just (either DD.toDataDecl id -> dd) -> pure $ DD.typeOfConstructor dd cid
Nothing -> pure Nothing
loadTypeOfTerm Referent.Con{} = error $
reportBug "924628772" "Attempt to load a type declaration which is a builtin!"
declOrBuiltin :: Reference -> Action m i v (Maybe (DD.DeclOrBuiltin v Ann))
declOrBuiltin r = case r of
Reference.Builtin{} ->
pure . fmap DD.Builtin $ Map.lookup r Builtin.builtinConstructorType
Reference.DerivedId id ->
fmap DD.Decl <$> eval (LoadType id)
| unisonweb/platform | parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs | mit | 132,775 | 0 | 42 | 40,681 | 36,988 | 18,359 | 18,629 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -Wno-orphans #-}
module Instances.Response where
import GHC.Generics
import Test.QuickCheck.Arbitrary.Generic
import Test.QuickCheck.Instances()
import Web.Facebook.Messenger.Types.Responses
import Instances.Request()
import Instances.Static()
deriving instance Generic MessageResponse
instance Arbitrary MessageResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic SenderActionResponse
instance Arbitrary SenderActionResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic SuccessResponse
instance Arbitrary SuccessResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic MessageCreativeResponse
instance Arbitrary MessageCreativeResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic BroadcastMessageResponse
instance Arbitrary BroadcastMessageResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic GetProfileResponse
instance Arbitrary GetProfileResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic ErrorResponse
instance Arbitrary ErrorResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic ErrorDetails
instance Arbitrary ErrorDetails where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic AttachmentUploadResponse
instance Arbitrary AttachmentUploadResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic UserProfileResponse
instance Arbitrary UserProfileResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic MessengerCodeResponse
instance Arbitrary MessengerCodeResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic AccountLinkingResponse
instance Arbitrary AccountLinkingResponse where
arbitrary = genericArbitrary
shrink = genericShrink
-- | Only taking 5 to speed up testing
deriving instance Generic CheckoutUpdateResponse
instance Arbitrary CheckoutUpdateResponse where
arbitrary = CheckoutUpdateResponse <$> fmap (take 5) arbitrary
shrink = genericShrink
deriving instance Generic Shipping
instance Arbitrary Shipping where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic ThreadControlResponse
instance Arbitrary ThreadControlResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic ThreadOwnerResponse
instance Arbitrary ThreadOwnerResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance {-# OVERLAPPABLE #-} Generic (DataResponse a)
instance {-# OVERLAPPABLE #-} (Arbitrary a, Generic a) => Arbitrary (DataResponse a) where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic DomainWhitelistingResponse
instance Arbitrary DomainWhitelistingResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic SecondaryReceiverResponse
instance Arbitrary SecondaryReceiverResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic SecondaryReceiverElement
instance Arbitrary SecondaryReceiverElement where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic TagResponse
instance Arbitrary TagResponse where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic TagElement
instance Arbitrary TagElement where
arbitrary = genericArbitrary
shrink = genericShrink
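-- A hedged usage sketch (the property name and the To/FromJSON instances are
-- assumptions, not part of this module): with these Arbitrary instances in
-- scope, QuickCheck can generate responses for round-trip tests, e.g.
--
-- > prop_encodeDecode :: MessageResponse -> Bool
-- > prop_encodeDecode r = eitherDecode (encode r) == Right r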
| Vlix/facebookmessenger | test/Instances/Response.hs | mit | 3,784 | 0 | 9 | 485 | 673 | 371 | 302 | 100 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
module WeiXin.PublicPlatform.Conversation.Yesod where
-- {{{1 imports
import ClassyPrelude.Yesod hiding (Proxy, proxy)
import qualified Control.Exception.Safe as ExcSafe
import Control.Monad.Logger
import Data.Proxy
import qualified Data.ByteString.Lazy as LB
import qualified Data.Text as T
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as A
import qualified Data.Conduit.List as CL
import Control.Monad.Except hiding (forM_)
import Control.Monad.Trans.Maybe
import Data.Time (NominalDiffTime, addUTCTime)
import Data.List.NonEmpty (NonEmpty(..))
import WeiXin.PublicPlatform.Conversation
import WeiXin.PublicPlatform.Class
import WeiXin.PublicPlatform.Yesod.Model
import WeiXin.PublicPlatform.InMsgHandler
import WeiXin.PublicPlatform.WS
import WeiXin.PublicPlatform.Utils
import WeiXin.PublicPlatform.Media
-- }}}1
saveWxppTalkState :: forall m a r. ( MonadLoggerIO m, ToJSON a, WxTalkerState r m a)
=> WxppDbRunner
-> (a -> Text)
-> WxppTalkStateId
-> a
-> WxTalkerMonad r m ()
saveWxppTalkState db_runner get_state_type state_id x = mkWxTalkerMonad $ \env -> runExceptT $ do
now <- liftIO getCurrentTime
done <- ExceptT $ flip runWxTalkerMonad env $ wxTalkIfDone x
log_func <- askLoggerIO
liftIO $ flip runLoggingT log_func $ runWxppDB db_runner $ do
update state_id
[ WxppTalkStateTyp =. get_state_type x
, WxppTalkStateJson =. (LB.toStrict $ A.encode x)
, WxppTalkStateDone =. done
, WxppTalkStateUpdatedTime =. now
]
abortCurrentWxppTalkState :: forall m r. (MonadLoggerIO m)
=> WxppDbRunner
-> r
-> (Text -> Maybe (WxppTalkerAbortStateEntry r m))
-- ^ lookup WxppTalkerAbortStateEntry by state's type string
-> WxTalkAbortInitiator
-> WxppAppID
-> WxppOpenID
-> m (Maybe [WxppOutMsg])
-- {{{1
abortCurrentWxppTalkState db_runner common_env lookup_se initiator app_id open_id = do
log_func <- askLoggerIO
m_rec <- liftIO $ flip runLoggingT log_func $ runWxppDB db_runner $ loadWxppTalkStateCurrent app_id open_id
fmap join $ forM m_rec $ \ e_rec@(Entity rec_id rec) -> do
if not $ wxppTalkStateAborted rec || wxppTalkStateDone rec
then fmap Just $ do
out_msgs <- wxppExecTalkAbortForRecord common_env lookup_se initiator e_rec
liftIO $ flip runLoggingT log_func $ runWxppDB db_runner $ update rec_id [ WxppTalkStateAborted =. True ]
return out_msgs
else return Nothing
-- }}}1
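-- A hedged usage sketch (the runner, environment, lookup function and
-- @sendToUser@ are hypothetical): a "cancel" handler could force-abort the
-- user's current conversation and forward the abort messages back to them:
--
-- > m_msgs <- abortCurrentWxppTalkState dbRunner commonEnv lookupEntry
-- >                                     WxTalkAbortByUser appId openId
-- > mapM_ sendToUser (fromMaybe [] m_msgs)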
wxppExecTalkAbortForRecord :: (MonadLogger m)
=> r
-> (Text -> Maybe (WxppTalkerAbortStateEntry r m))
-- ^ lookup WxppTalkerAbortStateEntry by state's type string
-> WxTalkAbortInitiator
-> Entity WxppTalkState
-> m [WxppOutMsg]
wxppExecTalkAbortForRecord common_env lookup_se initiator e_rec@(Entity rec_id rec) = do
-- {{{1
case lookup_se typ_str of
Nothing -> do
$logWarnS wxppLogSource $ "could not find state entry for record #" <> toPathPiece rec_id
<> ", type string was: " <> typ_str
return []
Just (WxppTalkerAbortStateEntry sp ext_env) -> do
err_or_st <- parseWxppTalkStateFromRecord sp e_rec
case err_or_st of
Left err -> do
$logErrorS wxppLogSource $ "parseWxppTalkStateFromRecord failed for record #" <> toPathPiece rec_id
<> ": " <> fromString err
return []
Right Nothing -> return []
Right (Just st) -> do
err_or_outmsgs <- flip runWxTalkerMonad (common_env, ext_env) $ wxTalkAbort st initiator
case err_or_outmsgs of
Left err -> do
$logErrorS wxppLogSource $ "wxTalkAbort failed for record #" <> toPathPiece rec_id
<> ": " <> fromString err
return []
Right x -> return x
where typ_str = wxppTalkStateTyp rec
-- }}}1
newWxppTalkState :: forall m a.
( MonadIO m, ToJSON a) =>
(a -> Text)
-> WxppAppID
-> WxppOpenID
-> a
-> ReaderT SqlBackend m WxppTalkStateId
newWxppTalkState get_state_type app_id open_id x = do
now <- liftIO getCurrentTime
insert $ WxppTalkState
app_id
open_id
(get_state_type x)
(LB.toStrict $ A.encode x)
False
False
now
now
newWxppTalkState' :: forall m a.
( MonadIO m, HasStateType a, ToJSON a ) =>
WxppAppID
-> WxppOpenID
-> a
-> ReaderT SqlBackend m WxppTalkStateId
newWxppTalkState' = newWxppTalkState getStateType'
saveAnyWxppTalkState :: forall m r a. (MonadLoggerIO m, ToJSON a, HasStateType a, WxTalkerState r m a)
=> WxppDbRunner
-> WxppTalkStateId
-> a
-> WxTalkerMonad r m ()
saveAnyWxppTalkState db_runner = saveWxppTalkState db_runner getStateType'
loadAnyWxppTalkState :: forall m a. (MonadLoggerIO m, FromJSON a, HasStateType a)
=> WxppDbRunner
-> Proxy a
-> WxppTalkStateId
-> m (Either String (Maybe a))
loadAnyWxppTalkState db_runner proxy state_id = runExceptT $ runMaybeT $ do
log_func <- askLoggerIO
rec <- MaybeT $ liftIO $ flip runLoggingT log_func $ runWxppDB db_runner $ get state_id
MaybeT $ ExceptT $ parseWxppTalkStateFromRecord proxy $ Entity state_id rec
parseWxppTalkStateFromRecord :: forall m a.
( MonadLogger m, HasStateType a, FromJSON a ) =>
Proxy a
-> Entity WxppTalkState
-> m (Either String (Maybe a))
parseWxppTalkStateFromRecord proxy (Entity rec_id rec) = runExceptT $ runMaybeT $ do
when ( wxppTalkStateAborted rec ) mzero
let state_type = wxppTalkStateTyp rec
when (state_type /= getStateType proxy) mzero
case A.eitherDecodeStrict (wxppTalkStateJson rec) of
Left err -> do
let err_msg = "cannot decode JSON ByteString: WxppTalkState #"
<> toPathPiece rec_id
<> ", " <> fromString err
$logErrorS wxppLogSource err_msg
throwError $ T.unpack err_msg
Right jv -> do
case A.parseEither parseJSON jv of
Left jerr -> do
let err_msg = "cannot decode JSON Value: WxppTalkState #"
<> toPathPiece rec_id
<> ", " <> fromString jerr
$logErrorS wxppLogSource err_msg
throwError $ T.unpack err_msg
Right x -> return x
{-
saveSomeWxppTalkState :: forall m r.
( MonadIO m ) =>
WxppTalkStateId
-> SomeWxppTalkState r (ReaderT SqlBackend m)
-> WxTalkerMonad r (ReaderT SqlBackend m) ()
saveSomeWxppTalkState = saveWxppTalkState getStateTypeOfSomeWxppTalkState
--}
loadWxppTalkStateCurrent :: forall m. (MonadIO m)
=> WxppAppID
-> WxppOpenID
-> ReaderT WxppDbBackend m (Maybe (Entity WxppTalkState))
loadWxppTalkStateCurrent app_id open_id = do
selectFirst [ WxppTalkStateOpenId ==. open_id
, WxppTalkStateAppId ==. app_id
]
[ Desc WxppTalkStateId ]
-- | used with loopRunBgJob
cleanUpTimedOutWxTalk :: (MonadLoggerIO m, HasWxppAppID r)
=> WxppDbRunner
-> r
-> [WxppTalkerAbortStateEntry r m]
                          -- ^ known abort-state entries; the matching entry is looked up by the state's type string
-> (NominalDiffTime, NominalDiffTime)
-> (WxppAppID -> WxppOpenID -> [WxppOutMsg] -> m ())
-> m ()
cleanUpTimedOutWxTalk db_runner common_env entries ttls on_abort_talk = do
-- {{{1
let timeout_ttl = uncurry min ttls
chk_ttl = uncurry max ttls
now <- liftIO getCurrentTime
let dt = addUTCTime (negate $ abs timeout_ttl) now
  -- to make sure the SQL below does not have to search an overly large set
let too_old = flip addUTCTime now $ negate $ chk_ttl
log_func <- askLoggerIO
m_new_records <- liftIO $ runResourceT $ flip runLoggingT log_func $ runWxppDB db_runner $ do
infos <- runConduit $
selectSource
[ WxppTalkStateDone ==. False
, WxppTalkStateAborted ==. False
, WxppTalkStateAppId ==. app_id
, WxppTalkStateUpdatedTime <. dt
, WxppTalkStateUpdatedTime >. too_old
]
[]
.| CL.map (id &&& (wxppTalkStateOpenId . entityVal))
.| CL.consume
    -- update everything in one SQL statement
updateWhere
[ WxppTalkStateId <-. map (entityKey . fst) infos ]
[ WxppTalkStateAborted =. True ]
    -- then notify the users
forM infos $ \(e_rec@(Entity rec_id _), open_id) -> do
m_new <- selectFirst
[ WxppTalkStateAppId ==. app_id
, WxppTalkStateOpenId ==. open_id
, WxppTalkStateId >. rec_id
]
[]
return (e_rec, open_id, m_new)
forM_ m_new_records $ \ (e_rec, open_id, m_new) -> do
    -- only notify the user if no newer conversation has been established for them
when (isNothing m_new) $ do
out_msgs <- wxppExecTalkAbortForRecord common_env lookup_se WxTalkAbortBySys e_rec
on_abort_talk app_id open_id out_msgs
where
app_id = getWxppAppID common_env
match_entry typ_str (WxppTalkerAbortStateEntry p _) = getStateType p == typ_str
lookup_se = \ x -> find (match_entry x) entries
-- }}}1
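-- A hedged usage sketch (the db runner, entry list and @deliverOutMsgs@ are
-- hypothetical): run this periodically, e.g. from loopRunBgJob, to abort idle
-- conversations and notify their users:
--
-- > cleanUpTimedOutWxTalk dbRunner commonEnv abortEntries (300, 3600) $
-- >   \ _app_id open_id msgs -> deliverOutMsgs open_id msgs
--
-- The smaller of the two times is the idle timeout; the larger one bounds how
-- far back in time the query looks.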
-- | Exists only to reduce code duplication:
-- this object can construct the other entries with slightly weaker requirements,
-- namely WxppTalkerStateEntry, WxppTalkerFreshStateEntry and WxppTalkerAbortStateEntry
data WxppTalkerFullStateEntry r0 m = forall s r.
(Eq s, ToJSON s, FromJSON s, HasStateType s
, WxTalkerState (r0, r) m s
, WxTalkerDoneAction (r0, r) m s
, WxTalkerAbortAction (r0, r) m s
, WxTalkerFreshState (r0, r) m s
)
=> WxppTalkerFullStateEntry (Proxy s) r
-- | Used as a parameter of WxppTalkHandlerGeneral below.
-- r0 is the global environment provided by WxppTalkHandlerGeneral.
data WxppTalkerStateEntry r0 m = forall s r.
(Eq s, ToJSON s, FromJSON s, HasStateType s
, WxTalkerState (r0, r) m s
, WxTalkerDoneAction (r0, r) m s
) =>
WxppTalkerStateEntry (Proxy s) r
wxppTalkerStateEntryFromFull :: WxppTalkerFullStateEntry r m -> WxppTalkerStateEntry r m
wxppTalkerStateEntryFromFull (WxppTalkerFullStateEntry p x) = WxppTalkerStateEntry p x
-- | The generic conversation handler.
-- Every input should pass through this handler once:
-- if a conversation is in progress it is handled accordingly,
-- otherwise this is effectively a no-op.
data WxppTalkHandlerGeneral r m = WxppTalkHandlerGeneral
{ wxppTalkDbRunner :: WxppDbRunner -- ^ to run db functions
, wxppTalkDbReadOnlyEnv :: r -- ^ read only data/environment
, wxppTalkDStateEntry :: [WxppTalkerStateEntry r m ]
}
instance JsonConfigable (WxppTalkHandlerGeneral r m) where
type JsonConfigableUnconfigData (WxppTalkHandlerGeneral r m) =
(WxppDbRunner, r, [WxppTalkerStateEntry r m])
isNameOfInMsgHandler _ x = x == "any-talk"
parseWithExtraData _ (f1, f2, f3) _obj = return $ WxppTalkHandlerGeneral f1 f2 f3
type instance WxppInMsgProcessResult (WxppTalkHandlerGeneral r m) = WxppInMsgHandlerResult
instance (WxppApiMonad env m, MonadLoggerIO m) =>
IsWxppInMsgProcessor m (WxppTalkHandlerGeneral r m)
where
processInMsg (WxppTalkHandlerGeneral db_runner env entries) _cache app_info _bs ime = do
log_func <- askLoggerIO
runExceptT $ do
m_state_rec <- mapExceptT (liftIO . flip runLoggingT log_func . runWxppDB db_runner) $ do
lift $ loadWxppTalkStateCurrent app_id open_id
case m_state_rec of
Nothing -> return []
Just e_state_rec@(Entity state_id _) -> do
let mk :: WxppTalkerStateEntry r m -> MaybeT (ExceptT String m) WxppInMsgHandlerResult
mk (WxppTalkerStateEntry state_proxy rx) = MaybeT $ ExceptT $
processInMsgByWxTalk
db_runner
state_proxy
(env, rx)
e_state_rec
ime
m_result <- runMaybeT $ asum $ map mk entries
case m_result of
Nothing -> do
$logWarnS wxppLogSource $ "no handler could handle talk state: state_id="
<> toPathPiece state_id
return []
Just x -> return x
{-
m_state_info <- ExceptT $ loadWxppTalkStateCurrent open_id
case m_state_info of
Nothing -> return []
Just (db_id, _ :: SomeWxppTalkState CommonTalkEnv (ReaderT SqlBackend m) ) -> do
ExceptT $ processInMsgByWxTalk
(mk_env $ wxppInFromUserName ime)
db_id
ime
--}
where
app_id = procAppIdInfoReceiverId app_info
open_id = wxppInFromUserName ime
-- | Message handler: when invoked it starts a new conversation.
-- The conversation state is given by the type parameter s.
-- Since it carries no condition of its own, it is usually combined with a predicate handler,
-- although the condition check can also be implemented inside wxTalkInitiate.
data WxppTalkInitiator r s = WxppTalkInitiator
{ wxppTalkInitDbRunner :: WxppDbRunner
  , wxppTalkInitEnv :: r       -- ^ environment value independent of the kind of conversation
  , wxppTalkInitStateEnv :: (WxppTalkStateExtraEnv s) -- ^ environment value specific to this kind of conversation
}
instance HasStateType s => JsonConfigable (WxppTalkInitiator r s) where
type JsonConfigableUnconfigData (WxppTalkInitiator r s) =
(WxppDbRunner, r, WxppTalkStateExtraEnv s)
isNameOfInMsgHandler _ x =
x == "initiate-talk:" <> getStateType (Proxy :: Proxy s)
parseWithExtraData _ (f1, f2, f3) _obj = return $ WxppTalkInitiator f1 f2 f3
type instance WxppInMsgProcessResult (WxppTalkInitiator r s) = WxppInMsgHandlerResult
instance
( HasStateType s, ToJSON s, FromJSON s, Eq s
, HasWxppAppID r
, WxTalkerDoneAction (r, r2) m s
, WxTalkerState (r, r2) m s
, WxTalkerFreshState (r, r2) m s
, r2 ~ WxppTalkStateExtraEnv s
, MonadLoggerIO m
) =>
IsWxppInMsgProcessor m (WxppTalkInitiator r s)
where
processInMsg (WxppTalkInitiator db_runner env extra_env) _cache _app_info _bs ime =
runExceptT $ do
let from_open_id = wxppInFromUserName ime
app_id = getWxppAppID env
log_func <- askLoggerIO
msgs_or_state <- flip runWxTalkerMonadE (env, extra_env) $ wxTalkInitiate ime
case msgs_or_state of
Left msgs -> do
-- cannot create conversation
return $ map ((False,) . Just) $ msgs
Right (state :: s) -> do
-- state_id <- lift $ newWxppTalkState' app_id from_open_id state
e_state <- liftIO . flip runLoggingT log_func . runWxppDB db_runner $ do
newWxppTalkState' app_id from_open_id state
ExceptT $ processJustInitedWxTalk db_runner
(Proxy :: Proxy s) (env, extra_env) e_state
-- | Differs from WxppTalkerStateEntry only in the additional WxTalkerFreshState requirement
data WxppTalkerFreshStateEntry r0 m = forall s r.
(Eq s, ToJSON s, FromJSON s, HasStateType s
, WxTalkerState (r0, r) m s
, WxTalkerDoneAction (r0, r) m s
, WxTalkerFreshState (r0, r) m s
) =>
WxppTalkerFreshStateEntry (Proxy s) r
wxppTalkerFreshStateEntryFromFull :: WxppTalkerFullStateEntry r m -> WxppTalkerFreshStateEntry r m
wxppTalkerFreshStateEntryFromFull (WxppTalkerFullStateEntry p x) = WxppTalkerFreshStateEntry p x
wxppTalkerFreshStateEntryToStateEntry :: WxppTalkerFreshStateEntry r m -> WxppTalkerStateEntry r m
wxppTalkerFreshStateEntryToStateEntry (WxppTalkerFreshStateEntry p x) = WxppTalkerStateEntry p x
-- | Message handler: when invoked it starts a new conversation.
-- The event key it accepts must have the form: initiate-talk:XXX
-- where XXX is the getStateType string of some conversation state.
-- It is similar to WxppTalkInitiator, except that:
-- * WxppTalkInitiator can only initiate one fixed kind of conversation,
--   while WxppTalkEvtKeyInitiator picks one out of a set of possibilities;
-- * WxppTalkInitiator carries no condition of its own,
--   while WxppTalkEvtKeyInitiator chooses the appropriate conversation type from the event key.
data WxppTalkEvtKeyInitiator r m = WxppTalkEvtKeyInitiator
{ wxppTalkEvtKeyInitDbRunner :: WxppDbRunner
  , wxppTalkEvtKeyInitEventEnv :: r      -- ^ environment value independent of the kind of conversation
, wxppTalkEvtKeyInitStateEntry :: [WxppTalkerFreshStateEntry r m]
}
instance JsonConfigable (WxppTalkEvtKeyInitiator r m) where
type JsonConfigableUnconfigData (WxppTalkEvtKeyInitiator r m) =
(WxppDbRunner, r, [WxppTalkerFreshStateEntry r m])
isNameOfInMsgHandler _ x = x == "evtkey-initiate-talk"
parseWithExtraData _ (f1, f2, f3) _obj = return $ WxppTalkEvtKeyInitiator f1 f2 f3
type instance WxppInMsgProcessResult (WxppTalkEvtKeyInitiator r m) = WxppInMsgHandlerResult
instance
( HasWxppAppID r
, MonadIO m, MonadLoggerIO m
) =>
IsWxppInMsgProcessor m (WxppTalkEvtKeyInitiator r m)
where
processInMsg (WxppTalkEvtKeyInitiator db_runner env entries) _cache _app_info _bs ime =
runExceptT $ do
case wxppInMessage ime of
WxppInMsgEvent (WxppEvtClickItem evtkey) -> do
case T.stripPrefix "initiate-talk:" evtkey of
Nothing -> return []
Just st_type -> do_work st_type
_ -> return []
where
do_work st_type = do
let match_st_type (WxppTalkerFreshStateEntry px _) = getStateType px == st_type
log_func <- askLoggerIO
case find match_st_type entries of
Nothing -> do
$logWarnS wxppLogSource $
"Failed to initiate talk from menu click,"
<> " because talk state type is unknown to me: "
<> st_type
return []
Just (WxppTalkerFreshStateEntry st_px extra_env) -> do
let from_open_id = wxppInFromUserName ime
app_id = getWxppAppID env
msgs_or_state <- flip runWxTalkerMonadE (env, extra_env) $
wxTalkInitiateBlank st_px from_open_id
case msgs_or_state of
Left msgs -> do
-- cannot create conversation
$logErrorS wxppLogSource $
"Couldn't create talk, providing error output messages: " <> tshow msgs
return $ map ((False,) . Just) $ msgs
Right state -> do
-- state_id <- lift $ newWxppTalkState' app_id from_open_id state
e_state <- liftIO $ flip runLoggingT log_func $ runWxppDB db_runner $
newWxppTalkState' app_id from_open_id state
ExceptT $ processJustInitedWxTalk db_runner st_px (env, extra_env) e_state
-- | Unlike the entries above, this one only requires a WxTalkerAbortAction for the state
data WxppTalkerAbortStateEntry r0 m = forall s r.
(Eq s, ToJSON s, FromJSON s, HasStateType s
, WxTalkerAbortAction (r0, r) m s
)
=> WxppTalkerAbortStateEntry (Proxy s) r
wxppTalkerAbortStateEntryFromFull :: WxppTalkerFullStateEntry r m -> WxppTalkerAbortStateEntry r m
wxppTalkerAbortStateEntryFromFull (WxppTalkerFullStateEntry p x) = WxppTalkerAbortStateEntry p x
-- | Message handler: when invoked it unconditionally terminates the current conversation
data WxppTalkTerminator r m = WxppTalkTerminator
{ wxppTalkTermDir :: (NonEmpty FilePath) -- ^ out-msg dir path
, wxppTalkTermDbRunner :: WxppDbRunner
, wxppTalkTermCommonEnv :: r -- ^ read only data/environment
, wxppTalkTermStateEntries :: [ WxppTalkerAbortStateEntry r m ]
-- , wxppTalkTermPrimary :: Bool -- ^ if primary
  -- once this handler has been invoked the database has most likely already been modified,
  -- so specifying whether it is primary no longer means much; it should always be treated as a primary response
  , wxppTalkTermOurMsg :: WxppOutMsgLoader -- ^ the message we intend to send back to the user
}
instance JsonConfigable (WxppTalkTerminator r m) where
type JsonConfigableUnconfigData (WxppTalkTerminator r m) = (NonEmpty FilePath, WxppDbRunner, r, [ WxppTalkerAbortStateEntry r m ])
isNameOfInMsgHandler _ x = x == "terminate-talk"
parseWithExtraData _ (f1, f2, f3, f4) obj =
WxppTalkTerminator f1 f2 f3 f4
-- <$> (obj .:? "primary" .!= True)
<$> parseWxppOutMsgLoader obj
type instance WxppInMsgProcessResult (WxppTalkTerminator r m) = WxppInMsgHandlerResult
instance (WxppApiMonad env m, MonadLoggerIO m, ExcSafe.MonadCatch m) =>
IsWxppInMsgProcessor m (WxppTalkTerminator r m) where
processInMsg (WxppTalkTerminator msg_dirs db_runner common_env entries get_outmsg) cache app_info _bs ime =
-- {{{1
runExceptT $ do
let from_open_id = wxppInFromUserName ime
m_out_msgs_abort <-
lift $ abortCurrentWxppTalkState db_runner common_env (\ x -> find (match_entry x) entries) WxTalkAbortByUser app_id from_open_id
liftM (fromMaybe []) $ forM m_out_msgs_abort $ \ out_msgs_abort -> do
let get_atk = (tryWxppWsResultE "getting access token" $ liftIO $
wxppCacheGetAccessToken cache app_id)
>>= maybe (throwError $ "no access token available") (return . fst)
outmsg_l <- ExceptT $ runDelayedYamlLoaderL msg_dirs get_outmsg
out_msg <- tryWxppWsResultE "fromWxppOutMsgL" $
tryYamlExcE $ fromWxppOutMsgL msg_dirs cache get_atk outmsg_l
return $ map ((primary,) . Just) $ out_msgs_abort <> [ out_msg ]
where
primary = True
app_id = procAppIdInfoReceiverId app_info
match_entry typ_str (WxppTalkerAbortStateEntry p _) = getStateType p == typ_str
-- }}}1
processInMsgByWxTalk :: (HasStateType s, Eq s, FromJSON s, ToJSON s
, MonadLoggerIO m
-- , WxppApiMonad env m
, WxTalkerState r m s
, WxTalkerDoneAction r m s
)
=> WxppDbRunner
-> Proxy s
-> r
-> Entity WxppTalkState
-> WxppInMsgEntity
-> m (Either String (Maybe WxppInMsgHandlerResult))
processInMsgByWxTalk db_runner state_proxy env (Entity state_id state_rec) ime = do
let state_type = wxppTalkStateTyp state_rec
if (state_type /= getStateType state_proxy)
then return $ Right Nothing
else do
      -- handling the conversation does not necessarily update the state,
      -- so bump the update time to mark this conversation as not idle
now <- liftIO getCurrentTime
log_func <- askLoggerIO
liftIO $ flip runLoggingT log_func $ runWxppDB db_runner $
update state_id [ WxppTalkStateUpdatedTime =. now ]
liftM (fmap Just) $
wxTalkerInputProcessInMsg
get_st set_st
env
(Just ime)
where
set_st _open_id = saveAnyWxppTalkState db_runner state_id
get_st _open_id = mkWxTalkerMonad $ \_ -> loadAnyWxppTalkState db_runner state_proxy state_id
processJustInitedWxTalk :: ( MonadLoggerIO m
, Eq s, FromJSON s, ToJSON s, HasStateType s
, WxTalkerDoneAction r m s
, WxTalkerState r m s
)
=> WxppDbRunner
-> Proxy s
-> r
-> WxppTalkStateId
-> m (Either String WxppInMsgHandlerResult)
processJustInitedWxTalk db_runner state_proxy env state_id = runExceptT $ do
ExceptT $ wxTalkerInputProcessJustInited get_st set_st env
where set_st = saveAnyWxppTalkState db_runner state_id
get_st = mkWxTalkerMonad $ \_ -> loadAnyWxppTalkState db_runner state_proxy state_id
-- vim: set foldmethod=marker:
| yoo-e/weixin-mp-sdk | WeiXin/PublicPlatform/Conversation/Yesod.hs | mit | 26,066 | 1 | 27 | 8,515 | 5,391 | 2,736 | 2,655 | -1 | -1 |
module Exercise where
perfect :: Int -> Bool
perfect n = sum (delers n) == n
delers :: Int -> [Int]
delers n = [x | x <- [1..n-1], n `mod` x == 0]
perfectTill :: Int -> [Int]
perfectTill n = filter perfect [1..n]
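-- Examples (added for illustration, doctest style):
--
-- >>> perfect 6
-- True
-- >>> perfectTill 10000
-- [6,28,496,8128]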
| tcoenraad/functioneel-programmeren | 2012/opg1a.hs | mit | 217 | 0 | 9 | 49 | 118 | 64 | 54 | 7 | 1 |
module Graphics.UI.Gtk.WebKit.WebView.Concrete (
titleChanged,
resourceRequestStarting
) where
import Graphics.UI.Gtk.WebKit.NetworkRequest (NetworkRequest)
import Graphics.UI.Gtk.WebKit.NetworkResponse (NetworkResponse)
import Graphics.UI.Gtk.WebKit.WebFrame (WebFrame)
import Graphics.UI.Gtk.WebKit.WebResource (WebResource)
import Graphics.UI.Gtk.WebKit.WebView (WebView)
import qualified Graphics.UI.Gtk.WebKit.WebView as Signal (titleChanged, resourceRequestStarting)
import System.Glib.Signals (Signal)
titleChanged :: Signal WebView (WebFrame -> String -> IO ())
titleChanged = Signal.titleChanged
resourceRequestStarting :: Signal WebView (WebFrame -> WebResource -> Maybe NetworkRequest -> Maybe NetworkResponse -> IO ())
resourceRequestStarting = Signal.resourceRequestStarting
| fmap/hwb | src/Graphics/UI/Gtk/WebKit/WebView/Concrete.hs | mit | 795 | 0 | 12 | 71 | 193 | 119 | 74 | 14 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
module Examples.Rpc.EchoServer (main) where
import Network.Simple.TCP (serve)
import Capnp.New (SomeServer, def, defaultLimit, export, handleParsed)
import Capnp.Rpc (ConnConfig(..), handleConn, socketTransport, toClient)
import Capnp.Gen.Echo.New
data MyEchoServer = MyEchoServer
instance SomeServer MyEchoServer
instance Echo'server_ MyEchoServer where
echo'echo MyEchoServer = handleParsed $ \params ->
pure def { reply = query params }
main :: IO ()
main = serve "localhost" "4000" $ \(sock, _addr) ->
handleConn (socketTransport sock defaultLimit) def
{ debugMode = True
, getBootstrap = \sup -> Just . toClient <$> export @Echo sup MyEchoServer
}
| zenhack/haskell-capnp | examples/lib/Examples/Rpc/EchoServer.hs | mit | 817 | 0 | 13 | 152 | 215 | 124 | 91 | 18 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Concurrent (threadDelay)
import Control.Monad (forever)
import Control.Monad.IO.Class (liftIO)
import Data.Text (Text)
import System.Environment (getArgs)
import System.Random (randomRIO)
import HBar
main :: IO ()
main = do
selection <- head <$> getArgs
case selection of
"fruit" -> runFruitPopulation
"gop" -> runGopPrimarySimulation
_ -> print ("fruit or gop" :: String)
runFruitPopulation :: IO ()
runFruitPopulation =
runHBar "http://localhost:8888" "Fruit popularity by country"
fruitPopulation
fruitPopulation :: HBar ()
fruitPopulation = do
addItemWithCategory "Apple" "Sweden" 88
addItemWithCategory "Orange" "Sweden" 110
addItemWithCategory "Banana" "Sweden" 23
addItemWithCategory "Apple" "Norway" 67
addItemWithCategory "Orange" "Norway" 15
addItemWithCategory "Banana" "Norway" 90
addItemWithCategory "Pineapple" "Denmark" 45
addItemWithCategory "Apple" "Denmark" 19
addItemWithCategory "Orange" "Denmark" 46
addItemWithCategory "Banana" "Denmark" 8
commit
runGopPrimarySimulation :: IO ()
runGopPrimarySimulation =
runHBar "http://localhost:8888" "GOP primary poll's simulation"
gopPrimarySimulation
gopPrimarySimulation :: HBar ()
gopPrimarySimulation =
forever $ do
state <- selectFrom states
candidate <- selectFrom candidates
vote <- fromIntegral <$> selectFrom votes
addItemWithCategory candidate state vote
commit
wait
selectFrom :: [a] -> HBar a
selectFrom xs = do
ind <- liftIO $ randomRIO (0, length xs - 1)
return $ xs !! ind
states :: [Text]
states = [ "Arizona", "Ohio", "Oregon", "Florida", "Texas"
, "Nevada", "North Carolina", "Washington" ]
candidates :: [Text]
candidates = [ "Trump", "Cruz", "Kasich", "Fiorina"
, "Rubio", "Bush", "Carson" ]
votes :: [Int]
votes = [1..25]
wait :: HBar ()
wait = liftIO $ threadDelay 1000000
| kosmoskatten/plotly-hs | plotly-hbar-demo/src/Main.hs | mit | 1,971 | 0 | 12 | 379 | 533 | 273 | 260 | 60 | 3 |
-- Statistics for an Athletic Association
-- http://www.codewars.com/kata/55b3425df71c1201a800009c/
module Codewars.G964.Stat where
import Data.List (sort, intercalate)
import Data.List.Split (split, dropDelims, oneOf)
import Text.Printf (printf)
stat :: String -> String
stat "" = ""
stat results = present . map ((sum . zipWith (*) [3600, 60, 1]) . map (\s -> read s :: Double) . split (dropDelims $ oneOf "|")) . split (dropDelims $ oneOf ",") $ results
where present xs = "Range: " ++ (formatT . range $ xs) ++ " Average: " ++ (formatT . mean $ xs) ++ " Median: " ++ (formatT . median $ xs)
range xs = maximum xs - minimum xs
mean xs = sum xs / (fromIntegral . length $ xs)
median xs | odd n = head $ drop (n `div` 2) xs'
| even n = mean $ take 2 $ drop i xs'
where i = (length xs' `div` 2) - 1
xs' = sort xs
n = length xs
formatT x = intercalate "|" . map (printf "%02d") $ [h, m, s]
where t = truncate x :: Int
h = t `div` 3600
m = (t `div` 60) `mod` 60
s = t `mod` 60
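-- A worked example, added for illustration (two runs of 10s and 20s):
--
-- >>> stat "0|0|10, 0|0|20"
-- "Range: 00|00|10 Average: 00|00|15 Median: 00|00|15"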
| gafiatulin/codewars | src/6 kyu/Stat.hs | mit | 1,191 | 0 | 15 | 413 | 467 | 250 | 217 | 20 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module System.Etc.Resolver.Cli.CommandTest where
import RIO
import qualified RIO.Set as Set
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (assertBool, assertEqual, assertFailure, testCase)
import System.Etc
with_command_option_tests :: TestTree
with_command_option_tests = testGroup
"option input"
[ testCase "entry accepts short" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"long\": \"greeting\""
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
(cmd, config) <- resolveCommandCliPure spec "program" ["test", "-g", "hello cli"]
assertEqual "invalid command output" "test" cmd
case getAllConfigSources ["greeting"] config of
Nothing -> assertFailure ("expecting to get entries for greeting\n" <> show config)
Just aSet -> assertBool ("expecting to see entry from env; got " <> show aSet)
(Set.member (Cli "hello cli") aSet)
, testCase "entry accepts long" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"long\": \"greeting\""
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
(cmd, config) <- resolveCommandCliPure spec
"program"
["test", "--greeting", "hello cli"]
assertEqual "invalid command output" "test" cmd
case getAllConfigSources ["greeting"] config of
Nothing -> assertFailure ("expecting to get entries for greeting\n" <> show config)
Just aSet -> assertBool ("expecting to see entry from env; got " <> show aSet)
(Set.member (Cli "hello cli") aSet)
, testCase "entry gets validated with a type" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"number\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"long\": \"greeting\""
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
case resolveCommandCliPure spec "program" ["test", "--greeting", "hello cli"] of
Left err -> case fromException err of
Just CliEvalExited{} -> return ()
_ -> assertFailure ("Expecting type validation to work on cli; got " <> show err)
Right _ -> assertFailure "Expecting type validation to work on cli"
, testCase "entry with required false does not barf" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test1\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"long\": \"greeting\""
, " , \"required\": false"
, " , \"commands\": [\"test1\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
(cmd, config) <- resolveCommandCliPure spec "program" ["test1"]
assertEqual "invalid command output" "test1" cmd
case getConfigValue ["greeting"] config of
Just aSet ->
assertFailure ("expecting to have no entry for greeting; got\n" <> show aSet)
(_ :: Maybe ()) -> return ()
, testCase "entry with required fails when option not given" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"long\": \"greeting\""
, " , \"required\": true"
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
case resolveCommandCliPure spec "program" ["test"] of
Left err -> case fromException err of
Just CliEvalExited{} -> return ()
_ ->
assertFailure ("Expecting required validation to work on cli; got " <> show err)
Right _ -> assertFailure "Expecting required option to fail cli resolving"
]
with_command_argument_tests :: TestTree
with_command_argument_tests = testGroup
"argument input"
[ testCase "entry gets validated with a type" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"number\""
, " , \"cli\": {"
, " \"input\": \"argument\""
, " , \"metavar\": \"GREETING\""
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
case resolveCommandCliPure spec "program" ["test", "hello cli"] of
Left err -> case fromException err of
Just CliEvalExited{} -> return ()
_ -> assertFailure ("Expecting type validation to work on cli; got " <> show err)
Right _ -> assertFailure "Expecting type validation to work on cli"
, testCase "entry with required false does not barf" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"argument\""
, " , \"metavar\": \"GREETING\""
, " , \"required\": false"
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
(cmd, config) <- resolveCommandCliPure spec "program" ["test"]
assertEqual "invalid command output" "test" cmd
case getConfigValue ["greeting"] config of
(Nothing :: Maybe ()) -> return ()
Just aSet ->
assertFailure ("expecting to have no entry for greeting; got\n" <> show aSet)
, testCase "entry with required fails when argument not given" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"argument\""
, " , \"metavar\": \"GREETING\""
, " , \"required\": true"
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
case resolveCommandCliPure spec "program" ["test"] of
Left err -> case fromException err of
Just CliEvalExited{} -> return ()
_ ->
assertFailure ("Expecting required validation to work on cli; got " <> show err)
Right _ -> assertFailure "Expecting required argument to fail cli resolving"
, testCase "supports same cli input on multiple arguments" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}"
, " , \"other\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"metavar\": \"GREETING\""
, " , \"required\": false"
, " , \"commands\": [\"test\", \"other\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
(cmd1, config1) <- resolveCommandCliPure spec "program" ["test", "-g", "hello"]
(cmd2, config2) <- resolveCommandCliPure spec "program" ["other", "-g", "hello"]
assertEqual "" "test" cmd1
assertEqual "" "other" cmd2
assertEqual "" config1 config2
]
with_command :: TestTree
with_command =
testGroup "when command given" [with_command_option_tests, with_command_argument_tests]
without_command :: TestTree
without_command = testCase "fails when command not given" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"metavar\": \"GREETING\""
, " , \"required\": true"
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
case resolveCommandCliPure spec "program" [] of
Left err -> case fromException err of
Just CliEvalExited{} -> return ()
_ -> assertFailure ("Expecting sub-command to be required; got " <> show err)
Right _ -> assertFailure "Expecting sub-command to be required; it wasn't"
tests :: TestTree
tests = testGroup "command" [with_command, without_command]
| roman/Haskell-etc | etc/test/System/Etc/Resolver/Cli/CommandTest.hs | mit | 11,878 | 0 | 19 | 4,345 | 1,940 | 1,016 | 924 | 263 | 8 |
{-# LANGUAGE Arrows, NoMonomorphismRestriction, RebindableSyntax #-}
module System.ArrowVHDL.Circuit.Defaults where
import Control.Category
import Prelude hiding (id, (.))
import qualified Data.Bits as B -- (shiftL, shiftR, xor, (.&.))
import System.ArrowVHDL.Circuit
import System.ArrowVHDL.Circuit.Grid
import System.ArrowVHDL.Circuit.Arrow
import System.ArrowVHDL.Circuit.Auxillary
import System.ArrowVHDL.Circuit.Descriptor
import System.ArrowVHDL.Circuit.Graphs
import System.ArrowVHDL.Circuit.Show
type KeyChunk = Int
type ValChunk = Int
type Key = (KeyChunk, KeyChunk, KeyChunk, KeyChunk)
type KeyHalf = (KeyChunk, KeyChunk)
type Value = (ValChunk, ValChunk)
-- xor :: Bool -> Bool -> Bool
-- xor x y | x == True && y == False = True
-- | x == False && y == True = True
-- | otherwise = False
oneNodeCircuit :: String -> CircuitDescriptor
oneNodeCircuit s = emptyCircuit { nodeDesc = emptyNodeDesc { label = s } }
aId :: (Arrow a) => Grid a b b
aId
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "ID"
, sinks = mkPins 1
, sources = mkPins 1
}
, cycles = 1
, space = 1
}
$ arr id
aConst :: (Arrow a, Show b) => b -> Grid a c b
aConst x
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "CONST_" ++ (show x)
, sinks = mkPins 1 -- a sink is needed for the rewire-function to work properly (TODO: is this ok?)
, sources = mkPins 1
}
, cycles = 0
, space = 1
}
$ arr (const x)
(.&.) :: Bool -> Bool -> Bool
True .&. True = True
_ .&. _ = False
(.|.) :: Bool -> Bool -> Bool
False .|. False = False
_ .|. _ = True
xor :: Bool -> Bool -> Bool
xor True False = True
xor False True = True
xor _ _ = False
-- shiftL8 :: (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool))))))))
-- -> Int
-- -> (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool))))))))
-- shiftL8 (x1, (x2, (x3, (x4, (x5, (x6, (x7, (x8)))))))) i
-- | i == 0
-- = (x1, (x2, (x3, (x4, (x5, (x6, (x7, (x8))))))))
-- | i == 1
-- = (x2, (x3, (x4, (x5, (x6, (x7, (x8, (False))))))))
-- | i == 2
-- = (x3, (x4, (x5, (x6, (x7, (x8, (False, (False))))))))
-- | i == 3
-- = (x4, (x5, (x6, (x7, (x8, (False, (False, (False))))))))
-- | i == 4
-- = (x5, (x6, (x7, (x8, (False, (False, (False, (False))))))))
-- | i == 5
-- = (x6, (x7, (x8, (False, (False, (False, (False, (False))))))))
-- | i == 6
-- = (x7, (x8, (False, (False, (False, (False, (False, (False))))))))
-- | i == 7
-- = (x8, (False, (False, (False, (False, (False, (False, (False))))))))
-- | i == 8
-- = (False, (False, (False, (False, (False, (False, (False, (False))))))))
-- shiftL = shiftL8
-- shiftR8 :: (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool))))))))
-- -> Int
-- -> (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool))))))))
-- shiftR8 (x1, (x2, (x3, (x4, (x5, (x6, (x7, (x8)))))))) i
-- | i == 0
-- = (x1, (x2, (x3, (x4, (x5, (x6, (x7, (x8))))))))
-- | i == 1
-- = (False, (x1, (x2, (x3, (x4, (x5, (x6, (x7))))))))
-- | i == 2
-- = (False, (False, (x1, (x2, (x3, (x4, (x5, (x6))))))))
-- | i == 3
-- = (False, (False, (False, (x1, (x2, (x3, (x4, (x5))))))))
-- | i == 4
-- = (False, (False, (False, (False, (x1, (x2, (x3, (x4))))))))
-- | i == 5
-- = (False, (False, (False, (False, (False, (x1, (x2, (x3))))))))
-- | i == 6
-- = (False, (False, (False, (False, (False, (False, (x1, (x2))))))))
-- | i == 7
-- = (False, (False, (False, (False, (False, (False, (False, (x1))))))))
-- | i == 8
-- = (False, (False, (False, (False, (False, (False, (False, (False))))))))
-- shiftR = shiftR8
-- aAnd :: (Arrow a, Bits b) => Grid a (b, b) (b)
aAnd :: (Arrow a) => Grid a (Bool, Bool) (Bool)
aAnd
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "AND"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (uncurry (.&.))
-- aOr :: (Arrow a, Bits b) => Grid a (b, b) (b) -- :: (Arrow a) => Grid a (Bool, Bool) (Bool)
aOr :: (Arrow a) => Grid a (Bool, Bool) (Bool)
aOr
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "OR"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (uncurry (.|.))
aNot :: (Arrow a) => Grid a (Bool) (Bool)
aNot
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "NOT"
, sinks = mkPins 1
, sources = mkPins 1
}
, cycles = 1
, space = 2
}
$ arr (not)
aBXor :: (Arrow a, B.Bits b) => Grid a (b, b) (b) -- :: (Arrow a) => Grid a (Bool, Bool) (Bool)
aBXor
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "XOR"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (uncurry B.xor)
aXor :: (Arrow a) => Grid a (Bool, Bool) (Bool)
aXor
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "XOR"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (uncurry xor)
-- aFst :: (Arrow a, Bits b) => Grid a (b, c) (b)
aFst :: (Arrow a) => Grid a (b, c) (b)
aFst
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "FST"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (fst)
-- aSnd :: (Arrow a, Bits c) => Grid a (b, c) (c)
aSnd :: (Arrow a) => Grid a (b, c) (c)
aSnd
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SND"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (snd)
aShiftL :: (Arrow a, B.Bits b) => Grid a (b, Int) (b)
-- aShiftL :: (Arrow a) => Grid a ((Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool))))))), Int) (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool)))))))
aShiftL
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SHIFTL"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 6
}
$ arr (uncurry B.shiftL)
aShiftR :: (Arrow a, B.Bits b) => Grid a (b, Int) (b)
-- aShiftR :: (Arrow a) => Grid a ((Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool))))))), Int) (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool)))))))
aShiftR
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SHIFTR"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 6
}
$ arr (uncurry B.shiftR)
aAdd :: (Arrow a, Num b) => Grid a (b, b) (b)
aAdd
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "ADD"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (uncurry (+))
aFlip :: (Arrow a) => Grid a (b, c) (c, b)
aFlip
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "FLIP"
, sinks = mkPins 2
, sources = mkPins 2
}
, cycles = 1
, space = 4
}
$ arr (\(x, y) -> (y, x))
aSwapSnd :: (Arrow a) => Grid a ((b, c), d) ((b, d), c)
aSwapSnd
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SWPSND"
, sinks = mkPins 2
, sources = mkPins 2
}
, cycles = 1
, space = 6
}
$ arr (\((x, y), z) -> ((x, z), y))
aAssocRight = a_ABc2aBC
aAssocLeft = a_aBC2ABc
a_ABc2aBC :: (Arrow a) => Grid a ((b, c), d) (b, (c, d))
a_ABc2aBC
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "ABc2aBC"
, sinks = mkPins 2
, sources = mkPins 2
}
, cycles = 1
, space = 6
}
$ arr (\((x, y), z) -> (x, (y, z)))
a_aBC2ABc :: (Arrow a) => Grid a (b, (c, d)) ((b, c), d)
a_aBC2ABc
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "aBC2ABc"
, sinks = mkPins 2
, sources = mkPins 2
}
, cycles = 1
, space = 6
}
$ arr (\(x, (y, z)) -> ((x, y), z))
-- |'aDistr' defines an distributivity of an expression ...
-- (x,(a,b)) -> ((x,a), (x,b))
-- aDistr :: (Arrow a, Bits b, Bits c, Bits d) => Grid a (b, (c, d)) ((b, c), (b, d))
aDistr :: (Arrow a) => Grid a (b, (c, d)) ((b, c), (b, d))
aDistr
= aDup
>>> second aFst *** second aSnd
-- |'aDdistr' is the reverse operation to the Distr operation
-- aDdistr :: (Arrow a, Bits b, Bits c, Bits d, Bits e) => Grid a ((b, c), (d, e)) ((b, d), (c, e))
aDdistr :: (Arrow a) => Grid a ((b, c), (d, e)) ((b, d), (c, e))
aDdistr
= aSwapSnd
>>> a_aBC2ABc *** aId
>>> a_ABc2aBC
>>> aId *** aFlip
aShiftL4 :: (Arrow a, B.Bits b) => Grid a b b
-- aShiftL4 :: (Arrow a) => Grid a (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool))))))) (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool)))))))
aShiftL4
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SHIFTL4"
, sinks = mkPins 1
, sources = mkPins 1
}
, cycles = 1
, space = 6
}
$ arr (flip B.shiftL 4)
aShiftR5 :: (Arrow a, B.Bits b) => Grid a b b
-- aShiftR5 :: (Arrow a) => Grid a (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool))))))) (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool)))))))
aShiftR5
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SHIFTR5"
, sinks = mkPins 1
, sources = mkPins 1
}
, cycles = 1
, space = 6
}
$ arr (flip B.shiftR 5)
-- aShiftL4addKey :: (Arrow a) => Grid a (ValChunk, KeyChunk) Int
-- aShiftL4addKey
-- = first aShiftL4
-- >>> aAdd
-- aShiftR5addKey :: (Arrow a) => Grid a (ValChunk, KeyChunk) Int
-- aShiftR5addKey
-- = first aShiftR5
-- >>> aAdd
--- NOTE: A nice problem turned up here:
-- since arr ... >>> aAdd is used further below, and arr ... has type Arrow a
-- while aAdd has type Grid a, there is a type mismatch... either aAdd has to be broken
-- down to Arrow a, or arr ... has to be lifted into a Grid beforehand :)
--
-- Either way, nice ;)
--aXorMagic :: (Arrow a) => Grid a ValChunk Int
aXorMagic
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "ADDMAGIC"
, sinks = mkPins 1
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (\x -> (x, 2654435769)) >>> aBXor
--aDup :: (Arrow a) => Grid a b (b, b)
aDup
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "DUP"
, sinks = mkPins 1
, sources = mkPins 2
}
, cycles = 1
, space = 4
}
$ arr (\(x) -> (x, x))
aRegister :: (Arrow a) => Grid a b b
aRegister
= augment
( mkRegister $ emptyNodeDesc
{ sinks = mkPins 1
, sources = mkPins 1
}
)
$ arr id
-- aL_headtail :: (Arrow a) => Grid a ([b]) (b, [b])
-- aL_headtail
-- = augment
-- emptyCircuit
-- { nodeDesc = emptyNodeDesc
-- { label = "listHEADTAIL"
-- , sinks = mkPins 1
-- , sources = mkPins 2
-- }
-- , cycles = 2
-- , space = 16
-- }
-- $ arr (\(x:xs) -> (x, xs))
| frosch03/arrowVHDL | src/System/ArrowVHDL/Circuit/Defaults.hs | cc0-1.0 | 12,752 | 0 | 13 | 4,855 | 2,686 | 1,590 | 1,096 | 266 | 1 |
{- Circuit patterns transliterated from Lava / the Haskell Prelude / ...
- Copyright : (C)opyright 2006, 2009-2011 peteg42 at gmail dot com
- License : GPL (see COPYING for details)
-
- We count from /1/ to /n/, i.e. the first element of the list has index 1.
- FIXME try to remove all the ~'s.
-}
module ADHOC.Patterns
( mapA
, mapAn
, mapAC
, mapACn
, rowA
, rowAn
, zipWithA
, zipWith3A
, foldrA
, foldr1A
, foldr2A
, foldrAC
, conjoinA
, disjoinA
) where
-------------------------------------------------------------------
-- Dependencies.
-------------------------------------------------------------------
import Prelude ()
import ADHOC.Circuits
-------------------------------------------------------------------
-- Some extra Arrow combinators.
-- All are statically parameterised by the length of the list.
-------------------------------------------------------------------
-- | Similar to 'mapAC', where the constituents are a function of
-- their list index.
--
-- FIXME I don't think this can be used as a command combinator due to the
-- @Integer@ argument.
mapACn :: Arrow (~>) => Integer -> (Integer -> ((env, b) ~> c)) -> ((env, [b]) ~> [c])
mapACn n farr = go 1
where
go i | i == succ n = proc (_env, []) -> returnA -< []
| otherwise = proc (env, b : bs) ->
(| (liftA2 (:)) (farr i -< (env, b)) (go (succ i) -< (env, bs)) |)
-- | Map an arrow that also takes an environment over a list of statically
-- known length /n/; the same environment is shared by every element.
mapAC :: Arrow (~>) => Integer -> ((env, b) ~> c) -> ((env, [b]) ~> [c])
mapAC n farr = mapACn n (const farr)
-- | Similar to 'mapA', where the constituents are a function of their
-- list index.
mapAn :: Arrow (~>) => Integer -> (Integer -> (b ~> c)) -> ([b] ~> [c])
mapAn n farr = arr (\bs -> ((), bs)) >>> mapACn n (\i -> arr snd >>> farr i)
mapA :: Arrow (~>) => Integer -> (b ~> c) -> ([b] ~> [c])
mapA n farr = mapAn n (const farr)
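-- A hedged example at the plain function arrow (not part of the original
-- module; assumes @Arrow (->)@ and that ADHOC.Circuits's 'liftA2' is the usual
-- arrow lift):
--
-- > mapAn 3 (\i -> arr (+ i)) [10,20,30] == [11,22,33] -- indices start at 1
-- > mapA 3 (arr negate) [1,2,3] == [-1,-2,-3]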
-- | A variant version of "mapAccumL" I found useful. The constituents
-- are a function of their list index.
-- This is Lava's row, slightly mangled.
-- It does not enforce that the input list has length /n/.
rowAn :: Arrow (~>)
=> Integer
-> (Integer -> (a, b) ~> (a, c))
-> (a, [b]) ~> (a, [c])
rowAn n farr = go 1
where
go i | i == succ n = second (arr (const []))
| otherwise = proc (a, ~(b : bs)) ->
do (a', c) <- farr i -< (a, b)
(a'', cs) <- go (succ i) -< (a', bs)
returnA -< (a'', c : cs)
-- | A version of "mapAccumL" I found useful.
-- This is Lava's row, slightly mangled.
rowA :: Arrow (~>)
=> Integer
-> (a, b) ~> (a, c)
-> (a, [b]) ~> (a, [c])
rowA n farr = rowAn n (const farr)
-- | Zip an arrow over a pair of lists.
zipWithA :: Arrow (~>) => Integer -> ((b, c) ~> d) -> (([b], [c]) ~> [d])
zipWithA n0 f = go n0
where
go n = case n of
0 -> arr (\([], []) -> [])
_ -> arr (\ (s:ss, s':ss') -> ((s, s'), (ss, ss')))
>>> (f *** go (pred n))
>>> arr2 (:)
-- | Zip an arrow over a triple of lists.
zipWith3A :: Arrow (~>) => Integer -> ((b, c, d) ~> e) -> (([b], [c], [d]) ~> [e])
zipWith3A 0 _f = arr (\([], [], []) -> [])
zipWith3A n f =
arr (\ ~(s:ss, s':ss', s'':ss'') -> ((s, s', s''), (ss, ss', ss'')))
>>> (f *** zipWith3A (pred n) f)
>>> arr2 (:)
-- | Right fold.
foldrA :: Arrow (~>)
=> Integer
-> (b, c) ~> b
-> () ~> b
-> [c] ~> b
foldrA 0 _f z = proc [] -> z -< ()
foldrA n f z = proc (c : cs) ->
do b <- foldrA (pred n) f z -< cs
f -< (b, c)
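-- A hedged example at the plain function arrow (illustration only):
--
-- > foldrA 3 (arr (uncurry (+))) (arr (const 0)) [1,2,3] == 6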
-- | Right fold, no base case.
foldr1A :: Arrow (~>)
=> Integer
-> (b, b) ~> b
-> [b] ~> b
foldr1A 0 _f = error "foldr1A not defined for empty lists"
foldr1A 1 _f = arr (\[x] -> x)
foldr1A n f = proc (c : cs) ->
do b <- foldr1A (pred n) f -< cs
f -< (b, c)
-- | Right fold over a pair of lists.
-- FIXME as fusion doesn't work, we hand fuse in zip.
foldr2A :: Arrow (~>)
=> Integer
-> (b, (c, d)) ~> b
-> () ~> b
-> ([c], [d]) ~> b
foldr2A 0 _f z = proc ([], []) -> z -< ()
foldr2A n f z = proc ~(c : cs, d : ds) ->
do b <- foldr2A (pred n) f z -< (cs, ds)
f -< (b, (c, d))
-- | Right fold where the step and base arrows also receive a shared environment.
foldrAC :: Arrow (~>)
=> Integer
-> (env, (b, c)) ~> b
-> env ~> b
-> (env, [c]) ~> b
foldrAC 0 _f z = proc (env, []) -> z -< env
foldrAC n f z = proc ~(env, c : cs) ->
do b <- foldrAC (pred n) f z -< (env, cs)
f -< (env, (b, c))
conjoinA :: ArrowComb (~>) => Integer -> ([B (~>)] ~> B (~>))
conjoinA n = foldrA n andA trueA
disjoinA :: ArrowComb (~>) => Integer -> ([B (~>)] ~> B (~>))
disjoinA n = foldrA n orA falseA
| peteg/ADHOC | ADHOC/Patterns.hs | gpl-2.0 | 4,686 | 14 | 16 | 1,320 | 1,968 | 1,089 | 879 | -1 | -1 |
-- Cyclic nand, constructive with input 'false'.
module T where
import Tests.Basis
c = combLoop (andA >>> notA >>> arr dupA)
prop_correct = property (\xs -> simulate c xs == map res xs)
where res x = if x == false then true else bottom
test_constructive = isNothing (isConstructive c)
| peteg/ADHOC | Tests/01_CyclicCircuits/010_nand.hs | gpl-2.0 | 294 | 0 | 9 | 59 | 98 | 52 | 46 | 6 | 2 |
{-# LANGUAGE FlexibleContexts, TypeFamilies, DeriveGeneric, DeriveAnyClass, OverloadedStrings #-}
module Model (
Color (..)
, Number (..)
, Shape (..)
, Fill (..)
, Card (..)
, Game (..)
, cardDeck
, isSolution
, initGame
, removeCards
, solutionCards
, solutions
) where
import Test.QuickCheck
import Control.DeepSeq
import GHC.Generics (Generic)
import Data.Typeable (Typeable)
import Control.Monad.Random
import Control.Monad.State
import Data.List
import System.Random.Shuffle
import Data.Aeson
data Color = Red | Green | Blue deriving (Enum, Eq, NFData, Generic)
data Number = One | Two | Three deriving (Enum, Eq, NFData, Generic)
data Shape = Circle | Diamond | Box deriving (Enum, Eq, NFData, Generic)
data Fill = Empty | Half | Full deriving (Enum, Eq, NFData, Generic)
instance Arbitrary Color where
arbitrary = toEnum <$> choose (0, 2)
instance Arbitrary Shape where
arbitrary = toEnum <$> choose (0, 2)
instance Arbitrary Number where
arbitrary = toEnum <$> choose (0, 2)
instance Arbitrary Fill where
arbitrary = toEnum <$> choose (0, 2)
data Card = Card {
cardColor :: Color
, cardNumber :: Number
, cardShape :: Shape
, cardFill :: Fill
} deriving (Eq, Typeable, NFData, Generic)
instance Arbitrary Card where
arbitrary = Card <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
data Game = Game {
gameAll :: [Card]
, gameDealt :: [Card]
, gameConsumed :: [Card]
} deriving (Generic, NFData)
instance Arbitrary Game where
arbitrary = Game <$> arbitrary <*> arbitrary <*> arbitrary
-- TODO The three sets must be disjoint
instance Show Color where
show Red = "R"
show Green = "G"
show Blue = "B"
instance Show Number where
show One = "1"
show Two = "2"
show Three = "3"
instance Show Shape where
show Circle = "o"
show Diamond = "d"
show Box = "b"
instance Show Fill where
show Empty = "E"
show Half = "C"
show Full = "O"
instance Show Card where
show (Card c n s f) = "(" ++ show c ++ " " ++ show n ++ " " ++ show s ++ " " ++ show f ++ ")"
instance Bounded Card where
minBound = (toEnum cardMinBound)
maxBound = (toEnum cardMaxBound)
instance Show Game where
show (Game a (a1:a2:a3:b1:b2:b3:c1:c2:c3:d1:d2:d3:[]) c) =
unlines [show a,
unlines (map unwords [
map show [a1, a2, a3]
, map show [b1, b2, b3]
, map show [c1, c2, c3]
, map show [d1, d2, d3]])
, show c
, "all: " ++ show (length a)
, "consumed: " ++ show (length c)]
show (Game a d c) = "Game { gameAll = " ++ show a ++ ", gameDealt = " ++ show d ++ ", gameConsumed = " ++ show c ++ " }"
instance Enum Card where
toEnum i = if i >= cardMinBound && i <= cardMaxBound
then cardDeck !! i
else error $ "toEnum{Game}: tag (" ++ show i ++ ") is outside of enumeration's range (" ++ show cardMinBound ++ ", " ++ show cardMaxBound ++ ")"
fromEnum c = case findIndex (==c) cardDeck of
Nothing -> error $ "fromEnum{Game}: Card does not exist: " ++ show c
(Just i) -> i
instance ToJSON Card
instance FromJSON Card
instance ToJSON Game
instance FromJSON Game
instance FromJSON Fill
instance ToJSON Fill
instance FromJSON Color
instance ToJSON Color
instance FromJSON Shape
instance ToJSON Shape
instance FromJSON Number
instance ToJSON Number
cardDeck = [Card c n s f
| c <- [Red .. Blue],
n <- [One .. Three],
s <- [Circle .. Box],
f <- [Empty .. Full]]
cardMinBound = 0
cardMaxBound = (length cardDeck) - 1
initGame :: (MonadRandom m) => m Game
initGame = do
dealt <- getDealt
a <- shuffleM cardDeck
return $ Game (filter (\x -> x `notElem` dealt) a) dealt []
where getDealt :: (MonadRandom m) => m [Card]
getDealt = do
d <- (map toEnum) `fmap` randomList 12
if null $ filter isSolution (allCombinations d)
then getDealt
else return d
removeCards :: [Card] -> Game -> Game
removeCards cs (Game a d u) = Game newAll newDealt newUsed
where newAll = filter (\x -> x `notElem` newDealt) a
newUsed = u ++ cs
newDealt = rmCards ++ (take 3 (solutionCards rmCards a))
rmCards = (filter (\x -> x `notElem` cs) d)
isSolution :: (Card, Card, Card) -> Bool
isSolution ((Card c1 n1 s1 f1), (Card c2 n2 s2 f2), (Card c3 n3 s3 f3)) =
m (fromEnum c1) (fromEnum c2) (fromEnum c3)
&& m (fromEnum n1) (fromEnum n2) (fromEnum n3)
&& m (fromEnum s1) (fromEnum s2) (fromEnum s3)
&& m (fromEnum f1) (fromEnum f2) (fromEnum f3)
where m x1 x2 x3 = ((x1 == x2) && (x2 == x3))
|| ((x1 /= x2) && (x2 /= x3) && (x1 /= x3))
allCombinations :: Enum a => [a] -> [(a, a, a)]
allCombinations [] = []
allCombinations (x:[]) = []
allCombinations xs = [(xs !! x, xs !! y, xs !! z)
| x <- [0 .. length xs - 3 ]
, y <- [succ x .. length xs - 2]
, z <- [succ y .. length xs - 1]]
solutions :: [Card] -> [(Card, Card, Card)]
solutions cs = filter isSolution (allCombinations cs)
solutionCards :: [Card] -> [Card] -> [Card]
solutionCards d a = foldr (\c as ->
if null $ solutions (c:d)
then as
else c:as
) [] a
newNumber :: (MonadRandom m, MonadState [Int] m) => m ()
newNumber = do
d <- get
n <- getRandomR (cardMinBound, cardMaxBound)
if n `elem` d
then newNumber
else put (n:d)
randomList :: MonadRandom m => Int -> m [Int]
randomList n = execStateT (sequence $ replicate n newNumber) []
| drever/fluxette | src/Model.hs | gpl-2.0 | 5,597 | 0 | 22 | 1,505 | 2,260 | 1,203 | 1,057 | 158 | 2 |
{- |
Module : $Header$
Copyright : (c) Heng Jiang and Till Mossakowski, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (Logic)
display the logic graph
-}
module GUI.ShowLogicGraph (showPlainLG, showLG) where
import GUI.UDGUtils as UDG
import GUI.HTkUtils as HTk
import qualified GUI.GraphAbstraction as GA
import HTk.Toolkit.DialogWin (useHTk)
import Comorphisms.LogicGraph
import Comorphisms.LogicList
import Comorphisms.HetLogicGraph
import Logic.Grothendieck
import Logic.Logic
import Logic.Comorphism
import Logic.Prover
import qualified Data.Map as Map
import Data.List
import qualified Common.Lib.Rel as Rel
import Common.Consistency
import Data.Typeable
graphParms :: GraphAllConfig graph graphParms node nodeType nodeTypeParms
arc arcType arcTypeParms
=> Graph graph graphParms node nodeType nodeTypeParms
arc arcType arcTypeParms
-> String -- ^ title of graph
-> graphParms
graphParms _ title =
GraphTitle title $$
OptimiseLayout True $$
AllowClose (return True) $$
emptyGraphParms
makeNodeMenu :: (GraphAllConfig graph graphParms node
nodeType nodeTypeParms
arc arcType arcTypeParms,
Typeable value)
=> Graph graph graphParms node nodeType nodeTypeParms
arc arcType arcTypeParms
-> (value -> IO String)
-- ^ display the value as a String
-> LocalMenu value
-> String -- ^ color of node
-> nodeTypeParms value
makeNodeMenu _ showMyValue logicNodeMenu color =
logicNodeMenu $$$
Ellipse $$$
ValueTitle showMyValue $$$
Color color $$$
emptyNodeTypeParms
stableColor, testingColor, unstableColor, experimentalColor,
proverColor :: String
stableColor = "#00FF00"
testingColor = "#88FF33"
unstableColor = "#CCFF66"
experimentalColor = "white"
proverColor = "lightsteelblue"
normalArcColor :: String
normalArcColor = "black"
inclusionArcColor :: String
inclusionArcColor = "blue"
-- | Test whether a comorphism is an ad-hoc inclusion
isInclComorphism :: AnyComorphism -> Bool
isInclComorphism (Comorphism cid) =
Logic (sourceLogic cid) == Logic (targetLogic cid) &&
isProperSublogic (G_sublogics (sourceLogic cid) (sourceSublogic cid))
(G_sublogics (targetLogic cid) (targetSublogic cid))
showLogicGraph :: Bool -> IO GA.OurGraph
showLogicGraph plain = do
-- disp s tD = debug (s ++ (show tD))
logicG <- newGraph daVinciSort
$ (if plain then (GlobalMenu (UDG.Menu Nothing [
Button "Show detailed logic graph" showHSG ]) $$)
else id)
$ graphParms daVinciSort $ if plain then "Logic Graph" else
"Heterogeneous Sublogic Graph"
let logicNodeMenu = LocalMenu $ UDG.Menu (Just "Info")
$ [Button "Tools" $ \ slg -> let lg = toAnyLogic slg in
createTextDisplay
("Parsers, Provers and Cons_Checker of "
++ show lg) (showTools lg) [size (80, 25)]]
++ [Button "Sublogic" $ \ slg -> let lg = toAnyLogic slg in
createTextDisplay ("Sublogics of "
++ show lg) (showSublogic lg) [size (80, 25)] | plain]
++ [Button "Sublogic Graph" $ showSubLogicGraph . toAnyLogic,
Button "Description" $ \ slg -> let lg = toAnyLogic slg in
createTextDisplay
("Description of " ++ show lg) (showDescription lg)
[size (83, 25)]]
makeLogicNodeMenu = makeNodeMenu daVinciSort
(return . if plain then show . toAnyLogic else show)
logicNodeMenu
stableNodeType <- newNodeType logicG $ makeLogicNodeMenu stableColor
testingNodeType <- newNodeType logicG $ makeLogicNodeMenu testingColor
unstableNodeType <- newNodeType logicG $ makeLogicNodeMenu unstableColor
experimentalNodeType <- newNodeType logicG $
makeLogicNodeMenu experimentalColor
proverNodeType <-
newNodeType logicG $ makeLogicNodeMenu proverColor
let newNode' logic =
case toAnyLogic logic of
Logic lid -> if null $ provers lid then let
nodeType = case stability lid of
Stable -> stableNodeType
Testing -> testingNodeType
Unstable -> unstableNodeType
Experimental -> experimentalNodeType
in newNode logicG nodeType logic
else newNode logicG proverNodeType logic
myMap = if plain then Map.map (\ (Logic l) ->
G_sublogics l $ top_sublogic l)
$ Map.fromList $ map addLogicName
$ Map.elems $ logics logicGraph
else sublogicNodes hetSublogicGraph
-- production of the nodes (in a list)
nodeList <- mapM newNode' $ Map.elems myMap
-- build the map with the node's name and the node.
let namesAndNodes = Map.fromList (zip (Map.keys myMap)
nodeList)
lookupLogi gslStr =
Map.findWithDefault (error "lookupLogi: Logic not found")
gslStr
namesAndNodes
        {- each edge can also show additional information (the description
           of the comorphism and the names of the source/target logics as
           well as their sublogics). -}
logicArcMenu =
LocalMenu $ Button "Info"
$ \ c -> createTextDisplay (show c) (showComoDescription c)
[size (80, 25)]
acmName (Comorphism cid) = return $ language_name cid
normalArcTypeParms = logicArcMenu $$$ -- normal comorphism
Color normalArcColor $$$
ValueTitle acmName $$$
emptyArcTypeParms
insertArcType tp ((src, trg), acm) =
newArc logicG tp acm (lookupLogi src) (lookupLogi trg)
inclArcTypeParms = logicArcMenu $$$ -- inclusion
Color inclusionArcColor $$$
(if plain then id else (ValueTitle acmName $$$))
emptyArcTypeParms
normalArcType <- newArcType logicG normalArcTypeParms
inclArcType <- newArcType logicG inclArcTypeParms
if plain then do
let toPair co@(Comorphism c) = ((language_name (sourceLogic c)
, language_name (targetLogic c)), co)
insertComo = insertArcType normalArcType . toPair -- for cormophism
insertIncl = insertArcType inclArcType . toPair -- for inclusion
mapM_ insertIncl inclusionList
mapM_ insertComo $ filter (`notElem` inclusionList) comorphismList
else do
let (inclCom, notInclCom) =
partition ((`elem` inclusionList) . snd) $
concatMap (\ (x, ys) -> zip (repeat x) ys) $
Map.toList -- [((String,String),[AnyComorphism])]
(comorphismEdges hetSublogicGraph)
(adhocCom, normalCom) =
partition (isInclComorphism . snd) notInclCom
adhocInclArcTypeParms =
Color inclusionArcColor $$$ -- ad-hoc inclusion
emptyArcTypeParms
adhocInclArcType <- newArcType logicG adhocInclArcTypeParms
mapM_ (insertArcType inclArcType) inclCom
mapM_ (insertArcType adhocInclArcType) adhocCom
mapM_ (insertArcType normalArcType) normalCom
redraw logicG
return logicG
showSubLogicGraph :: AnyLogic -> IO ()
showSubLogicGraph subl =
case subl of
Logic sublid ->
do subLogicG <- newGraph daVinciSort
$ graphParms daVinciSort "SubLogic Graph"
let listG_Sublogics = all_sublogics sublid
subNodeMenu = LocalMenu (UDG.Menu Nothing [])
subNodeTypeParms =
subNodeMenu $$$
Ellipse $$$
ValueTitle (return . sublogicName) $$$
Color "yellow" $$$
emptyNodeTypeParms
subNodeType <- newNodeType subLogicG subNodeTypeParms
subNodeList <- mapM (newNode subLogicG subNodeType)
listG_Sublogics
let slAndNodes = Map.fromList $
zip listG_Sublogics subNodeList
lookupSublogic g_sl =
Map.findWithDefault
(error "lookupSublogic: node not found")
g_sl slAndNodes
subArcMenu = LocalMenu (UDG.Menu Nothing [])
subArcTypeParms = subArcMenu $$$
Color "green" $$$
emptyArcTypeParms
subArcType <- newArcType subLogicG subArcTypeParms
let insertSubArc (node1, node2) =
newArc subLogicG subArcType ""
(lookupSublogic node1)
(lookupSublogic node2)
subl_nodes =
Rel.toList $ Rel.intransKernel $ Rel.fromList
[ (g1, g2)
| g1 <- listG_Sublogics
, g2 <- listG_Sublogics
, g1 /= g2
, isSubElem g1 g2
]
mapM_ insertSubArc subl_nodes
redraw subLogicG
toAnyLogic :: G_sublogics -> AnyLogic
toAnyLogic (G_sublogics lid _) = Logic lid
showSublogic :: AnyLogic -> String
showSublogic (Logic lid) = unlines (map sublogicName (all_sublogics lid))
showSubTitle :: G_sublogics -> String
showSubTitle (G_sublogics _ lid) = sublogicName lid
showDescription :: AnyLogic -> String
showDescription (Logic lid) = let s = description lid in
(if null s then "No description available" else s) ++ "\n\nStability: "
++ show (stability lid)
showComoDescription :: AnyComorphism -> String
showComoDescription (Comorphism cid) =
let ssid = G_sublogics (sourceLogic cid) (sourceSublogic cid)
tsid = G_sublogics (targetLogic cid) (targetSublogic cid)
s = description cid
in (if null s then "" else s ++ "\n\n")
++ "source logic: " ++ language_name (sourceLogic cid) ++ "\n\n"
++ "target logic: " ++ language_name (targetLogic cid) ++ "\n"
++ "source sublogic: " ++ showSubTitle ssid ++ "\n"
++ "target sublogic: " ++ showSubTitle tsid
showTools :: AnyLogic -> String
showTools (Logic li) =
case Map.keys $ parsersAndPrinters li of
s@(_ : r) -> "Parser for basic specifications.\n"
++ if null r then "" else
unlines . filter (not . null) $ "Additional serializations:"
: map show s
[] -> ""
++ case parse_symb_items li of
Just _ -> "Parser for symbol lists.\n"
Nothing -> ""
++ case parse_symb_map_items li of
Just _ -> "Parser for symbol maps.\n"
Nothing -> ""
++ case basic_analysis li of
Just _ -> "Analysis of basic specifications.\n"
Nothing -> ""
++ case data_logic li of
Just _ -> "is a process logic.\n"
Nothing -> ""
++ case provers li of
[] -> ""
ls -> unlines $ "\nProvers:" : map proverName ls
++ case cons_checkers li of
[] -> ""
ls -> unlines $ "\nConsistency checkers:" : map ccName ls
++ case conservativityCheck li of
        [] -> ""
        ls -> unlines $ "\nConservativity checkers:" : map checkerId ls
showHSG :: IO ()
showHSG = showLogicGraph False >> return ()
showLG :: IO ()
showLG = showLogicGraph True >> return ()
showPlainLG :: IO ()
showPlainLG = do
wishInst <- HTk.initHTk [HTk.withdrawMainWin]
useHTk
lg <- showLogicGraph True
sync (destroyed lg)
destroy wishInst
| nevrenato/HetsAlloy | GUI/ShowLogicGraph.hs | gpl-2.0 | 12,687 | 0 | 22 | 4,648 | 2,811 | 1,401 | 1,410 | 255 | 11 |
-- |
-- Module : Setup
-- Copyright : (C) 2007-2008 Bryan O'Sullivan
-- (C) 2012-2014 Jens Petersen
--
-- Maintainer : Jens Petersen <[email protected]>
-- Stability : alpha
-- Portability : portable
--
-- Explanation: Command line option processing for building RPM
-- packages.
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
module Setup (
RpmFlags(..)
, parseArgs
, quiet
) where
import Control.Monad (unless, when)
import Data.Char (toLower)
import Data.Maybe (listToMaybe, fromMaybe)
import Data.Version (showVersion)
import Distribution.Compiler (CompilerId)
import Distribution.Text (simpleParse)
import Distribution.PackageDescription (FlagName (..))
import Distribution.ReadE (readEOrFail)
import Distribution.Verbosity (Verbosity, flagToVerbosity, normal,
silent)
import System.Console.GetOpt (ArgDescr (..), ArgOrder (..), OptDescr (..),
getOpt', usageInfo)
import System.Environment (getProgName)
import System.Exit (ExitCode (..), exitSuccess, exitWith)
import System.IO (Handle, hPutStrLn, stderr, stdout)
import Distro (Distro(..), readDistroName)
import Paths_cabal_rpm (version)
import SysCmd ((+-+))
data RpmFlags = RpmFlags
{ rpmConfigurationsFlags :: [(FlagName, Bool)]
, rpmForce :: Bool
, rpmHelp :: Bool
, rpmBinary :: Bool
, rpmStrict :: Bool
, rpmRelease :: Maybe String
, rpmCompilerId :: Maybe CompilerId
, rpmDistribution :: Maybe Distro
, rpmVerbosity :: Verbosity
, rpmVersion :: Bool
}
deriving (Eq, Show)
emptyRpmFlags :: RpmFlags
emptyRpmFlags = RpmFlags
{ rpmConfigurationsFlags = []
, rpmForce = False
, rpmHelp = False
, rpmBinary = False
, rpmStrict = False
, rpmRelease = Nothing
, rpmCompilerId = Nothing
, rpmDistribution = Nothing
, rpmVerbosity = normal
, rpmVersion = False
}
quiet :: RpmFlags
quiet = emptyRpmFlags {rpmVerbosity = silent}
options :: [OptDescr (RpmFlags -> RpmFlags)]
options =
[
Option "h?" ["help"] (NoArg (\x -> x { rpmHelp = True }))
"Show this help text",
Option "b" ["binary"] (NoArg (\x -> x { rpmBinary = True }))
"Force Haskell package name to be base package name",
Option "f" ["flags"] (ReqArg (\flags x -> x { rpmConfigurationsFlags = rpmConfigurationsFlags x ++ flagList flags }) "FLAGS")
"Set given flags in Cabal conditionals",
Option "" ["force"] (NoArg (\x -> x { rpmForce = True }))
"Overwrite existing spec file.",
Option "" ["strict"] (NoArg (\x -> x { rpmStrict = True }))
"Fail rather than produce an incomplete spec file.",
Option "" ["release"] (ReqArg (\rel x -> x { rpmRelease = Just rel }) "RELEASE")
"Override the default package release",
Option "" ["compiler"] (ReqArg (\cid x -> x { rpmCompilerId = Just (parseCompilerId cid) }) "COMPILER-ID")
"Finalize Cabal files targetting the given compiler version",
Option "" ["distro"] (ReqArg (\did x -> x { rpmDistribution = Just (readDistroName did) }) "DISTRO")
"Choose the distribution generated spec files will target",
Option "v" ["verbose"] (ReqArg (\verb x -> x { rpmVerbosity = readEOrFail flagToVerbosity verb }) "n")
"Change build verbosity",
Option "V" ["version"] (NoArg (\x -> x { rpmVersion = True }))
"Show version number"
]
-- Lifted from Distribution.Simple.Setup, since it's not exported.
flagList :: String -> [(FlagName, Bool)]
flagList = map tagWithValue . words
where tagWithValue ('-':name) = (FlagName (map toLower name), False)
tagWithValue name = (FlagName (map toLower name), True)
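-- For example (illustrative only):
--   flagList "llvm -debug" == [(FlagName "llvm", True), (FlagName "debug", False)]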
printHelp :: Handle -> IO ()
printHelp h = do
progName <- getProgName
let info = "Usage: " ++ progName ++ " [OPTION]... COMMAND [PATH|PKG|PKG-VERSION]\n"
++ "\n"
++ "PATH can be a .spec file, .cabal file, or pkg dir.\n"
++ "\n"
++ "Commands:\n"
++ " spec\t\t- generate a spec file\n"
++ " srpm\t\t- generate a src rpm file\n"
++ " prep\t\t- unpack source\n"
++ " local\t\t- build rpm package locally\n"
++ " builddep\t- install dependencies\n"
++ " install\t- install packages recursively\n"
++ " depends\t- list Cabal depends\n"
++ " requires\t- list package buildrequires\n"
++ " missingdeps\t- list missing buildrequires\n"
++ " diff\t\t- diff current spec file\n"
++ " update\t- update spec file package to latest version\n"
-- ++ " mock\t\t- mock build package\n"
++ "\n"
++ "Options:"
hPutStrLn h (usageInfo info options)
parseCompilerId :: String -> CompilerId
parseCompilerId x = fromMaybe err (simpleParse x)
where err = error (show x ++ " is not a valid compiler id")
parseArgs :: [String] -> IO (RpmFlags, String, Maybe String)
parseArgs args = do
let (os, args', unknown, errs) = getOpt' Permute options args
opts = foldl (flip ($)) emptyRpmFlags os
when (rpmHelp opts) $ do
printHelp stdout
exitSuccess
when (rpmVersion opts) $ do
putStrLn $ showVersion version
exitSuccess
unless (null errs) $
error $ unlines errs
unless (null unknown) $
error $ "Unrecognised options:" +-+ unwords unknown
when (null args') $ do
printHelp stderr
exitWith (ExitFailure 1)
when (notElem (head args') ["builddep", "depends", "diff", "install", "missingdeps", "prep", "requires", "spec", "srpm", "build", "local", "rpm", "update", "refresh"]) $ do
hPutStrLn stderr $ "Unknown command:" +-+ head args'
printHelp stderr
exitWith (ExitFailure 1)
when (length args' > 2) $
error $ "Too many arguments:" +-+ unwords args'
return (opts, head args', listToMaybe $ tail args')
| mimi1vx/cabal-rpm | src/Setup.hs | gpl-3.0 | 6,406 | 0 | 28 | 1,797 | 1,547 | 858 | 689 | 124 | 2 |
import Minitel.Minitel
import Minitel.Generator
import Minitel.Queue
import Minitel.MString
import Control.Concurrent
import Control.Monad
import System.Hardware.Serialport
import Data.Char
haskellMinitel :: Minitel -> IO ()
haskellMinitel m = forM_ [2,4 .. 20] $ \ x ->
m <<< [ mLocate (x - 1) x
, mSize SimpleSize DoubleSize
, mString VideoTex "Haskell Minitel!"
]
main :: IO ()
main = do
m <- minitel "/dev/ttyUSB0" baseSettings
mapM_ (mCall m) [ mExtendedKeyboard True
, mCursorKeys True
, mLowercaseKeyboard True
]
m <<< [ mClear ReallyEverything
, mLocate 1 0
, mString VideoTex "Démonstration de HaMinitel"
]
haskellMinitel m
forever $ do
s <- readMString (get $ input m) completeReturn
print s
flush (serial m)
| Zigazou/HaMinitel | example/testMinitel.hs | gpl-3.0 | 887 | 3 | 14 | 287 | 255 | 125 | 130 | 26 | 1 |
{-# LANGUAGE CPP #-}
module Main where
import Control.Category
import Control.Monad.Writer.Strict
import Control.Wire as W
import Data.Char (ord)
import Data.Maybe (fromJust, isJust)
import Data.Traversable
import Experiment.Planets.Instances.GNUPlot ()
import Experiment.Planets.Types
import Linear.V3
import Linear.Vector
import Physics
-- import Data.Colour
import Data.Colour.Names
import Data.Colour.SRGB
import Prelude hiding ((.), id)
import Render.Backend.GNUPlot
import Render.Render
#ifdef WINDOWS
import Render.Backend.GLUT
import Experiment.Planets.Instances.GLUT ()
#else
import Render.Backend.SDL
import Experiment.Planets.Instances.SDL ()
#endif
-- | What is this number? Well, we want our gravitational constant to be 1,
-- so we normalize with our time unit being a day and our distance unit
-- being an AU. Our mass unit then must be 6.720074483812448e33 kg. To
-- convert a mass given in kg, we divide it by that mass unit (equivalently,
-- multiply by mConst).
mConst :: Double
mConst = 1.48807874e-34
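-- Sanity check of that constant (added commentary, not in the original code):
-- with G = 6.674e-11 m^3 kg^-1 s^-2, 1 AU = 1.496e11 m and 1 day = 86400 s,
--
--   G in AU^3 kg^-1 day^-2  =  6.674e-11 * 86400^2 / (1.496e11)^3  ~  1.488e-34
--
-- so choosing a mass unit of 1 / 1.488e-34 ~ 6.72e33 kg makes G come out as 1,
-- and converting a mass given in kg is simply multiplication by mConst.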
processPlanetData :: String -> String -> [(Planet, V3D)]
processPlanetData nStr dStr = zipWith mergeData nData dData
where
nData = map processLineN $ lines nStr
processLineN = makeDataN . words
makeDataN (name:dat) =
let (m:px:py:pz:vx:vy:vz:_) = map read dat
b = Body (mConst * m) (V3 px py pz)
v0 = V3 vx vy vz
in (Planet name 5 black b, v0)
dData = map processLineD $ lines dStr
processLineD = drop 1 . words
mergeData (p,v0) (rad:col) =
( p { planetRadius = rConst * read rad, planetColor = colTup (read (unwords col)) }
, v0)
rConst = 0.04
colTup (cr,cg,cb) = sRGB24 cr cg cb
main :: IO ()
main = do
pData <- processPlanetData
<$> readFile "data/planet_data.dat"
<*> readFile "data/planet_display.dat"
mapM_ print pData
runManyMouse pData
-- runMany :: [(Planet, V3D)] -> IO ()
-- runMany planets = runTest (length planets) (w . pure ())
-- where
-- bwire = manyBody (map bTup planets) euler
-- w = arr (zipWith ($) planetMakers) . bwire
-- planetMakers = map (pMaker . fst) planets
runManyMouse :: [(Planet, V3D)] -> IO ()
runManyMouse planets = runTest (length planets + 1) wz
where
bwire = manyBody' (map bTup planets) verlet
w = zipWith ($) planetMakers <$> bwire
wz = proc e -> do
bs <- w -< e
zoomE <- filterE isJust -< zoomEvent <$> e
zoom <- hold . accumE (*) 1 <|> pure 1 -< fromJust <$> zoomE
returnA -< (zoom,bs)
planetMakers = map (pMaker . fst) (planets ++ [extraPlanet])
extraPlanet = (Planet "Bob" 0.1 white (Body 0 zero), zero)
zoomEvent (RenderMouseDown _ RenderMouseWheelUp) = Just (1/1.2)
zoomEvent (RenderMouseDown _ RenderMouseWheelDown) = Just (1.2)
zoomEvent (RenderKeyDown (RenderKeyData c _))
| c == ord '+' = Just (1.2)
| c == ord '-' = Just (1/1.2)
| otherwise = Nothing
zoomEvent _ = Nothing
manyBody' :: forall m e t s. (MonadFix m, Monoid e, HasTime t s, Fractional t)
=> [(Body, V3D)] -- Initial body states and initial velocities
-> Integrator -- Integrator
-> Wire s e m (Event RenderEvent) [Body]
manyBody' bodyList igr = proc e -> do
rec
bs <- bodyWires -< (bs, e)
returnA -< bs
where
toWire :: (Body, V3D) -> Wire s e m ([Body], Event RenderEvent) Body
toWire (b0, v0) = proc (bs,_) -> do
bodyGs b0 v0 igr -< bs
bodyWires = sequenceA $ map toWire bodyList ++ [mouseBody]
mouseBody :: Wire s e m ([Body], Event RenderEvent) Body
mouseBody = proc (_,e) -> do
t <- realToFrac . (/5) <$> time -< ()
let pos = 2.5 *^ V3 (sin t) (cos t) (sin ((t+1)/2) / 4)
lastMouse <- hold . filterE isMouseEvent <|> pure emptyMouse-< e
returnA -< case lastMouse of
RenderMouseDown _ RenderMouseLeft -> Body 0.5 pos
_ -> Body 0 pos
-- returnA -< Body 0 1
emptyMouse :: RenderEvent
emptyMouse = RenderMouseUp (0,0) RenderMouseLeft
-- runFixed :: [(Planet, V3D)] -> IO ()
-- runFixed pData = runTest 9 (w . pure ())
-- where
-- bwire = manyFixedBody [sun] (map bTup pData) verlet
-- w = zipWith ($) planetMakers <$> bwire
-- ((Planet _ _ _ sun,_):planets) = pData
-- planetMakers = map (pMaker . fst) planets
-- runTwoBody :: (Planet,V3D) -> (Planet, V3D) -> IO ()
-- runTwoBody (p1@(Planet _ _ _ b1), v1) (p2@(Planet _ _ _ b2), v2) =
-- runTest 2 (w . pure ())
-- where
-- w = planetMaker <$> twoBody (b1,v1) (b2,v2) verlet
-- planetMaker (a,b) = [pMaker p1 a, pMaker p2 b]
-- runOneBody :: (Planet,V3D) -> IO ()
-- runOneBody (p@(Planet _ _ _ b0), v0) = runTest 1 (w . pure ())
-- where
-- w = map (pMaker p) <$> manyFixedBody [Body 1 zero] [(b0,v0)] verlet
runTest :: Int -> Wire (Timed Double ()) () Identity (Event RenderEvent) (Double, [Planet]) -> IO ()
runTest _ w =
#ifdef WINDOWS
runBackend (glutBackend (1/30) 2.5 (600,600) (31,31,31)) (const . return $ ()) (uncurry PlanetList <$> w)
#else
runBackend (sdlBackend (1/30) 30 (600,600) (31,31,31)) (const . const . return . return $ ()) (uncurry PlanetList <$> w)
#endif
runTestGNUPlot :: Int -> Wire (Timed Double ()) () IO (Event RenderEvent) [Planet] -> IO ()
runTestGNUPlot n w = do
clearLogs 10
runBackend (gnuPlotBackend 1 20000) (writeLog n) w
bTup :: (Planet, V3D) -> (Body, V3D)
bTup (Planet _ _ _ b0, v0) = (b0, v0)
pMaker :: Planet -> Body -> Planet
pMaker (Planet n r c _) = Planet n r c
clearLogs :: Int -> IO ()
clearLogs n = forM_ [0..(n-1)] $ \i ->
writeFile
("out/planets_b" ++ show i ++ ".dat")
""
writeLog :: Int -> [Planet] -> IO ()
writeLog n planets = forM_ [0..(n-1)] $ \i ->
appendFile
("out/planets_b" ++ show i ++ ".dat")
((++ "\n") . gnuplot $ planets !! i)
| mstksg/netwire-experiments | src/Experiment/Planets.hs | gpl-3.0 | 6,078 | 4 | 21 | 1,667 | 1,794 | 958 | 836 | -1 | -1 |
module WeatherFetch where
import Data.Maybe (fromJust)
import Network.HTTP
import Network.URI
import qualified Data.ByteString.Lazy as L
-- {{{ Units for temperature + speed
data Unit
= Default
| Metric
| Imperial
deriving (Show, Read, Eq)
-- generate a string that can be fit into an URL
showUnitUrl :: Unit -> String
showUnitUrl Default = ""
showUnitUrl Metric = "&units=metric"
showUnitUrl Imperial = "&units=imperial"
-- }}}
-- {{{ Locations come in various flavours
data Location
= City
{ name :: String
, countryCode :: String
}
| CityID
{ id :: String }
| Coords
{ latitude :: Double
, longitude :: Double
}
deriving (Show, Read, Eq)
-- {{{ Boilerplate for our arg handling
setCity :: Location -> String -> Location
setCity (City _ c) s = City s c
setCity _ s = City s ""
setCountry :: Location -> String -> Location
setCountry (City c _) s = City c s
setCountry _ s = City "" s
setLat :: Location -> Double -> Location
setLat (Coords _ l) d = Coords d l
setLat _ d = Coords d 0
setLon :: Location -> Double -> Location
setLon (Coords l _) d = Coords l d
setLon _ d = Coords 0 d
-- }}}
-- generate a string that can be fit into an URL
showLocationURL :: Location -> String
showLocationURL (City n cC) = "q=" ++ n ++ "," ++ cC
showLocationURL (CityID i) = "id=" ++ i
showLocationURL (Coords lat lon) = "lat=" ++ show lat ++ "&lon=" ++ show lon
-- }}}
-- build an URL that represents the user's needs
constructURL :: String -> Unit -> Location -> String
constructURL appId unit location = escapeURIString (/=' ') $
"http://api.openweathermap.org/data/2.5/forecast?" ++
showLocationURL location ++ showUnitUrl unit ++ "&APPID=" ++ appId
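-- For instance, with a made-up API key (illustrative only):
--
--   constructURL "abc123" Metric (City "London" "uk")
--     == "http://api.openweathermap.org/data/2.5/forecast?q=London,uk&units=metric&APPID=abc123"
--
-- Only spaces are percent-escaped, because the predicate handed to
-- escapeURIString keeps every other character as-is.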
-- fetch a 5 day (every 3 hours) forecast
getRawWeatherdata :: String -> Unit -> Location -> IO L.ByteString
getRawWeatherdata a u l = simpleHTTP req >>= getResponseBody
where uri = fromJust $ parseURI (constructURL a u l)
req = Request uri GET [] L.empty
| ibabushkin/hweather | WeatherFetch.hs | gpl-3.0 | 2,026 | 0 | 11 | 470 | 606 | 320 | 286 | 48 | 1 |
module Main where
import System.Environment
import Text.ParserCombinators.Parsec hiding (spaces)
import Control.Monad
data LispVal = Atom String
| List [LispVal]
| DottedList [LispVal] LispVal
| Number Integer
| String String
| Bool Bool
symbol :: Parser Char
symbol = oneOf "!$%&|*+-/:<=?>@^_~#"
spaces :: Parser ()
spaces = skipMany1 space
parseString :: Parser LispVal
parseString = do
char '"'
x <- many (noneOf "\"")
char '"'
return $ String x
parseAtom :: Parser LispVal
parseAtom = do
first <- letter <|> symbol
rest <- many (letter <|> symbol <|> digit)
let atom = first : rest
return $ case atom of
"#t" -> Bool True
"#f" -> Bool False
_ -> Atom atom
parseNumber :: Parser LispVal
parseNumber = liftM (Number . read) $ many1 digit
parseExpr :: Parser LispVal
parseExpr = parseString <|> parseNumber <|> parseAtom
readExpr :: String -> String
readExpr input = case parse parseExpr "lisp" input of
    Left err -> "No match: " ++ show err
    Right _ -> "Found value"
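-- A few illustrative inputs (added commentary, not part of the tutorial code):
--
--   readExpr "#t" == "Found value" -- parsed by parseAtom as Bool True
--   readExpr "25" == "Found value" -- parsed by parseNumber
--   readExpr "(x)" -- no parser above handles '(', so this is "No match: ..."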
main :: IO ()
main = do
args <- getArgs
putStrLn (readExpr (args !! 0))
| lifengsun/haskell-exercise | scheme/02/datatypeparser.hs | gpl-3.0 | 1,156 | 0 | 11 | 293 | 394 | 197 | 197 | 41 | 3 |
{-# LANGUAGE TemplateHaskell #-}
module Poslist
( Poslist
, parse
, convertToGraph
) where
import DeriveJSON
import Data.Foldable (foldr')
import Data.Text.Lazy.Internal (Text)
import Data.Text.Lazy.Encoding (encodeUtf8)
import Graph (Graph, mkEmptyGraph, addEdge)
-------------------------------------------------------------------------------
type X = Double
type Y = Double
type Distance = Int
type Pos = (X, Y)
data Poslist = Poslist ![Pos] deriving (Show)
-------------------------------------------------------------------------------
$(deriveJSON hiveJSONOptions ''Poslist)
-------------------------------------------------------------------------------
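-- | Build a complete graph on the node indices @1..n@: every ordered pair of
-- positions (including a position paired with itself, which yields 0-weight
-- self-loops) gets an edge whose weight is the Euclidean distance rounded to
-- the nearest integer.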
convertToGraph :: Poslist -> Graph Int
convertToGraph (Poslist pl) =
let pss = zip [1..] . map (zip [1..] . zipWith dist pl . repeat) $ pl
in foldr' (\(f, ts) g -> foldr' (\(t, d) g' -> addEdge g' (f, t, d)) g ts) mkEmptyGraph pss
where
dist :: Pos -> Pos -> Distance
dist (x1, y1) (x2, y2) = round . sqrt $ (x1-x2)**2 + (y1-y2)**2
-------------------------------------------------------------------------------
parse :: Text -> Maybe Poslist
parse = decode' . encodeUtf8
| chrisbloecker/Hive | tools/Poslist.hs | gpl-3.0 | 1,206 | 0 | 16 | 213 | 363 | 206 | 157 | 26 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.StorageTransfer.TransferOperations.Cancel
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Cancels a transfer. Use the get method to check whether the cancellation
-- succeeded or whether the operation completed despite cancellation.
--
-- /See:/ <https://cloud.google.com/storage/transfer Google Storage Transfer API Reference> for @storagetransfer.transferOperations.cancel@.
module Network.Google.Resource.StorageTransfer.TransferOperations.Cancel
(
-- * REST Resource
TransferOperationsCancelResource
-- * Creating a Request
, transferOperationsCancel
, TransferOperationsCancel
-- * Request Lenses
, tocXgafv
, tocUploadProtocol
, tocPp
, tocAccessToken
, tocUploadType
, tocBearerToken
, tocName
, tocCallback
) where
import Network.Google.Prelude
import Network.Google.StorageTransfer.Types
-- | A resource alias for @storagetransfer.transferOperations.cancel@ method which the
-- 'TransferOperationsCancel' request conforms to.
type TransferOperationsCancelResource =
"v1" :>
CaptureMode "name" "cancel" Text :>
QueryParam "$.xgafv" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Post '[JSON] Empty
-- | Cancels a transfer. Use the get method to check whether the cancellation
-- succeeded or whether the operation completed despite cancellation.
--
-- /See:/ 'transferOperationsCancel' smart constructor.
data TransferOperationsCancel = TransferOperationsCancel'
{ _tocXgafv :: !(Maybe Text)
, _tocUploadProtocol :: !(Maybe Text)
, _tocPp :: !Bool
, _tocAccessToken :: !(Maybe Text)
, _tocUploadType :: !(Maybe Text)
, _tocBearerToken :: !(Maybe Text)
, _tocName :: !Text
, _tocCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TransferOperationsCancel' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tocXgafv'
--
-- * 'tocUploadProtocol'
--
-- * 'tocPp'
--
-- * 'tocAccessToken'
--
-- * 'tocUploadType'
--
-- * 'tocBearerToken'
--
-- * 'tocName'
--
-- * 'tocCallback'
transferOperationsCancel
:: Text -- ^ 'tocName'
-> TransferOperationsCancel
transferOperationsCancel pTocName_ =
TransferOperationsCancel'
{ _tocXgafv = Nothing
, _tocUploadProtocol = Nothing
, _tocPp = True
, _tocAccessToken = Nothing
, _tocUploadType = Nothing
, _tocBearerToken = Nothing
, _tocName = pTocName_
, _tocCallback = Nothing
}
-- | V1 error format.
tocXgafv :: Lens' TransferOperationsCancel (Maybe Text)
tocXgafv = lens _tocXgafv (\ s a -> s{_tocXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
tocUploadProtocol :: Lens' TransferOperationsCancel (Maybe Text)
tocUploadProtocol
= lens _tocUploadProtocol
(\ s a -> s{_tocUploadProtocol = a})
-- | Pretty-print response.
tocPp :: Lens' TransferOperationsCancel Bool
tocPp = lens _tocPp (\ s a -> s{_tocPp = a})
-- | OAuth access token.
tocAccessToken :: Lens' TransferOperationsCancel (Maybe Text)
tocAccessToken
= lens _tocAccessToken
(\ s a -> s{_tocAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
tocUploadType :: Lens' TransferOperationsCancel (Maybe Text)
tocUploadType
= lens _tocUploadType
(\ s a -> s{_tocUploadType = a})
-- | OAuth bearer token.
tocBearerToken :: Lens' TransferOperationsCancel (Maybe Text)
tocBearerToken
= lens _tocBearerToken
(\ s a -> s{_tocBearerToken = a})
-- | The name of the operation resource to be cancelled.
tocName :: Lens' TransferOperationsCancel Text
tocName = lens _tocName (\ s a -> s{_tocName = a})
-- | JSONP
tocCallback :: Lens' TransferOperationsCancel (Maybe Text)
tocCallback
= lens _tocCallback (\ s a -> s{_tocCallback = a})
instance GoogleRequest TransferOperationsCancel where
type Rs TransferOperationsCancel = Empty
type Scopes TransferOperationsCancel =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient TransferOperationsCancel'{..}
= go _tocName _tocXgafv _tocUploadProtocol
(Just _tocPp)
_tocAccessToken
_tocUploadType
_tocBearerToken
_tocCallback
(Just AltJSON)
storageTransferService
where go
= buildClient
(Proxy :: Proxy TransferOperationsCancelResource)
mempty
| rueshyna/gogol | gogol-storage-transfer/gen/Network/Google/Resource/StorageTransfer/TransferOperations/Cancel.hs | mpl-2.0 | 5,582 | 0 | 17 | 1,306 | 852 | 495 | 357 | 119 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
--------------------------------------------------------------------------------
{-|
Module : Media
Copyright : (c) Daan Leijen 2003
(c) shelarcy 2007
License : wxWindows
Maintainer : [email protected]
Stability : provisional
Portability : portable
Images, Media, Sounds, and action!
-}
--------------------------------------------------------------------------------
module Graphics.UI.WX.Media
( -- * Media
Media(..)
-- * Sound
, sound, playLoop, playWait
-- * Images
, image, imageCreateFromFile, imageCreateFromPixels, imageGetPixels
, imageCreateFromPixelArray, imageGetPixelArray
-- * Bitmaps
, bitmap, bitmapCreateFromFile, bitmapFromImage
) where
import System.IO.Unsafe( unsafePerformIO )
import Graphics.UI.WXCore
import Graphics.UI.WX.Types( Var, varGet, varSet, varCreate )
import Graphics.UI.WX.Attributes
import Graphics.UI.WX.Classes
{--------------------------------------------------------------------
Bitmaps
--------------------------------------------------------------------}
-- | Return a managed bitmap object. Bitmaps are abstract images used
-- for drawing to a device context. The file path should point to
-- a valid bitmap file, normally a @.ico@, @.bmp@, @.xpm@, or @.png@,
-- but any file format supported by 'Image' is correctly loaded.
--
-- Instances: 'Sized'.
bitmap :: FilePath -> Bitmap ()
bitmap fname
= unsafePerformIO $ bitmapCreateFromFile fname
instance Sized (Bitmap a) where
size = newAttr "size" bitmapGetSize bitmapSetSize
-- | Create a bitmap from an image with the same color depth.
bitmapFromImage :: Image a -> IO (Bitmap ())
bitmapFromImage image
= bitmapCreateFromImage image (-1)
{--------------------------------------------------------------------
Images
--------------------------------------------------------------------}
-- | Return a managed image. Images are platform independent representations
-- of pictures, using an array of rgb pixels. See "Graphics.UI.WXCore.Image" for
-- low-level pixel manipulation. The file path should point to
-- a valid image file, like @.jpg@, @.bmp@, @.xpm@, or @.png@, for example.
--
-- Instances: 'Sized'.
image :: FilePath -> Image ()
image fname
= unsafePerformIO $ imageCreateFromFile fname
instance Sized (Image a) where
size = newAttr "size" imageGetSize imageRescale
{--------------------------------------------------------------------
Media
--------------------------------------------------------------------}
-- | Abstract layer between 'MediaCtrl' and 'Sound'. This class intends to
-- avoid breaking backward-compatibility.
class Media w where
  -- | When used with 'Sound', play a sound fragment asynchronously.
  -- When used with 'MediaCtrl', play the media that was loaded by
  -- 'mediaCtrlLoad'.
play :: w -> IO ()
stop :: w -> IO ()
{--------------------------------------------------------------------
Sounds
--------------------------------------------------------------------}
-- | Return a managed sound object. The file path points to
-- a valid sound file, normally a @.wav@.
sound :: FilePath -> Sound ()
sound fname
= unsafePerformIO $ soundCreate fname False
instance Media (Sound a) where
play sound = unitIO (soundPlay sound wxSOUND_ASYNC)
stop = soundStop
-- | Play a sound fragment repeatedly (and asynchronously).
playLoop :: Sound a -> IO ()
playLoop sound
= unitIO (soundPlay sound $ wxSOUND_ASYNC .+. wxSOUND_LOOP)
-- | Play a sound fragment synchronously (i.e. wait till completion).
playWait :: Sound a -> IO ()
playWait sound
= unitIO (soundPlay sound wxSOUND_SYNC)
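{- A minimal usage sketch (added commentary, not part of the original module);
   the file name is made up and would have to exist on disk:

> bang :: Sound ()
> bang = sound "explosion.wav"
>
> demo :: IO ()
> demo = do
>   play bang       -- fire and forget
>   playLoop bang   -- repeat until 'stop'
>   stop bang
>   playWait bang   -- block until the fragment has finished
-}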
| sherwoodwang/wxHaskell | wx/src/Graphics/UI/WX/Media.hs | lgpl-2.1 | 3,833 | 0 | 9 | 686 | 474 | 268 | 206 | 41 | 1 |
module Main where
import qualified Data.Map as M
-- If the given guzai (ingredient) is in the nabe (pot), decrease its count by one;
-- once the count reaches zero, remove that entry from the Map.
-- Write the function eat so that it behaves this way.
eat :: String -> M.Map String Int -> M.Map String Int
eat guzai nabe =
  case M.lookup guzai nabe of
    Just n
      | n <= 1    -> M.delete guzai nabe
      | otherwise -> M.adjust (subtract 1) guzai nabe
    Nothing -> nabe
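-- For example:
--   eat "tofu" (M.fromList [("negi", 1), ("tofu", 2)])
--     == M.fromList [("negi", 1), ("tofu", 1)]
--   eat "tofu" (M.fromList [("tofu", 1)]) == M.empty
--   eat "kani" (M.fromList [("tofu", 1)]) == M.fromList [("tofu", 1)]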
party :: M.Map String Int -> IO ()
party nabe = do
  putStrLn $ "Nabe: " ++ show nabe
  order <- getLine
  let newNabe = eat order nabe
  -- check whether the pot is empty after this serving
  if M.null newNabe
    then putStrLn "The party is over!"
    else party newNabe
-- http://stackoverflow.com/questions/3067048/haskell-convert-list-to-list-of-tuples
stride _ [] = []
stride n (x:xs) = x : stride n (drop (n-1) xs)
toPairs xs = zip (stride 2 xs) (stride 2 (drop 1 xs))
readRecipe :: IO (M.Map String Int)
readRecipe = do
content <- readFile "recipe.txt"
  -- Interpret the contents of `content` and build a tasty pot from the recipe!
let lst = words content in
let ret = zip (stride 2 lst) (map read . stride 2 . drop 1 $ lst) :: [(String,Int)] in
return $ M.fromList ret
-- return $ M.fromList [("Kuuki",1)]
main :: IO ()
main = do
initialNabe <- readRecipe
party initialNabe
| jtwp470/my-programming-learning-book | haskell/exercise-7-nabe/src/Main.hs | unlicense | 1,488 | 0 | 20 | 306 | 487 | 240 | 247 | 31 | 4 |
module ProjectEuler.A268681 (a268681) where
import Data.List (nub)
import Tables.A007318 (a007318_row)
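-- a268681 n sums the distinct squarefree entries occurring in the first n rows
-- of Pascal's triangle (A007318). Note that the squareFree test below only
-- rules out square factors d^2 for d in [2..20], i.e. squares up to 400.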
a268681 :: Integer -> Integer
a268681 n = sum $ filter squareFree $ nub $ concatMap a007318_row [0..n-1] where
squareFree k = all (\d -> k `mod` d /= 0) $ map (^2) [2..20]
| peterokagey/haskellOEIS | src/ProjectEuler/A268681.hs | apache-2.0 | 280 | 0 | 12 | 49 | 126 | 69 | 57 | 6 | 1 |
module Helpers.CostasLikeArrays (countPermutationsUpToDihedralSymmetry, distinctDirections, distinctDistances) where
import Data.List (elemIndex, nub)
import Data.Maybe (mapMaybe)
import Data.Ratio ((%))
import Helpers.Subsets (eachPair)
type Permutation = [Int]
distinctDistances :: Permutation -> Int
distinctDistances permutation = length $ nub $ map distanceSquare $ eachPair $ zip [0..] permutation where
distanceSquare ((x1, y1), (x2, y2)) = (x1 - x2)^2 + (y1 - y2)^2
distinctDirections :: Permutation -> Int
distinctDirections permutation = length $ nub $ map direction $ eachPair $ zip [0..] permutation
-- direction :: ((Int, Int), (Int, Int)) -> Data.Ratio.Ratio Int
direction ((x1, y1), (x2, y2)) = recip ratio `min` ratio where
ratio = abs $ (x1 - x2) % (y1 - y2)
quarterTurn :: Int -> Permutation -> Permutation
quarterTurn n permutation = mapMaybe (`elemIndex` permutation) [0..n-1]
horizontalSymmetries :: Int -> [Permutation] -> [Permutation]
horizontalSymmetries n = concatMap flips where
flips permutation = [permutation, flipped] where
flipped = map (n-1-) permutation
verticalSymmetries :: [Permutation] -> [Permutation]
verticalSymmetries = concatMap flips where
flips permutation = [permutation, flipped] where
flipped = reverse permutation
-- There's surely a more elegant way to do this.
rotationalSymmetries :: Int -> [Permutation] -> [Permutation]
rotationalSymmetries n = concatMap turns where
turns permutation = [permutation, quarterTurn n permutation]
canonicalRepresentative :: Int -> Permutation -> Permutation
canonicalRepresentative n permutation = minimum $ rotationalSymmetries n $ horizontalSymmetries n $ verticalSymmetries [permutation]
countPermutationsUpToDihedralSymmetry :: Int -> [Permutation] -> Int
countPermutationsUpToDihedralSymmetry n permutations = length $ nub $ map (canonicalRepresentative n) permutations
| peterokagey/haskellOEIS | src/Helpers/CostasLikeArrays.hs | apache-2.0 | 1,882 | 0 | 11 | 267 | 580 | 319 | 261 | 30 | 1 |
-- Show expressions in prefix notation
module OperationExtension1 where
import DataBase
import DataExtension
instance Show Lit
where
show (Lit i) = "Lit " ++ show i
instance (Exp x, Exp y, Show x, Show y) => Show (Add x y)
where
show (Add x y) = "Add (" ++ show x ++ ") (" ++ show y ++ ")"
instance (Exp x, Show x) => Show (Neg x)
where
show (Neg x) = "Neg (" ++ show x ++ ")"
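-- For example (assuming the 'Exp' instances for Lit, Add and Neg from
-- DataBase/DataExtension are in scope):
--
--   show (Add (Lit 1) (Neg (Lit 2))) == "Add (Lit 1) (Neg (Lit 2))"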
| egaburov/funstuff | Haskell/tytag/xproblem_src/samples/expressions/Haskell/OpenDatatype1/OperationExtension1.hs | apache-2.0 | 392 | 0 | 10 | 95 | 175 | 89 | 86 | 9 | 0 |
module Time where
import Data.IORef (readIORef, IORef, newIORef, modifyIORef')
import Control.Monad
import qualified Graphics.UI.GLUT as GLUT
import Concurrency (writeIORef)
type FloatType = Float
type Time = FloatType
type DTime = FloatType
type TimeIORef = IORef Time
newTimeIORef :: IO TimeIORef
newTimeIORef = newIORef =<< elapsedTime
elapsedTime :: IO Time
elapsedTime = do
ms <- GLUT.get GLUT.elapsedTime
return $ fromIntegral ms / 1000
newTimeDelta :: TimeIORef -> IO DTime
newTimeDelta t = do
currentTime <- elapsedTime
lastTime <- writeIORef t currentTime
return $ currentTime - lastTime
| epeld/zatacka | old/Time.hs | apache-2.0 | 628 | 0 | 9 | 116 | 180 | 98 | 82 | 20 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.Version
-- Copyright : (c) Simon Marlow 2003
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
module Haddock.Version (
projectName, projectVersion, projectUrl
) where
import Paths_haddock_internal ( version )
import Data.Version ( showVersion )
projectName, projectUrl :: String
projectName = "Haddock"
projectUrl = "http://www.haskell.org/haddock/"
projectVersion :: String
projectVersion = showVersion version
| ghcjs/haddock-internal | src/Haddock/Version.hs | bsd-2-clause | 705 | 0 | 5 | 104 | 75 | 50 | 25 | 9 | 1 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
-- |
-- Module : FRP.Animas.Vector3
-- Copyright : (c) Antony Courtney and Henrik Nilsson, Yale University, 2003
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
-- 3D vector abstraction (R^3).
--
-- ToDo: Deriving Show, or provide dedicated show instance?
module FRP.Animas.Vector3 (
Vector3,
vector3,
vector3X,
vector3Y,
vector3Z,
vector3XYZ,
vector3Spherical,
vector3Rho,
vector3Theta,
vector3Phi,
vector3RhoThetaPhi,
vector3Rotate
) where
import FRP.Animas.VectorSpace
import FRP.Animas.Forceable
-- | 3-dimensional vector
data RealFloat a => Vector3 a = Vector3 !a !a !a deriving (Eq, Show)
-- | Construct a 3 dimensional vector
vector3 :: RealFloat a => a -- ^ X magnitude
-> a -- ^ Y magnitude
-> a -- ^ Z magnitude
-> Vector3 a -- ^ Vector
vector3 x y z = Vector3 x y z
-- | X magnitude of the vector
vector3X :: RealFloat a => Vector3 a -> a
vector3X (Vector3 x _ _) = x
-- | Y magnitude of the vector
vector3Y :: RealFloat a => Vector3 a -> a
vector3Y (Vector3 _ y _) = y
-- | Z magnitude of the vector
vector3Z :: RealFloat a => Vector3 a -> a
vector3Z (Vector3 _ _ z) = z
-- | Ordered pair of magnitudes of the vector
vector3XYZ :: RealFloat a => Vector3 a
-> (a, a, a) -- ^ (X, Y, Z)
vector3XYZ (Vector3 x y z) = (x, y, z)
-- | Spherical coordinates to vector
vector3Spherical :: RealFloat a => a -- ^ magnitude
-> a -- ^ Theta-direction
-> a -- ^ Phi-direction
-> Vector3 a
vector3Spherical rho theta phi =
Vector3 (rhoSinPhi * cos theta) (rhoSinPhi * sin theta) (rho * cos phi)
where
rhoSinPhi = rho * sin phi
-- | Magnitude of a vector
vector3Rho :: RealFloat a => Vector3 a -> a
vector3Rho (Vector3 x y z) = sqrt (x * x + y * y + z * z)
-- | Theta-direction of a vector
vector3Theta :: RealFloat a => Vector3 a -> a
vector3Theta (Vector3 x y _) = atan2 y x
-- | Phi-direction of a vector
vector3Phi :: RealFloat a => Vector3 a -> a
vector3Phi v@(Vector3 _ _ z) = acos (z / vector3Rho v)
-- | Magnitude and directions of a vector as an ordered triple
vector3RhoThetaPhi :: RealFloat a => Vector3 a
-> (a, a, a) -- ^ (Rho, Theta, Phi)
vector3RhoThetaPhi (Vector3 x y z) = (rho, theta, phi)
where
rho = sqrt (x * x + y * y + z * z)
theta = atan2 y x
phi = acos (z / rho)
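-- Note: for rho > 0, 0 < phi < pi and theta in (-pi, pi], the two
-- representations above round-trip up to floating-point rounding, e.g.
-- vector3RhoThetaPhi (vector3Spherical 2 0.5 1.0) is approximately (2, 0.5, 1.0).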
instance RealFloat a => VectorSpace (Vector3 a) a where
zeroVector = Vector3 0 0 0
a *^ (Vector3 x y z) = Vector3 (a * x) (a * y) (a * z)
(Vector3 x y z) ^/ a = Vector3 (x / a) (y / a) (z / a)
negateVector (Vector3 x y z) = (Vector3 (-x) (-y) (-z))
(Vector3 x1 y1 z1) ^+^ (Vector3 x2 y2 z2) = Vector3 (x1+x2) (y1+y2) (z1+z2)
(Vector3 x1 y1 z1) ^-^ (Vector3 x2 y2 z2) = Vector3 (x1-x2) (y1-y2) (z1-z2)
(Vector3 x1 y1 z1) `dot` (Vector3 x2 y2 z2) = x1 * x2 + y1 * y2 + z1 * z2
-- | Rotate a vector
vector3Rotate :: RealFloat a =>
a -- ^ Difference of theta
-> a -- ^ Difference of phi
-> Vector3 a -- ^ Initial vector
-> Vector3 a -- ^ Rotated vector
vector3Rotate theta' phi' v =
vector3Spherical (vector3Rho v)
(vector3Theta v + theta')
(vector3Phi v + phi')
instance RealFloat a => Forceable (Vector3 a) where
force = id
| eamsden/Animas | src/FRP/Animas/Vector3.hs | bsd-3-clause | 3,576 | 5 | 14 | 1,000 | 1,156 | 607 | 549 | 75 | 1 |
{-# LANGUAGE LambdaCase, OverloadedStrings #-}
-- | Bash script evaluation.
module Bash.Config.Eval
( Eval(..)
, interpret
) where
import Control.Applicative
import Control.Monad.Reader.Class
import Control.Monad.State.Class
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid hiding (Last)
import Bash.Config.Cond
import Bash.Config.Expand
import Bash.Config.Types
import Bash.Config.Word
-- | Interpret a script or function, returning the resulting environment
-- variables and function definitions. Any variables or functions missing
-- are assumed to be unknown.
interpret :: Eval a => a -> Env -> Either String Env
interpret a = fmap snd . runBash (eval a) Clean
-- | Evaluate with a dirty status.
dirty :: Eval a => a -> Bash ExitStatus
dirty a = Nothing <$ local (const Dirty) (eval a)
-- | Execute in a subshell. Environment changes during the subshell execution
-- will not affect the outside environment.
subshell :: Eval a => a -> Bash ExitStatus
subshell a = do
env <- get
r <- eval a
put env
return r
-- | Execute an assignment builtin.
assignBuiltin :: Word -> [Either Assign Word] -> Bash ExitStatus
assignBuiltin b args = Nothing <$ case Map.lookup b assignBuiltins of
Nothing -> return ()
Just f -> mapM_ f args
where
assignBuiltins = Map.fromList $
[ ("alias" , \_ -> return ())
, ("declare" , perform )
, ("export" , perform )
, ("local" , unassign)
, ("readonly", unassign)
, ("typeset" , perform )
]
perform (Left a) = () <$ eval a
perform _ = return ()
unassign (Left (Assign n _ _)) = unset n
unassign (Right w) = unset (toString w)
-- | Execute a simple command.
command :: String -> [String] -> Bash ExitStatus
command name args = do
defined <- gets functions
let allCommands = builtins <> fmap (const . eval) defined
case Map.lookup name allCommands of
Nothing -> return Nothing
Just f -> f args
-- | Execute a function definition.
functionDef :: Word -> Function -> Bash ExitStatus
functionDef w f = Just True <$ define name f
<|> Nothing <$ undefine name
where
name = toString w
-- | Interpreter builtins. These are commands that the interpreter knows
-- how to execute. Any command not in this map is assumed to be user-defined,
-- or external.
--
-- The implemented builtins are @test@, @[@, @true@, and @false@. Most shell
-- builtins are assumed to have unpredictable effects and will cause the
-- interpreter to fail. However, some shell builtins, such as
-- @break@, @continue@, @pwd@, etc. are assumed to be safe. Builtins that
-- could take an assignment as a parameter are implemented separately.
builtins :: Map String ([String] -> Bash ExitStatus)
builtins = Map.fromList $
-- implemented builtins
[ ("test" , return . test )
, ("[" , return . test_)
, ("true" , \_ -> return (Just True) )
, ("false", \_ -> return (Just False))
]
-- unsafe builtins
++ map (\name -> (name, \_ -> unimplemented name))
[ ".", "builtin", "caller", "enable", "exec", "exit", "let"
, "logout", "mapfile", "read", "readarray", "return", "source"
, "trap", "unset", "unalias"
]
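-- A couple of illustrative lookups (added commentary, not from the original
-- module), assuming no user-defined shell functions are in scope:
--
--   command "true" []      -- evaluates to Just True (implemented builtin)
--   command "echo" ["hi"]  -- evaluates to Nothing: an unknown external
--                          -- command, so its exit status is unpredictable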
-- | Executable commands.
class Eval a where
-- | Execute a command, and return its return value.
eval :: a -> Bash ExitStatus
instance Eval a => Eval [a] where
eval [] = return (Just True)
eval cs = last <$> mapM eval cs
instance Eval Script where
eval (Script l) = eval l
instance Eval Command where
eval (Simple c) = eval c
eval (Shell c) = eval c
eval (FunctionDef w f) = functionDef w f
eval Coproc = unimplemented "coproc"
instance Eval List where
eval (List cs) = eval cs
instance Eval AndOr where
eval (Last p ) = eval p
eval (And p cs) = eval p >>= \case
Nothing -> dirty cs
Just False -> return (Just False)
Just True -> eval cs
eval (Or p cs) = eval p >>= \case
Nothing -> dirty cs
Just False -> eval cs
Just True -> return (Just True)
instance Eval Pipeline where
eval (Pipeline b cs) = bang $ case cs of
[] -> return (Just True)
[c] -> eval c
_ -> subshell cs
where
bang = if b then invert else id
invert = fmap (fmap not)
instance Eval SimpleCommand where
eval (SimpleCommand as ws) = optional (expandWordList ws) >>= \case
Nothing -> return Nothing
Just [] -> eval as
Just (c:args) -> command c args
eval (AssignCommand b args) = assignBuiltin b args
instance Eval Assign where
eval (Assign name op a) = Just True <$ (assign name =<< expandValue a)
<|> Nothing <$ unset name
where
assign = case op of
Equals -> set
PlusEquals -> augment
instance Eval Function where
eval (Function body) = eval body
instance Eval ShellCommand where
eval (Subshell l ) = subshell l
eval (Group l ) = eval l
eval (Arith s ) = unimplemented $ "((" ++ s ++ "))"
eval (Cond ws ) = cond ws
eval (For _ _ l ) = dirty l
eval (ArithFor s _) = unimplemented $ "for ((" ++ s ++ "))"
eval (Select _ _ l) = dirty l
eval (Case _ cs ) = eval cs
eval (If p t f ) = eval p >>= \case
Nothing -> dirty t >> dirty f
Just r -> eval $ if r then t else f
eval (Until p l ) = dirty p >> dirty l
eval (While p l ) = dirty p >> dirty l
instance Eval CaseClause where
eval (CaseClause _ l _) = dirty l
| knrafto/bash-config | src/Bash/Config/Eval.hs | bsd-3-clause | 5,812 | 0 | 13 | 1,746 | 1,829 | 931 | 898 | 121 | 4 |
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
module Silvi
( module Silvi.Encode
, module Silvi.Random
, module Silvi.Types
) where
import Silvi.Encode
import Silvi.Random
import Silvi.Types
| chessai/silvi | src/Silvi.hs | bsd-3-clause | 203 | 0 | 5 | 30 | 40 | 26 | 14 | 8 | 0 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.NV.FramebufferMultisampleCoverage
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.NV.FramebufferMultisampleCoverage (
-- * Extension Support
glGetNVFramebufferMultisampleCoverage,
gl_NV_framebuffer_multisample_coverage,
-- * Enums
pattern GL_MAX_MULTISAMPLE_COVERAGE_MODES_NV,
pattern GL_MULTISAMPLE_COVERAGE_MODES_NV,
pattern GL_RENDERBUFFER_COLOR_SAMPLES_NV,
pattern GL_RENDERBUFFER_COVERAGE_SAMPLES_NV,
-- * Functions
glRenderbufferStorageMultisampleCoverageNV
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/NV/FramebufferMultisampleCoverage.hs | bsd-3-clause | 959 | 0 | 5 | 113 | 72 | 52 | 20 | 12 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : SMSAero.Types
-- Copyright : (c) 2016, GetShopTV
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- This module defines types used in SMSAero API.
module SMSAero.Types (
SMSAeroAuth(..),
Signature(..),
MessageId(..),
MessageBody(..),
Group(..),
Phone(..),
SMSAeroDate(..),
SendType(..),
DigitalChannel(..),
Name(..),
BirthDate(..),
ChannelName,
) where
import Control.Applicative (empty)
import Data.Aeson
import Data.Int (Int64)
import Data.Monoid
import Data.Time (UTCTime)
import Data.Time.Calendar (Day)
import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds, posixSecondsToUTCTime)
import Data.Text (Text)
import qualified Data.Text as Text
import Web.HttpApiData
-- | SMSAero sender's signature. This is used for the "from" field.
newtype Signature = Signature { getSignature :: Text } deriving (Eq, Show, FromJSON, ToJSON, ToHttpApiData, FromHttpApiData)
-- | SMSAero sent message id.
newtype MessageId = MessageId Int64
deriving (Eq, Show, Ord, FromJSON, ToJSON, ToHttpApiData, FromHttpApiData
#if MIN_VERSION_aeson(1,0,0)
, ToJSONKey, FromJSONKey
#endif
)
-- | SMSAero message body.
newtype MessageBody = MessageBody Text deriving (Eq, Show, FromJSON, ToJSON, ToHttpApiData, FromHttpApiData)
-- | SMSAero group name.
newtype Group = Group Text deriving (Eq, Show, FromJSON, ToJSON, ToHttpApiData, FromHttpApiData)
-- | SMSAero channel name.
type ChannelName = Text
-- | SMSAero authentication data.
data SMSAeroAuth = SMSAeroAuth
{ authUser :: Text -- ^ Username.
, authPassword :: Text -- ^ MD5 hash of a password.
}
instance FromJSON SMSAeroAuth where
parseJSON (Object o) = SMSAeroAuth
<$> o .: "user"
<*> o .: "password"
parseJSON _ = empty
instance ToJSON SMSAeroAuth where
toJSON SMSAeroAuth{..} = object
[ "user" .= authUser
, "password" .= authPassword ]
-- | Phone number.
newtype Phone = Phone { getPhone :: Int64 } deriving (Eq, Show, ToHttpApiData, FromHttpApiData)
-- | Date. Textually @SMSAeroDate@ is represented as a number of seconds since 01 Jan 1970.
newtype SMSAeroDate = SMSAeroDate { getSMSAeroDate :: UTCTime } deriving (Eq, Show)
instance ToHttpApiData SMSAeroDate where
toQueryParam (SMSAeroDate dt) = Text.pack (show (utcTimeToPOSIXSeconds dt))
instance FromHttpApiData SMSAeroDate where
parseQueryParam s = do
n <- fromInteger <$> parseQueryParam s
return (SMSAeroDate (posixSecondsToUTCTime n))
-- | Send type. This is used to describe the send channel; it defaults to @FreeSignatureExceptMTC@.
-- Textually @SendType@ is represented as a number from 1 to 6, excluding 5.
data SendType
= PaidSignature -- ^ Paid literal signature for all operators.
| FreeSignatureExceptMTC -- ^ Free literal signature for all operators except MTS.
| FreeSignature -- ^ Free literal signature for all operators.
| InfoSignature -- ^ Infosignature for all operators.
| International -- ^ International delivery (for RU and KZ operators).
deriving (Eq, Show, Bounded, Enum)
-- | Digital send channel. Textually represented as '1' if the parameter is present.
data DigitalChannel = DigitalChannel
instance ToHttpApiData DigitalChannel where
toQueryParam _ = "1"
instance FromHttpApiData DigitalChannel where
parseQueryParam "1" = Right DigitalChannel
parseQueryParam x = Left ("expected 1 for digital channel (but got " <> x <> ")")
instance ToHttpApiData SendType where
toQueryParam PaidSignature = "1"
toQueryParam FreeSignatureExceptMTC = "2"
toQueryParam FreeSignature = "3"
toQueryParam InfoSignature = "4"
toQueryParam International = "6"
instance FromHttpApiData SendType where
parseQueryParam = parseBoundedQueryParam
-- | Subscriber's name.
newtype Name = Name Text deriving (Eq, Show, ToHttpApiData, FromHttpApiData)
-- | Subscriber's birth date. Textually represented in %Y-%m-%d format.
newtype BirthDate = BirthDate Day deriving (Eq, Show, ToHttpApiData, FromHttpApiData)
| GetShopTV/smsaero | src/SMSAero/Types.hs | bsd-3-clause | 4,219 | 0 | 12 | 768 | 841 | 489 | 352 | 76 | 0 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
import Control.Applicative
import Control.Exception
import Control.Monad
import Data.List
import Data.Traversable (for)
import Distribution.PackageDescription
import Distribution.Simple
import Distribution.Simple.BuildPaths
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Setup
import Distribution.Simple.Utils
import Distribution.Simple.Program
import qualified Distribution.Verbosity as Verbosity
import System.Directory
import System.FilePath
import System.Info
main :: IO ()
main = defaultMainWithHooks simpleUserHooks
{ confHook = customConfHook
, buildHook = customBuildHook
, copyHook = customCopyHook
, cleanHook = customCleanHook
, hookedPrograms = hookedPrograms simpleUserHooks
++ [ makeProgram ]
}
customConfHook :: (GenericPackageDescription, HookedBuildInfo) -> ConfigFlags
-> IO LocalBuildInfo
customConfHook (pkg, pbi) flags = do
(_, includeDir, _) <- libvoyeurPaths
let addIncludeDirs = (onLocalLibBuildInfo . onIncludeDirs) (++ [".", includeDir])
addLibs = if os == "darwin"
then id
else (onLocalLibBuildInfo . onLdOptions) (++ ["-lbsd"])
lbi <- confHook simpleUserHooks (pkg, pbi) flags
return $ (addLibs . addIncludeDirs) lbi
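-- After Cabal's normal build step, the hook below runs make for the bundled C
-- library and then uses ar to append its object files to each library Cabal
-- produced (vanilla, profiling and shared variants, when enabled), so the C
-- code ships inside the Haskell package's libraries.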
customBuildHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> BuildFlags -> IO ()
customBuildHook pkg lbi usrHooks flags = do
putStrLn "Building libvoyeur..."
(libvoyeurDir, _, libDir) <- libvoyeurPaths
let verbosity = fromFlag (buildVerbosity flags)
runMake = runDbProgram verbosity makeProgram (withPrograms lbi)
inDir libvoyeurDir $
runMake []
buildHook simpleUserHooks pkg lbi usrHooks flags
notice verbosity "Relinking libvoyeur.."
let libObjs = map (libObjPath libDir) [ "voyeur"
, "net"
, "env"
, "event"
, "util"
]
componentLibs = concatMap componentLibNames $ componentsConfigs lbi
addStaticObjectFile libName objName = runAr ["r", libName, objName]
runAr = runDbProgram verbosity arProgram (withPrograms lbi)
forM_ componentLibs $ \componentLib -> do
when (withVanillaLib lbi) $
let libName = buildDir lbi </> mkLibName componentLib
in mapM_ (addStaticObjectFile libName) libObjs
when (withProfLib lbi) $
let libName = buildDir lbi </> mkProfLibName componentLib
in mapM_ (addStaticObjectFile libName) libObjs
when (withSharedLib lbi) $
let libName = buildDir lbi </> mkSharedLibName buildCompilerId componentLib
in mapM_ (addStaticObjectFile libName) libObjs
customCopyHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> CopyFlags -> IO ()
customCopyHook pkg lbi hooks flags = do
let verb = fromFlagOrDefault Verbosity.normal $ copyVerbosity flags
copyHook simpleUserHooks pkg lbi hooks flags
putStrLn "Installing libvoyeur helper libraries..."
let helperLibs = [ "exec", "exit", "open", "close" ]
helperLibFiles = map (("libvoyeur-" ++) . (<.> dllExtension)) helperLibs
helperLibDir = datadir (absoluteInstallDirs pkg lbi NoCopyDest)
(_, _, libDir) <- libvoyeurPaths
copyFiles verb helperLibDir $ map (libDir,) helperLibFiles
customCleanHook :: PackageDescription -> () -> UserHooks -> CleanFlags -> IO ()
customCleanHook pkg v hooks flags = do
putStrLn "Cleaning libvoyeur..."
let verb = fromFlagOrDefault Verbosity.normal $ cleanVerbosity flags
pgmConf <- configureProgram verb (simpleProgram "make") defaultProgramConfiguration
(libvoyeurDir, _, _) <- libvoyeurPaths
inDir libvoyeurDir $
runDbProgram verb makeProgram pgmConf ["clean"]
cleanHook simpleUserHooks pkg v hooks flags
libvoyeurPaths :: IO (FilePath, FilePath, FilePath)
libvoyeurPaths = do
curDir <- getCurrentDirectory
return (curDir </> "libvoyeur",
curDir </> "libvoyeur" </> "include",
curDir </> "libvoyeur" </> "build")
componentLibNames :: (ComponentName, ComponentLocalBuildInfo, [ComponentName]) -> [LibraryName]
componentLibNames (_, LibComponentLocalBuildInfo {..}, _) = componentLibraries
componentLibNames _ = []
makeProgram :: Program
makeProgram = simpleProgram "make"
libObjPath :: FilePath -> FilePath -> FilePath
libObjPath dir name = dir </> name <.> objExtension
inDir :: FilePath -> IO a -> IO a
inDir dir act = do
curDir <- getCurrentDirectory
bracket_ (setCurrentDirectory dir)
(setCurrentDirectory curDir)
act
type Lifter a b = (a -> a) -> b -> b
onLocalPkgDescr :: Lifter PackageDescription LocalBuildInfo
onLocalPkgDescr f lbi = lbi { localPkgDescr = f (localPkgDescr lbi) }
onLibrary :: Lifter Library PackageDescription
onLibrary f lpd = lpd { library = f <$> library lpd }
onLibBuildInfo :: Lifter BuildInfo Library
onLibBuildInfo f lib = lib { libBuildInfo = f (libBuildInfo lib) }
onLocalLibBuildInfo :: Lifter BuildInfo LocalBuildInfo
onLocalLibBuildInfo = onLocalPkgDescr . onLibrary . onLibBuildInfo
onIncludeDirs :: Lifter [FilePath] BuildInfo
onIncludeDirs f libbi = libbi { includeDirs = f (includeDirs libbi) }
onLdOptions :: Lifter [FilePath] BuildInfo
onLdOptions f libbi = libbi { ldOptions = f (ldOptions libbi) }
onPkgDescr :: Lifter PackageDescription GenericPackageDescription
onPkgDescr f gpd = gpd { packageDescription = f (packageDescription gpd) }
onExtraSrcFiles :: Lifter [FilePath] PackageDescription
onExtraSrcFiles f pd = pd { extraSrcFiles = f (extraSrcFiles pd) }
| sethfowler/hslibvoyeur | Setup.hs | bsd-3-clause | 5,766 | 2 | 17 | 1,270 | 1,568 | 802 | 766 | 119 | 2 |
-- | This module exports the types used to create flag writes.
module Data.Factual.Write.Flag
(
-- * Flag type
Flag(..)
-- * Problem type
, Problem(..)
-- * Required modules
, module Data.Factual.Shared.Table
) where
import Data.Factual.Write
import Data.Factual.Shared.Table
import Data.Maybe (fromJust)
import Data.List.Utils (join)
import Data.Factual.Utils
import qualified Data.Map as M
-- | A Problem represents what is wrong with the row being flagged
data Problem = Duplicate
| Nonexistent
| Inaccurate
| Inappropriate
| Spam
| Other
deriving (Eq, Show)
-- | The Flag type represents a Write to be made to the API which flags a
-- row as having some kind of problem. The table and factualId identify the
-- problematic row, while the problem indicates the type of issue the row
-- has. The user is specified as a string. Other fields such as comment and
-- reference are optional. The debug flag is used to write in debug mode.
data Flag = Flag { table :: Table
, factualId :: String
, problem :: Problem
, user :: String
, comment :: Maybe String
, dataJSON :: Maybe String
, fields :: Maybe [String]
, reference :: Maybe String
} deriving (Eq, Show)
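-- A sketch of a fully populated Flag value (the factual ID and user below are
-- made up, and 'Places' is assumed to be one of the constructors of 'Table'):
--
-- > exampleFlag :: Flag
-- > exampleFlag = Flag { table = Places
-- >                    , factualId = "03c26917-5d66-4de9-96bc-b13066173c65"
-- >                    , problem = Duplicate
-- >                    , user = "a_user_id"
-- >                    , comment = Just "Duplicate of another row"
-- >                    , dataJSON = Nothing
-- >                    , fields = Nothing
-- >                    , reference = Nothing
-- >                    }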
-- The Flag type is a member of the Write typeclass so it can be sent as a post
-- request to the API.
instance Write Flag where
path flag = (show $ table flag) ++ "/" ++ (factualId flag) ++ "/flag"
params _ = M.empty
body flag = M.fromList [ ("problem", show $ problem flag)
, ("user", user flag)
, commentPair flag
, dataPair flag
, fieldsPair flag
, referencePair flag ]
-- The following functions are helpers for the body function
commentPair :: Flag -> (String, String)
commentPair flag
| comment flag == Nothing = ("comment", "")
| otherwise = ("comment", fromJust $ comment flag)
dataPair :: Flag -> (String, String)
dataPair flag
| dataJSON flag == Nothing = ("data", "")
| otherwise = ("data", fromJust $ dataJSON flag)
fieldsPair :: Flag -> (String, String)
fieldsPair flag
| fields flag == Nothing = ("fields", "")
| otherwise = ("fields", arrayString)
where arrayString = "[" ++ (join "," $ fromJust $ fields flag) ++ "]"
referencePair :: Flag -> (String, String)
referencePair flag
| reference flag == Nothing = ("reference", "")
| otherwise = ("reference", fromJust $ reference flag)
| rudyl313/factual-haskell-driver | Data/Factual/Write/Flag.hs | bsd-3-clause | 2,737 | 0 | 12 | 868 | 609 | 343 | 266 | 53 | 1 |
module Language.SequentCore.Driver.Flags (
SeqFlags(..), SeqDumpFlag(..), SeqGeneralFlag(..),
FloatOutSwitches(..), FinalPassSwitches(..), ContifySwitches(..),
sgopt, sgopt_set, sgopt_unset,
sdopt, sdopt_set, sdopt_unset,
parseSeqFlags
) where
import CmdLineParser
import FastString
import MonadUtils
import Outputable
import Panic
import SrcLoc
import Control.Monad
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
parseSeqFlags :: MonadIO m => [String]
-> m (SeqFlags, [String], [String])
parseSeqFlags args = do
let ((leftover, errs, warns), sflags)
= runCmdLine (processArgs seqFlags (map noLoc args))
defaultSeqFlags
unless (null errs) $ liftIO $
throwGhcExceptionIO $ errorsToGhcException errs
return (sflags, map unLoc leftover, map unLoc warns)
data SeqDumpFlag
= Opt_D_dump_llf
| Opt_D_dump_seq_xlate
| Opt_D_dump_seq_pipeline
| Opt_D_dump_cfy_stats
deriving (Eq, Ord, Enum)
data SeqGeneralFlag
= Opt_EnableSeqSimpl -- ^ Use Sequent Core simplifier (Language.SequentCore.Simpl)
| Opt_EnableSeqFloatOut -- ^ Use Sequent Core implementation of Float Out (Language.SequentCore.FloatOut)
| Opt_EnableSeqSpecConstr -- ^ Use Sequent Core implementation of SpecConstr (Language.SequentCore.SpecConstr)
| Opt_EnableContify -- ^ Use contification pass (aggressive mode)
| Opt_CombineSeqPasses -- ^ Avoid churning between Core and Sequent Core
-- TODO Contify more often so that there is nothing to gain by going back and forth
| Opt_ContifyBetweenSeqPasses -- ^ Contify (gently) between consecutive Sequent Core passes
| Opt_Contify_Simpl -- ^ Run (Sequent Core) simplifier after full contification
| Opt_CoreSimplAtEnd -- ^ Run the original simplifier at the very end of the pipeline
| Opt_SeqSimplAtEnd -- ^ Run the Sequent Core simplifier at the very end of the pipeline
| Opt_ProtectLastValArg
| Opt_IgnoreRealWorld
| Opt_FloatNullaryJoins -- ^ Always allowed to float a nullary join point
| Opt_LLF -- ^ Enable the late lambda lift pass
| Opt_LLF_AbsUnsat -- ^ allowed to abstract undersaturated applied let-bound variables?
| Opt_LLF_AbsSat -- ^ allowed to abstract saturated applied let-bound variables?
| Opt_LLF_AbsOversat -- ^ allowed to abstract oversaturated applied let-bound variables?
| Opt_LLF_CreatePAPs -- ^ allowed to float function bindings that occur unapplied
| Opt_LLF_Simpl -- ^ follow the late lambda lift with a simplification pass?
| Opt_LLF_Stabilize
| Opt_LLF_UseStr -- ^ use strictness in the late lambda float
| Opt_LLF_OneShot
deriving (Eq, Ord, Enum)
data SeqFlags = SeqFlags {
seqDumpFlags :: IntSet,
seqGeneralFlags :: IntSet,
lateFloatNonRecLam :: Maybe Int, -- ^ Limit on # abstracted variables for *late* non-recursive function floating (Nothing => all, Just 0 => none)
lateFloatRecLam :: Maybe Int, -- ^ " " " " " for *late* recursive function floating
lateFloatIfInClo :: Maybe Int, -- ^ Limit on # abstracted variables for floating a binding that occurs in a closure
lateFloatCloGrowth :: Maybe Int, -- ^ Limit on # additional free variables for closures in which the function occurs
lateFloatCloGrowthInLam :: Maybe Int
}
defaultSeqFlags :: SeqFlags
defaultSeqFlags =
SeqFlags {
seqDumpFlags = IntSet.empty,
seqGeneralFlags = IntSet.fromList (map fromEnum defaultGeneralFlags),
lateFloatNonRecLam = Just 10,
lateFloatRecLam = Just 6,
lateFloatIfInClo = Nothing,
lateFloatCloGrowth = Just 0,
lateFloatCloGrowthInLam = Just 0
}
defaultGeneralFlags :: [SeqGeneralFlag]
defaultGeneralFlags = [ Opt_LLF_AbsUnsat, Opt_LLF_UseStr, Opt_LLF_OneShot,
Opt_LLF_Simpl, Opt_LLF_Stabilize ]
-- | Test whether a 'SeqGeneralFlag' is set
sgopt :: SeqGeneralFlag -> SeqFlags -> Bool
sgopt f sflags = fromEnum f `IntSet.member` seqGeneralFlags sflags
-- | Set a 'SeqGeneralFlag'
sgopt_set :: SeqFlags -> SeqGeneralFlag -> SeqFlags
sgopt_set sfs f = sfs{ seqGeneralFlags = IntSet.insert (fromEnum f) (seqGeneralFlags sfs) }
-- | Unset a 'SeqGeneralFlag'
sgopt_unset :: SeqFlags -> SeqGeneralFlag -> SeqFlags
sgopt_unset sfs f = sfs{ seqGeneralFlags = IntSet.delete (fromEnum f) (seqGeneralFlags sfs) }
-- | Test whether a 'SeqDumpFlag' is set
sdopt :: SeqDumpFlag -> SeqFlags -> Bool
sdopt f sflags = fromEnum f `IntSet.member` seqDumpFlags sflags
-- | Set a 'SeqDumpFlag'
sdopt_set :: SeqFlags -> SeqDumpFlag -> SeqFlags
sdopt_set sfs f = sfs{ seqDumpFlags = IntSet.insert (fromEnum f) (seqDumpFlags sfs) }
-- | Unset a 'SeqDumpFlag'
sdopt_unset :: SeqFlags -> SeqDumpFlag -> SeqFlags
sdopt_unset sfs f = sfs{ seqDumpFlags = IntSet.delete (fromEnum f) (seqDumpFlags sfs) }
seqFlags :: [Flag (CmdLineP SeqFlags)]
seqFlags = [
Flag "ddump-llf" (setDumpFlag Opt_D_dump_llf)
, Flag "ddump-seq-xlate" (setDumpFlag Opt_D_dump_seq_xlate)
, Flag "ddump-seq-pipeline" (setDumpFlag Opt_D_dump_seq_pipeline)
, Flag "ddump-cfy-stats" (setDumpFlag Opt_D_dump_cfy_stats)
, Flag "fllf-nonrec-lam-limit" (intSuffix (\n f -> f{ lateFloatNonRecLam = Just n }))
, Flag "fllf-nonrec-lam-any" (noArg (\f -> f{ lateFloatNonRecLam = Nothing }))
, Flag "fno-llf-nonrec-lam" (noArg (\f -> f{ lateFloatNonRecLam = Just 0 }))
, Flag "fllf-rec-lam-limit" (intSuffix (\n f -> f{ lateFloatRecLam = Just n }))
, Flag "fllf-rec-lam-any" (noArg (\f -> f{ lateFloatRecLam = Nothing }))
, Flag "fno-llf-rec-lam" (noArg (\f -> f{ lateFloatRecLam = Just 0 }))
, Flag "fllf-clo-growth-limit" (intSuffix (\n f -> f{ lateFloatCloGrowth = Just n }))
, Flag "fllf-clo-growth-any" (noArg (\f -> f{ lateFloatCloGrowth = Nothing }))
, Flag "fno-llf-clo-growth" (noArg (\f -> f{ lateFloatCloGrowth = Just 0 }))
, Flag "fllf-in-clo-limit" (intSuffix (\n f -> f{ lateFloatIfInClo = Just n }))
, Flag "fllf-in-clo-any" (noArg (\f -> f{ lateFloatIfInClo = Nothing }))
, Flag "fno-llf-in-clo" (noArg (\f -> f{ lateFloatIfInClo = Just 0 }))
, Flag "fllf-clo-growth-in-lam-limit" (intSuffix (\n f -> f{ lateFloatCloGrowthInLam = Just n }))
, Flag "fllf-clo-growth-in-lam-any" (noArg (\f -> f{ lateFloatCloGrowthInLam = Nothing }))
, Flag "fno-llf-clo-growth-in-lam" (noArg (\f -> f{ lateFloatCloGrowthInLam = Just 0 }))
]
++ map (mkFlag turnOn "f" setGeneralFlag ) sFlags
++ map (mkFlag turnOff "fno-" unSetGeneralFlag) sFlags
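-- For illustration, the explicit entries above should accept flags such as
-- @-fllf-nonrec-lam-limit=10@ (assuming the usual GHC-style @=N@ suffix for
-- 'intSuffix' flags) and @-fno-llf-clo-growth@, while the 'sFlags' table below
-- is expanded into paired switches such as @-fseq-simpl@ / @-fno-seq-simpl@
-- and @-fllf@ / @-fno-llf@.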
type TurnOnFlag = Bool -- True <=> we are turning the flag on
-- False <=> we are turning the flag off
turnOn :: TurnOnFlag; turnOn = True
turnOff :: TurnOnFlag; turnOff = False
type FlagSpec flag
= ( String -- Flag in string form
, flag -- Flag in internal form
, TurnOnFlag -> DynP ()) -- Extra action to run when the flag is found
-- Typically, emit a warning or error
mkFlag :: TurnOnFlag -- ^ True <=> it should be turned on
-> String -- ^ The flag prefix
-> (flag -> DynP ()) -- ^ What to do when the flag is found
-> FlagSpec flag -- ^ Specification of this particular flag
-> Flag (CmdLineP SeqFlags)
mkFlag turn_on flagPrefix f (name, flag, extra_action)
= Flag (flagPrefix ++ name) (NoArg (f flag >> extra_action turn_on))
nop :: TurnOnFlag -> DynP ()
nop _ = return ()
sFlags :: [FlagSpec SeqGeneralFlag]
sFlags = [
( "seq-simpl", Opt_EnableSeqSimpl, nop),
( "seq-full-laziness", Opt_EnableSeqFloatOut, nop),
( "seq-spec-constr", Opt_EnableSeqSpecConstr, nop),
( "seq-contification", Opt_EnableContify, nop),
( "seq-combine-passes", Opt_CombineSeqPasses, nop),
( "seq-contify-between", Opt_ContifyBetweenSeqPasses, nop),
( "seq-contification-simpl", Opt_Contify_Simpl, nop),
( "seq-core-simpl-at-end", Opt_CoreSimplAtEnd, nop),
( "seq-simpl-at-end", Opt_SeqSimplAtEnd, nop),
( "llf", Opt_LLF, nop),
( "llf-abstract-undersat", Opt_LLF_AbsUnsat, nop),
( "llf-abstract-sat", Opt_LLF_AbsSat, nop),
( "llf-abstract-oversat", Opt_LLF_AbsOversat, nop),
( "llf-create-PAPs", Opt_LLF_CreatePAPs, nop),
( "llf-simpl", Opt_LLF_Simpl, nop),
( "llf-stabilize", Opt_LLF_Stabilize, nop),
( "llf-use-strictness", Opt_LLF_UseStr, nop),
( "llf-oneshot", Opt_LLF_OneShot, nop),
( "float-nullary-joins", Opt_FloatNullaryJoins, nop)
]
type DynP = EwM (CmdLineP SeqFlags)
noArg :: (SeqFlags -> SeqFlags) -> OptKind (CmdLineP SeqFlags)
noArg fn = NoArg (upd fn)
intSuffix :: (Int -> SeqFlags -> SeqFlags) -> OptKind (CmdLineP SeqFlags)
intSuffix fn = IntSuffix (\n -> upd (fn n))
upd :: (SeqFlags -> SeqFlags) -> DynP ()
upd f = liftEwM (do dflags <- getCmdLineState
putCmdLineState $! f dflags)
setDumpFlag :: SeqDumpFlag -> OptKind (CmdLineP SeqFlags)
setDumpFlag dump_flag = NoArg (setDumpFlag' dump_flag)
--------------------------
setGeneralFlag, unSetGeneralFlag :: SeqGeneralFlag -> DynP ()
setGeneralFlag f = upd (setGeneralFlag' f)
unSetGeneralFlag f = upd (unSetGeneralFlag' f)
setGeneralFlag' :: SeqGeneralFlag -> SeqFlags -> SeqFlags
setGeneralFlag' f dflags = sgopt_set dflags f
unSetGeneralFlag' :: SeqGeneralFlag -> SeqFlags -> SeqFlags
unSetGeneralFlag' f dflags = sgopt_unset dflags f
setDumpFlag' :: SeqDumpFlag -> DynP ()
setDumpFlag' dump_flag = upd (\dfs -> sdopt_set dfs dump_flag)
--------------------------
-- These two datatypes are copied from CoreMonad in the wip/llf branch. Defined
-- here so that both Driver and FloatOut can use them.
data FloatOutSwitches = FloatOutSwitches {
floatOutLambdas :: Maybe Int,
-- ^ Just n <=> float lambdas to top level, if doing so will
-- abstract over n or fewer value variables Nothing <=> float all
-- lambdas to top level, regardless of how many free variables Just
-- 0 is the vanilla case: float a lambda iff it has no free vars
floatOutConstants :: Bool,
-- ^ True <=> float constants to top level, even if they do not
-- escape a lambda
floatOutPartialApplications :: Bool,
-- ^ True <=> float out partial applications based on arity
-- information.
finalPass_ :: Maybe FinalPassSwitches
-- ^ Nothing <=> not the final pass, behave like normal
}
data FinalPassSwitches = FinalPassSwitches
{ fps_rec :: !(Maybe Int)
-- ^ used as floatOutLambdas for recursive lambdas
, fps_absUnsatVar :: !Bool
-- ^ abstract over undersaturated applied variables?
, fps_absSatVar :: !Bool
-- ^ abstract over exactly saturated applied variables? Doing so might lose some fast entries
, fps_absOversatVar :: !Bool
-- ^ abstracting over oversaturated applied variables?
, fps_createPAPs :: !Bool
  -- ^ allowed to float functions occurring unapplied
, fps_cloGrowth :: !(Maybe Int)
-- ^ limits the number of free variables added to closures using the floated function
, fps_ifInClo :: !(Maybe Int)
-- ^ limits the number of abstracted variables allowed if the binder occurs in a closure
, fps_stabilizeFirst :: !Bool
-- ^ stabilizes an unstable unfolding before floating things out of
-- it, since floating out precludes specialization at the call-site
, fps_cloGrowthInLam :: !(Maybe Int)
-- ^ disallow the floating of a binding if it occurs in closure that
-- is allocated inside a lambda
, fps_trace :: !Bool
, fps_strictness :: !Bool
, fps_oneShot :: !Bool
}
instance Outputable FloatOutSwitches where
ppr = pprFloatOutSwitches
pprFloatOutSwitches :: FloatOutSwitches -> SDoc
pprFloatOutSwitches sw
= ptext (sLit "FOS") <+> (braces $
sep $ punctuate comma $
[ ptext (sLit "Lam =") <+> ppr (floatOutLambdas sw)
, ptext (sLit "Consts =") <+> ppr (floatOutConstants sw)
, ptext (sLit "PAPs =") <+> ppr (floatOutPartialApplications sw)
, ptext (sLit "Late =") <+> ppr (finalPass_ sw)])
instance Outputable FinalPassSwitches where
ppr = pprFinalPassSwitches
pprFinalPassSwitches :: FinalPassSwitches -> SDoc
pprFinalPassSwitches sw = sep $ punctuate comma $
[ ptext (sLit "Rec =") <+> ppr (fps_rec sw)
, ptext (sLit "AbsUnsatVar =") <+> ppr (fps_absUnsatVar sw)
, ptext (sLit "AbsSatVar =") <+> ppr (fps_absSatVar sw)
, ptext (sLit "AbsOversatVar =") <+> ppr (fps_absOversatVar sw)
, ptext (sLit "ClosureGrowth =") <+> ppr (fps_cloGrowth sw)
, ptext (sLit "ClosureGrowthInLam =") <+> ppr (fps_cloGrowthInLam sw)
, ptext (sLit "StabilizeFirst =") <+> ppr (fps_stabilizeFirst sw)
]
data ContifySwitches = ContifySwitches {
cs_gentle :: Bool
-- ^ True <=> minimal effort, as happens automatically after translation
}
instance Outputable ContifySwitches where
ppr = pprContifySwitches
pprContifySwitches :: ContifySwitches -> SDoc
pprContifySwitches sw
= text "ContifySwitches" <+> braces (text "Gentle =" <+> ppr (cs_gentle sw))
| lukemaurer/sequent-core | src/Language/SequentCore/Driver/Flags.hs | bsd-3-clause | 13,520 | 0 | 15 | 3,163 | 2,863 | 1,595 | 1,268 | 235 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS -fno-warn-name-shadowing #-}
module Snap.Snaplet.Fay (
Fay
, initFay
, fayServe
, fayax
, toFayax
, fromFayax
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Reader
import Control.Monad.State.Class
import Control.Monad.Trans.Writer
import qualified Data.Aeson as A
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy as BL
import qualified Data.Configurator as C
import Data.Data
import Data.List
import Data.Maybe
import Data.String
import Fay.Convert
import Snap.Core
import Snap.Snaplet
import Snap.Util.FileServe
import System.Directory
import System.FilePath
import Paths_snaplet_fay
import Snap.Snaplet.Fay.Internal
-- | Snaplet initialization
initFay :: SnapletInit b Fay
initFay = makeSnaplet "fay" description datadir $ do
config <- getSnapletUserConfig
fp <- getSnapletFilePath
(opts, errs) <- runWriterT $ do
compileModeStr <- logErr "Must specify compileMode" $ C.lookup config "compileMode"
compileMode <- case compileModeStr of
Just x -> logErr "Invalid compileMode" . return $ compileModeFromString x
Nothing -> return Nothing
verbose <- logErr "Must specify verbose" $ C.lookup config "verbose"
prettyPrint <- logErr "Must specify prettyPrint" $ C.lookup config "prettyPrint"
includeDirs <- logErr "Must specify includeDirs" $ C.lookup config "includeDirs"
let inc = maybe [] (split ',') includeDirs
inc' <- liftIO $ mapM canonicalizePath inc
packages <- logErr "Must specify packages" $ C.lookup config "packages"
let packs = maybe [] (split ',') packages
return (verbose, compileMode, prettyPrint, inc', packs)
let fay = case opts of
(Just verbose, Just compileMode, Just prettyPrint, includeDirs, packages) ->
Fay
{ snapletFilePath = fp
, verbose = verbose
, compileMode = compileMode
, prettyPrint = prettyPrint
, _includeDirs = fp : includeDirs
, packages = packages
}
_ -> error $ intercalate "\n" errs
-- Make sure snaplet/fay, snaplet/fay/src, snaplet/fay/js are present.
liftIO $ mapM_ createDirUnlessExists [fp, srcDir fay, destDir fay]
when (Production == compileMode fay) (liftIO $ compileAll fay)
return fay
where
-- TODO Use split package
split :: Eq a => a -> [a] -> [[a]]
split _ [] = []
split a as = takeWhile (/= a) as : split a (drop 1 $ dropWhile (/= a) as)
createDirUnlessExists fp = do
dirExists <- doesDirectoryExist fp
unless dirExists $ createDirectory fp
datadir = Just $ liftM (++ "/resources") getDataDir
description = "Automatic (re)compilation and serving of Fay files"
logErr :: MonadIO m => t -> IO (Maybe a) -> WriterT [t] m (Maybe a)
logErr err m = do
res <- liftIO m
when (isNothing res) (tell [err])
return res
compileModeFromString :: String -> Maybe CompileMode
compileModeFromString "Development" = Just Development
compileModeFromString "Production" = Just Production
compileModeFromString _ = Nothing
-- | Serves the compiled Fay scripts using the chosen compile mode.
fayServe :: Handler b Fay ()
fayServe = do
modifyResponse . setContentType $ "text/javascript;charset=utf-8"
get >>= compileWithMode . compileMode
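-- Typical wiring (illustrative; assumes a @fay@ lens obtained from
-- @nestSnaplet "fay" fay initFay@ in the application initializer):
--
-- > routes = [ ("/fay", with fay fayServe) ]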
-- | Send and receive JSON.
-- Automatically decodes a JSON request into a Fay record which is
-- passed to `g`. The handler `g` should then return a Fay record (of
-- a possibly separate type) which is encoded and passed back as a
-- JSON response.
-- If you only want to send JSON and handle input manually, use 'toFayax'.
-- If you want to receive JSON and handle the response manually, use 'fromFayax'.
fayax :: (Data f1, Read f1, Show f2) => (f1 -> Handler h1 h2 f2) -> Handler h1 h2 ()
fayax g = do
res <- decode
case res of
Left body -> send500 $ Just body
Right res -> toFayax . g $ res
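-- A sketch of a handler built with 'fayax' (the @Ping@/@Pong@ types and the
-- @App@ snaplet are hypothetical; they need the @Data@/@Read@/@Show@ instances
-- required by the constraints above):
--
-- > pingHandler :: Handler App App ()
-- > pingHandler = fayax $ \Ping -> return Pong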
-- | fayax only sending JSON.
toFayax :: Show f2 => Handler h1 h2 f2 -> Handler h1 h2 ()
toFayax g = do
modifyResponse . setContentType $ "text/json;charset=utf-8"
writeLBS . A.encode . showToFay =<< g
-- | fayax only receiving JSON.
fromFayax :: (Data f1, Read f1) => (f1 -> Handler h1 h2 ()) -> Handler h1 h2 ()
fromFayax g = do
res <- decode
case res of
Left body -> send500 $ Just body
Right res -> g res
-- | Read the request input and convert to a Fay value.
decode :: (Data f1, Read f1) => Handler h1 h2 (Either ByteString f1)
decode = do
body <- readRequestBody 1024 -- Nothing will break by abusing this :)!
res <- return $ A.decode body >>= readFromFay
return $ case res of
Nothing -> Left. BS.concat . BL.toChunks $ "Could not decode " `BL.append` body
Just x -> Right x
-- | Compiles according to the specified mode.
compileWithMode :: CompileMode -> Handler b Fay ()
compileWithMode Development = do
cfg <- get
uri <- (srcDir cfg </>) . toHsName . filename . BS.unpack . rqURI <$> getRequest
res <- liftIO (compileFile cfg uri)
case res of
Success s -> writeBS $ fromString s
NotFound -> send404 Nothing
Error err -> send500 . Just . BS.pack $ err
-- Production compilation has already been done.
compileWithMode Production = get >>= serveDirectory . destDir
-- | Respond with Not Found
send404 :: Maybe ByteString -> Handler a b ()
send404 msg = do
modifyResponse $ setResponseStatus 404 "Not Found"
writeBS $ fromMaybe "Not Found" msg
finishWith =<< getResponse
-- | Respond with Internal Server Error
send500 :: Maybe ByteString -> Handler a b ()
send500 msg = do
modifyResponse $ setResponseStatus 500 "Internal Server Error"
writeBS $ fromMaybe "Internal Server Error" msg
finishWith =<< getResponse
| bergmark/snaplet-fay | src/Snap/Snaplet/Fay.hs | bsd-3-clause | 6,274 | 0 | 18 | 1,678 | 1,634 | 825 | 809 | 126 | 6 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Kiosk.Backend.Data ( DataTemplateEntry (..)
, DataTemplateEntryKey (..)
, DataTemplate (..)
, TicketId (..)
, TemplateItem (..)
, dataTemplateEntryKey
, dataTemplateEntryValue
, decodeUUID
, getListOfSortedTemplateItems
, fromDataTemplateEntryToCsv
, fromDataTemplateEntryToS3Csv
, fromDataTemplateEntryToXlsxWorksheet
) where
import Kiosk.Backend.Data.DataTemplate
import Kiosk.Backend.Data.DataTemplateEntry
import Kiosk.Backend.Data.DataTemplateEntryKey
| plow-technologies/cobalt-kiosk-data-template | src/Kiosk/Backend/Data.hs | bsd-3-clause | 959 | 0 | 5 | 420 | 90 | 64 | 26 | 19 | 0 |
module Day2 where
import Test.Hspec
import Utils
import qualified Text.Megaparsec.String as P
import qualified Text.Megaparsec as P
-- Input DSL
data Instruction = U | D | L | R deriving (Show)
-- Parsing
parser :: P.Parser [[Instruction]]
parser = P.sepBy (P.many parserInstruction) (P.string "\n")
parserInstruction :: P.Parser Instruction
parserInstruction = (U <$ P.string "U") P.<|>
(D <$ P.string "D") P.<|>
(L <$ P.string "L") P.<|>
(R <$ P.string "R")
-- Problem DSL
data KeyPad = KeyPad [[Char]] deriving (Show)
data Status = Status KeyPad (Int, Int) deriving (Show)
makeKeyPad s coord = Status (KeyPad (lines s)) coord
validCase k@(KeyPad s) (x, y) = y >= 0 && y < length s && x >= 0 && x < length (s !! y) && getKeyPad k (x, y) /= ' '
getKeyPad keyPad@(KeyPad s) (x, y) = s !! y !! x
getStatus (Status keyPad coord) = getKeyPad keyPad coord
moveKeyPad i (Status keyPad (x, y)) = Status keyPad (if validCase keyPad newCoord then newCoord else (x, y))
where newCoord = case i of
U -> (x, y - 1)
D -> (x, y + 1)
L -> (x - 1, y)
R -> (x + 1, y)
keyPad = makeKeyPad "123\n\
\456\n\
\789" (1, 1)
keyPad' = makeKeyPad " 1 \n\
\ 234 \n\
\56789\n\
\ ABC \n\
\ D " (0, 2)
-- utils
foldInstruction :: Status -> [Instruction] -> Status
foldInstruction keyPad xs = foldl (flip moveKeyPad) keyPad xs
genericDay :: Status -> [[Instruction]] -> [Char]
genericDay keypad code = map getStatus (tail (scanl foldInstruction keypad code))
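-- For example, starting from '5' on the 3x3 keypad, the instructions [U, L]
-- walk to '2' and then '1', so:
--
-- > genericDay keyPad [[U, L]] == "1"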
-- FIRST problem
day code = genericDay keyPad code
-- SECOND problem
day' code = genericDay keyPad' code
-- tests and data
testData = [[U, L],
[R, R, D, D, D],
[L, U, R, D, L],
[U, U, U, U, D]
]
test = hspec $ do
describe "firstProblem" $ do
it "works" $ do
day testData `shouldBe` "1985"
--day 1 `shouldBe` (2 :: Int)
describe "secondProblem" $ do
it "works" $ do
day' testData `shouldBe` "5DB3"
describe "finally" $ do
it "works" $ do
day <$> content `shouldReturn` "47978"
day' <$> content `shouldReturn` "659AD"
fileContent = readFile "content/day2"
content = parse parser <$> fileContent
| guibou/AdventOfCode2016 | src/Day2.hs | bsd-3-clause | 2,376 | 0 | 15 | 717 | 883 | 479 | 404 | 52 | 5 |
module Handler.RemoveDeck where
import Kerchief.Prelude
import Prelude hiding (putStrLn)
import System.Directory (removeFile)
import System.FilePath ((</>))
import Kerchief (Kerchief, getDecksDir)
import Utils (askYesNo, getDirectoryContents')
handleRemoveDeck :: [String] -> Kerchief ()
handleRemoveDeck ["--help"] = printRemoveDeckUsage
handleRemoveDeck [name] = handleRemoveDeck' name
handleRemoveDeck _ = printRemoveDeckUsage
handleRemoveDeck' :: String -> Kerchief ()
handleRemoveDeck' name = do
decksDir <- getDecksDir
decks <- io (getDirectoryContents' decksDir)
if name `elem` decks
then askYesNo ("Are you sure you want to remove deck \"" ++ name ++ "\"? ")
(do
io $ removeFile (decksDir </> name)
putStrLn $ "Deck \"" ++ name ++ "\" removed.")
(putStrLn $ "Deck \"" ++ name ++ "\" not removed.")
else putStrLn $ "Deck \"" ++ name ++ "\" doesn't exist. See \"decks\"."
printRemoveDeckUsage :: Kerchief ()
printRemoveDeckUsage = putStrLn "TODO"
| mitchellwrosen/kerchief | src/Handler/RemoveDeck.hs | bsd-3-clause | 1,119 | 0 | 15 | 293 | 274 | 147 | 127 | 24 | 2 |
{-|
Description: helpers for matching requests
contains various matching utilities
-}
{-# LANGUAGE TupleSections #-}
module Web.Respond.Request where
import Network.Wai
import qualified Data.ByteString.Lazy as LBS
import Control.Applicative ((<$>))
import Control.Monad.IO.Class (liftIO)
import qualified Network.HTTP.Media as Media
import Data.Maybe (fromMaybe)
import Web.Respond.Types
import Web.Respond.Monad
import Web.Respond.Response
-- * extracting the request body
-- | gets the body as a lazy ByteString using lazy IO (see 'lazyRequestBody')
getBodyLazy :: MonadRespond m => m LBS.ByteString
getBodyLazy = getRequest >>= liftIO . lazyRequestBody
-- | gets the body as a lazy ByteString using /strict/ IO (see 'strictRequestBody')
getBodyStrict :: MonadRespond m => m LBS.ByteString
getBodyStrict = getRequest >>= liftIO . strictRequestBody
-- ** extraction using FromBody
-- | use a FromBody instance to parse the body. uses 'getBodyLazy' to
-- lazily load the body data.
extractBodyLazy :: (ReportableError e, FromBody e a, MonadRespond m) => m (Either e a)
extractBodyLazy = fromBody <$> getBodyLazy
-- | uses a FromBody instance to parse the body. uses 'getBodyStrict' to
-- load the body strictly.
extractBodyStrict :: (ReportableError e, FromBody e a, MonadRespond m) => m (Either e a)
extractBodyStrict = fromBody <$> getBodyStrict
-- | extracts the body using 'extractBodyLazy'. runs the inner action only
-- if the body could be loaded and parsed using the FromBody instance;
-- otherwise responds with the reportable error by calling
-- 'handleBodyParseFailure'.
withRequiredBody :: (ReportableError e, FromBody e a, MonadRespond m) => (a -> m ResponseReceived) -> m ResponseReceived
withRequiredBody action = extractBodyLazy >>= either handleBodyParseFailure action
-- | extracts the body using 'extractBodyStrict'. runs the inner action only
-- if the body could be loaded and parsed using the FromBody instance;
-- otherwise responds with the reportable error by calling
-- 'handleBodyParseFailure'.
withRequiredBody' :: (ReportableError e, FromBody e a, MonadRespond m) => (a -> m ResponseReceived) -> m ResponseReceived
withRequiredBody' action = extractBodyStrict >>= either handleBodyParseFailure action
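-- A sketch of typical usage, where @NewWidget@ is a hypothetical type with a
-- 'FromBody' instance and @respondCreatedWidget@ is a hypothetical responder
-- defined elsewhere in the application:
--
-- > createWidget :: MonadRespond m => m ResponseReceived
-- > createWidget = withRequiredBody $ \w -> respondCreatedWidget (w :: NewWidget)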
-- * authentication and authorization
-- | authenticate uses the result of the authentication action (if it
-- successfully produced a result) to run the inner action function.
-- otherwise, it uses 'handleAuthFailed'.
authenticate :: (MonadRespond m, ReportableError e) => m (Either e a) -> (a -> m ResponseReceived) -> m ResponseReceived
authenticate auth inner = auth >>= either handleAuthFailed inner
-- | reauthenticate tries to use a prior authentication value to run the
-- inner action; if it's not available, it falls back to 'authenticate' to
-- apply the auth action and run the inner action.
reauthenticate :: (MonadRespond m, ReportableError e) => Maybe a -> m (Either e a) -> (a -> m ResponseReceived) -> m ResponseReceived
reauthenticate prior auth inner = maybe (authenticate auth inner) inner prior
-- | if given an error report value, respond immediately using
-- 'handleAccessDenied'. otherwise, run the inner route.
authorize :: (ReportableError e, MonadRespond m) => Maybe e -> m ResponseReceived -> m ResponseReceived
authorize check inner = maybe inner handleAccessDenied check
-- | if the bool is true, run the inner. otherwise, pass the report to
-- 'handleAccessDenied'.
authorizeBool :: (ReportableError e, MonadRespond m) => e -> Bool -> m ResponseReceived -> m ResponseReceived
authorizeBool report allowed inner
| allowed = inner
| otherwise = handleAccessDenied report
-- | authorize using an Either; if it's Left, fail using 'handleAccessDenied' on
-- the contained ReportableError. if it's Right, run the inner action using
-- the contained value.
authorizeE :: (ReportableError e, MonadRespond m) => Either e a -> (a -> m ResponseReceived) -> m ResponseReceived
authorizeE check inner = either handleAccessDenied inner check
-- * content negotiation
-- | selects action by accept header
routeAccept :: MonadRespond m
=> m a -- ^ default action - do this if nothing matches
-> [(Media.MediaType, m a)] -- ^ actions to perform for each accepted media type
-> m a -- ^ chosen action
routeAccept def mapped = getAcceptHeader >>= fromMaybe def . Media.mapAcceptMedia mapped
-- | defends the inner routes by first checking the Accept header and
-- failing if it cannot accept any media type in the list
checkAccepts :: MonadRespond m => [Media.MediaType] -> m ResponseReceived -> m ResponseReceived
checkAccepts list action = getAcceptHeader >>= maybe handleUnacceptableResponse (const action) . Media.matchAccept list
| raptros/respond | src/Web/Respond/Request.hs | bsd-3-clause | 4,720 | 0 | 11 | 758 | 874 | 466 | 408 | 42 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
import qualified Data.ByteString.Lazy.Char8 as B
import System.Console.CmdArgs
import System.Exit
import Text.WikiEngine
import qualified Text.Blaze.Renderer.Utf8 as RenderUtf8 (renderHtml)
import qualified Text.Blaze.Renderer.Pretty as RenderPretty (renderHtml)
import qualified Data.ByteString.Lazy as L
renderCfg = defaultRenderCfg { rcfgCodeRenderType = CodeRenderSimple }
doMain (Render input pretty) = do
content <- readFile input
let wikiblocks =
case parseDocument content of
Right blocks -> blocks
Left errors -> error ("error parsing wiki content: " ++ show errors)
if pretty
then putStrLn $ RenderPretty.renderHtml $ renderAsHtml renderCfg wikiblocks
else L.putStrLn $ RenderUtf8.renderHtml $ renderAsHtml renderCfg wikiblocks
doMain (Raw input) = do
content <- readFile input
let wikiblocks =
case parseDocument content of
Right blocks -> blocks
Left errors -> error ("error parsing wiki content: " ++ show errors)
mapM_ (putStrLn . show) wikiblocks
doMain (Validate input) = do
content <- readFile input
case parseDocument content of
Right _ -> exitSuccess
Left _ -> exitFailure
data Opts =
Render { input :: FilePath, pretty :: Bool }
| Raw { input :: FilePath }
| Validate { input :: FilePath }
deriving (Show,Data,Typeable)
render = Render { input = def &= typFile, pretty = def }
raw = Raw { input = def &= typFile }
validate = Validate { input = def &= typFile }
mode = cmdArgsMode $ modes [raw,validate,render]
main = cmdArgsRun mode >>= doMain
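-- Example invocations (the executable name is hypothetical; the "render",
-- "raw" and "validate" modes and their flags are derived by cmdargs from the
-- record definitions above):
--
-- > wikihtml render --input=page.wiki --pretty
-- > wikihtml validate --input=page.wiki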
| vincenthz/wikiengine | Wikihtml.hs | bsd-3-clause | 1,567 | 25 | 16 | 289 | 519 | 266 | 253 | 40 | 5 |
import Common.Numbers.Numbers (powMod)
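-- Project Euler 97: the last ten digits of the non-Mersenne prime
-- 28433 * 2^7830457 + 1, computed via modular exponentiation (mod 10^10).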
main = print $ 1 + (28433 * (powMod 2 (7830457 :: Int) modulo) `mod` modulo) where
modulo = 10^10 :: Integer
| foreverbell/project-euler-solutions | src/97.hs | bsd-3-clause | 154 | 1 | 12 | 31 | 72 | 39 | 33 | 3 | 1 |
{-# LANGUAGE PatternGuards, ViewPatterns, CPP, ScopedTypeVariables #-}
module General.Util(
PkgName, ModName,
URL,
pretty, parseMode, applyType, applyFun1, unapplyFun, fromName, fromQName, fromTyVarBind, declNames, isTypeSig,
fromDeclHead, fromContext, fromIParen, fromInstHead,
tarballReadFiles,
isUpper1, isAlpha1,
joinPair,
testing, testEq,
showUTCTime,
strict,
withs,
escapeHTML, unescapeHTML, unHTML,
escapeURL,
takeSortOn,
Average, toAverage, fromAverage,
inRanges,
parseTrailingVersion,
trimVersion,
exitFail,
prettyTable,
getStatsPeakAllocBytes, getStatsCurrentLiveBytes, getStatsDebug,
hackagePackageURL, hackageModuleURL, hackageDeclURL, ghcModuleURL,
minimum', maximum',
general_util_test
) where
import Language.Haskell.Exts
import Control.Applicative
import Data.List.Extra
import Data.Char
import Data.Either.Extra
import Data.Semigroup
import Data.Tuple.Extra
import Control.Monad.Extra
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Map as Map
import Data.Ix
import Numeric.Extra
import Codec.Compression.GZip as GZip
import Codec.Archive.Tar as Tar
import Data.Time.Clock
import Data.Time.Format
import Control.DeepSeq
import Control.Exception.Extra
import Test.QuickCheck
import Data.Version
import Data.Int
import System.IO
import System.Exit
import System.Mem
import GHC.Stats
import General.Str
import Prelude
import qualified Network.HTTP.Types.URI as URI
import qualified Data.ByteString.UTF8 as UTF8
type PkgName = Str
type ModName = Str
-- | A URL, complete with a @https:@ prefix.
type URL = String
#if __GLASGOW_HASKELL__ >= 802
#define RTS_STATS 1
#endif
showMb :: (Show a, Integral a) => a -> String
#if RTS_STATS
showMb x = show (x `div` (1024*1024)) ++ "Mb"
#else
showMb x = show x ++ "Mb"
#endif
#if RTS_STATS
withRTSStats :: (RTSStats -> a) -> IO (Maybe a)
withRTSStats f = ifM getRTSStatsEnabled (Just . f <$> getRTSStats) (pure Nothing)
#else
withGCStats :: (GCStats -> a) -> IO (Maybe a)
withGCStats f = ifM getGCStatsEnabled (Just . f <$> getGCStats) (pure Nothing)
#endif
getStatsCurrentLiveBytes :: IO (Maybe String)
getStatsCurrentLiveBytes = do
performGC
#if RTS_STATS
withRTSStats $ showMb . gcdetails_live_bytes . gc
#else
withGCStats $ showMb . currentBytesUsed
#endif
getStatsPeakAllocBytes :: IO (Maybe String)
getStatsPeakAllocBytes = do
#if RTS_STATS
withRTSStats $ showMb . max_mem_in_use_bytes
#else
withGCStats $ showMb . peakMegabytesAllocated
#endif
getStatsDebug :: IO (Maybe String)
getStatsDebug = do
let dump = replace ", " "\n" . takeWhile (/= '}') . drop1 . dropWhile (/= '{') . show
#if RTS_STATS
withRTSStats dump
#else
withGCStats dump
#endif
exitFail :: String -> IO ()
exitFail msg = do
hPutStrLn stderr msg
exitFailure
pretty :: Pretty a => a -> String
pretty = prettyPrintWithMode defaultMode{layout=PPNoLayout}
parseMode :: ParseMode
parseMode = defaultParseMode{extensions=map EnableExtension es}
where es = [ConstraintKinds,EmptyDataDecls,TypeOperators,ExplicitForAll,GADTs,KindSignatures,MultiParamTypeClasses
,TypeFamilies,FlexibleContexts,FunctionalDependencies,ImplicitParams,MagicHash,UnboxedTuples
,ParallelArrays,UnicodeSyntax,DataKinds,PolyKinds,PatternSynonyms]
applyType :: Type a -> [Type a] -> Type a
applyType x (t:ts) = applyType (TyApp (ann t) x t) ts
applyType x [] = x
applyFun1 :: [Type a] -> Type a
applyFun1 [x] = x
applyFun1 (x:xs) = TyFun (ann x) x $ applyFun1 xs
unapplyFun :: Type a -> [Type a]
unapplyFun (TyFun _ x y) = x : unapplyFun y
unapplyFun x = [x]
fromName :: Name a -> String
fromName (Ident _ x) = x
fromName (Symbol _ x) = x
fromQName :: QName a -> String
fromQName (Qual _ _ x) = fromName x
fromQName (UnQual _ x) = fromName x
fromQName (Special _ UnitCon{}) = "()"
fromQName (Special _ ListCon{}) = "[]"
fromQName (Special _ FunCon{}) = "->"
fromQName (Special _ (TupleCon _ box n)) = "(" ++ h ++ replicate n ',' ++ h ++ ")"
where h = ['#' | box == Unboxed]
fromQName (Special _ UnboxedSingleCon{}) = "(##)"
fromQName (Special _ Cons{}) = ":"
fromContext :: Context a -> [Asst a]
fromContext (CxSingle _ x) = [x]
fromContext (CxTuple _ xs) = xs
fromContext _ = []
fromIParen :: InstRule a -> InstRule a
fromIParen (IParen _ x) = fromIParen x
fromIParen x = x
fromTyVarBind :: TyVarBind a -> Name a
fromTyVarBind (KindedVar _ x _) = x
fromTyVarBind (UnkindedVar _ x) = x
fromDeclHead :: DeclHead a -> (Name a, [TyVarBind a])
fromDeclHead (DHead _ n) = (n, [])
fromDeclHead (DHInfix _ x n) = (n, [x])
fromDeclHead (DHParen _ x) = fromDeclHead x
fromDeclHead (DHApp _ dh x) = second (++[x]) $ fromDeclHead dh
fromInstHead :: InstHead a -> (QName a, [Type a])
fromInstHead (IHCon _ n) = (n, [])
fromInstHead (IHInfix _ x n) = (n, [x])
fromInstHead (IHParen _ x) = fromInstHead x
fromInstHead (IHApp _ ih x) = second (++[x]) $ fromInstHead ih
declNames :: Decl a -> [String]
declNames x = map fromName $ case x of
TypeDecl _ hd _ -> f hd
DataDecl _ _ _ hd _ _ -> f hd
GDataDecl _ _ _ hd _ _ _ -> f hd
TypeFamDecl _ hd _ _ -> f hd
DataFamDecl _ _ hd _ -> f hd
ClassDecl _ _ hd _ _ -> f hd
TypeSig _ names _ -> names
PatSynSig _ names _ _ _ _ _ -> names
_ -> []
where f x = [fst $ fromDeclHead x]
isTypeSig :: Decl a -> Bool
isTypeSig TypeSig{} = True
isTypeSig PatSynSig{} = True
isTypeSig _ = False
tarballReadFiles :: FilePath -> IO [(FilePath, LBS.ByteString)]
tarballReadFiles file = f . Tar.read . GZip.decompress <$> LBS.readFile file
where
f (Next e rest) | NormalFile body _ <- entryContent e = (entryPath e, body) : f rest
f (Next _ rest) = f rest
f Done = []
f (Fail e) = error $ "tarballReadFiles on " ++ file ++ ", " ++ show e
innerTextHTML :: String -> String
innerTextHTML ('<':xs) = innerTextHTML $ drop1 $ dropWhile (/= '>') xs
innerTextHTML (x:xs) = x : innerTextHTML xs
innerTextHTML [] = []
unHTML :: String -> String
unHTML = unescapeHTML . innerTextHTML
escapeURL :: String -> String
escapeURL = UTF8.toString . URI.urlEncode True . UTF8.fromString
isUpper1 (x:xs) = isUpper x
isUpper1 _ = False
isAlpha1 (x:xs) = isAlpha x
isAlpha1 [] = False
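-- | Split on the first occurrence of a separator, e.g.
-- @splitPair ":" "module:foo:bar" == ("module","foo:bar")@ (see the tests
-- below); it is an error if the separator does not occur at all.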
splitPair :: String -> String -> (String, String)
splitPair x y | (a,stripPrefix x -> Just b) <- breakOn x y = (a,b)
| otherwise = error $ "splitPair does not contain separator " ++ show x ++ " in " ++ show y
joinPair :: [a] -> ([a], [a]) -> [a]
joinPair sep (a,b) = a ++ sep ++ b
testing_, testing :: String -> IO () -> IO ()
testing_ name act = do putStr $ "Test " ++ name ++ " "; act
testing name act = do testing_ name act; putStrLn ""
testEq :: (Show a, Eq a) => a -> a -> IO ()
testEq a b | a == b = putStr "."
| otherwise = errorIO $ "Expected equal, but " ++ show a ++ " /= " ++ show b
showUTCTime :: String -> UTCTime -> String
showUTCTime = formatTime defaultTimeLocale
withs :: [(a -> r) -> r] -> ([a] -> r) -> r
withs [] act = act []
withs (f:fs) act = f $ \a -> withs fs $ \as -> act $ a:as
prettyTable :: Int -> String -> [(String, Double)] -> [String]
prettyTable dp units xs =
( padR len units ++ "\tPercent\tName") :
[ padL len (showDP dp b) ++ "\t" ++ padL 7 (showDP 1 (100 * b / tot) ++ "%") ++ "\t" ++ a
| (a,b) <- ("Total", tot) : sortOn (negate . snd) xs]
where
tot = sum $ map snd xs
len = length units `max` length (showDP dp tot)
padL n s = replicate (n - length s) ' ' ++ s
padR n s = s ++ replicate (n - length s) ' '
-- ensure that no value escapes in a thunk from the action
strict :: NFData a => IO a -> IO a
strict act = do
res <- try_ act
case res of
Left e -> do msg <- showException e; evaluate $ rnf msg; errorIO msg
Right v -> evaluate $ force v
data Average a = Average !a {-# UNPACK #-} !Int deriving Show -- a / b
toAverage :: a -> Average a
toAverage x = Average x 1
fromAverage :: Fractional a => Average a -> a
fromAverage (Average a b) = a / fromIntegral b
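-- For example:
--
-- > fromAverage (toAverage 1 <> toAverage 3) == 2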
instance Num a => Semigroup (Average a) where
Average x1 x2 <> Average y1 y2 = Average (x1+y1) (x2+y2)
instance Num a => Monoid (Average a) where
mempty = Average 0 0
mappend = (<>)
data TakeSort k v = More !Int !(Map.Map k [v])
| Full !k !(Map.Map k [v])
-- | @takeSortOn op n == take n . sortOn op@
takeSortOn :: Ord k => (a -> k) -> Int -> [a] -> [a]
takeSortOn op n xs
| n <= 0 = []
| otherwise = concatMap reverse $ Map.elems $ getMap $ foldl' add (More n Map.empty) xs
where
getMap (More _ mp) = mp
getMap (Full _ mp) = mp
add (More n mp) x = (if n <= 1 then full else More (n-1)) $ Map.insertWith (++) (op x) [x] mp
add o@(Full mx mp) x = let k = op x in if k >= mx then o else full $ Map.insertWith (++) k [x] $ delMax mp
full mp = Full (fst $ Map.findMax mp) mp
delMax mp | Just ((k,_:vs), mp) <- Map.maxViewWithKey mp = if null vs then mp else Map.insert k vs mp
-- See https://ghc.haskell.org/trac/ghc/ticket/10830 - they broke maximumBy
maximumBy' :: (a -> a -> Ordering) -> [a] -> a
maximumBy' cmp = foldl1' $ \x y -> if cmp x y == GT then x else y
maximum' :: Ord a => [a] -> a
maximum' = maximumBy' compare
minimumBy' :: (a -> a -> Ordering) -> [a] -> a
minimumBy' cmp = foldl1' $ \x y -> if cmp x y == LT then x else y
minimum' :: Ord a => [a] -> a
minimum' = minimumBy' compare
hackagePackageURL :: PkgName -> URL
hackagePackageURL x = "https://hackage.haskell.org/package/" ++ strUnpack x
hackageModuleURL :: ModName -> URL
hackageModuleURL x = "/docs/" ++ ghcModuleURL x
ghcModuleURL :: ModName -> URL
ghcModuleURL x = replace "." "-" (strUnpack x) ++ ".html"
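-- | Anchor for a declaration on its Hackage documentation page, e.g.
-- @hackageDeclURL True "sortOn" == "#v:sortOn"@; characters Haddock treats
-- as illegal (anything other than alphanumerics, @:@, @_@ and @.@) are
-- escaped numerically.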
hackageDeclURL :: Bool -> String -> URL
hackageDeclURL typesig x = "#" ++ (if typesig then "v" else "t") ++ ":" ++ concatMap f x
where
f x | isLegal x = [x]
| otherwise = "-" ++ show (ord x) ++ "-"
-- isLegal is from haddock-api:Haddock.Utils; we need to use
-- the same escaping strategy here in order for fragment links
-- to work
isLegal ':' = True
isLegal '_' = True
isLegal '.' = True
isLegal c = isAscii c && isAlphaNum c
trimVersion :: Int -> Version -> Version
trimVersion i v = v{versionBranch = take i $ versionBranch v}
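-- | Split a trailing version number off a name, e.g.
-- @parseTrailingVersion "shake-0.15.2" == ("shake",[0,15,2])@ (see the tests
-- below).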
parseTrailingVersion :: String -> (String, [Int])
parseTrailingVersion = (reverse *** reverse) . f . reverse
where
f xs | (ver@(_:_),sep:xs) <- span isDigit xs
, sep == '-' || sep == '.'
, (a, b) <- f xs
= (a, Prelude.read (reverse ver) : b)
f xs = (xs, [])
-- | Equivalent to any (`inRange` x) xs, but more efficient
inRanges :: Ix a => [(a,a)] -> (a -> Bool)
inRanges xs = \x -> maybe False (`inRange` x) $ Map.lookupLE x mp
where
mp = foldl' add Map.empty xs
merge (l1,u1) (l2,u2) = (min l1 l2, max u1 u2)
overlap x1 x2 = x1 `inRange` fst x2 || x2 `inRange` fst x1
add mp x
| Just x2 <- Map.lookupLE (fst x) mp, overlap x x2 = add (Map.delete (fst x2) mp) (merge x x2)
| Just x2 <- Map.lookupGE (fst x) mp, overlap x x2 = add (Map.delete (fst x2) mp) (merge x x2)
| otherwise = uncurry Map.insert x mp
general_util_test :: IO ()
general_util_test = do
testing "General.Util.splitPair" $ do
let a === b = if a == b then putChar '.' else errorIO $ show (a,b)
splitPair ":" "module:foo:bar" === ("module","foo:bar")
do x <- try_ $ evaluate $ rnf $ splitPair "-" "module:foo"; isLeft x === True
splitPair "-" "module-" === ("module","")
testing_ "General.Util.inRanges" $ do
quickCheck $ \(x :: Int8) xs -> inRanges xs x == any (`inRange` x) xs
testing "General.Util.parseTrailingVersion" $ do
let a === b = if a == b then putChar '.' else errorIO $ show (a,b)
parseTrailingVersion "shake-0.15.2" === ("shake",[0,15,2])
parseTrailingVersion "test-of-stuff1" === ("test-of-stuff1",[])
| ndmitchell/hoogle | src/General/Util.hs | bsd-3-clause | 12,086 | 0 | 16 | 2,799 | 4,832 | 2,510 | 2,322 | 276 | 9 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances, FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE UndecidableInstances #-}
module Web.Zwaluw.Regular
( mkRouters
, Routers
, RouterList(..)
-- * Re-exported from Generics.Regular
, deriveAll
, PF
) where
import Web.Zwaluw.Core
import Generics.Regular
infixr :&
-- | The type of the list of routers generated for type @r@.
type Routers r = RouterList (PF r) r
-- | Creates the routers for type @r@, one for each constructor. For example:
--
-- @Z rHome :& Z rUserOverview :& Z rUserDetail :& Z rArticle = mkRouters@
mkRouters :: (MkRouters (PF r), Regular r) => Routers r
mkRouters = mkRouters' to (Just . from)
data family RouterList f r
class MkRouters (f :: * -> *) where
mkRouters' :: (f r -> r) -> (r -> Maybe (f r)) -> RouterList f r
data instance RouterList (C c f) r = Z (forall t. Router (RouterLhs f r t) (r :- t))
instance MkRouter f => MkRouters (C c f) where
mkRouters' addLR matchLR = Z $ pure (hdMap (addLR . C) . mkP) (fmap mkS . hdTraverse (fmap unC . matchLR))
data instance RouterList (f :+: g) r = RouterList f r :& RouterList g r
instance (MkRouters f, MkRouters g) => MkRouters (f :+: g) where
mkRouters' addLR matchLR = mkRouters' (addLR . L) (matchL matchLR)
:& mkRouters' (addLR . R) (matchR matchLR)
where
matchL :: (r -> Maybe ((f :+: g) r)) -> r -> Maybe (f r)
matchL frm r = case frm r of
Just (L f) -> Just f
_ -> Nothing
matchR :: (r -> Maybe ((f :+: g) r)) -> r -> Maybe (g r)
matchR frm r = case frm r of
Just (R f) -> Just f
_ -> Nothing
type family RouterLhs (f :: * -> *) (r :: *) (t :: *) :: *
class MkRouter (f :: * -> *) where
mkP :: RouterLhs f r t -> (f r :- t)
mkS :: (f r :- t) -> RouterLhs f r t
type instance RouterLhs U r t = t
instance MkRouter U where
mkP t = U :- t
mkS (U :- r) = r
type instance RouterLhs (K a) r t = a :- t
instance MkRouter (K a) where
mkP (a :- t) = K a :- t
mkS (K a :- t) = a :- t
type instance RouterLhs I r t = r :- t
instance MkRouter I where
mkP (r :- t) = I r :- t
mkS (I r :- t) = r :- t
type instance RouterLhs (f :*: g) r t = RouterLhs f r (RouterLhs g r t)
instance (MkRouter f, MkRouter g) => MkRouter (f :*: g) where
mkP t = (f :*: g) :- t''
where
f :- t' = mkP t
g :- t'' = mkP t'
mkS ((f :*: g) :- t) = mkS (f :- mkS (g :- t))
| MedeaMelana/Zwaluw | Web/Zwaluw/Regular.hs | bsd-3-clause | 2,485 | 0 | 15 | 666 | 1,072 | 563 | 509 | 57 | 1 |
{-# LANGUAGE BangPatterns, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
module Database.Riak.Protocol.GetBucketRequest (GetBucketRequest(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data GetBucketRequest = GetBucketRequest{bucket :: !P'.ByteString}
deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
instance P'.Mergeable GetBucketRequest where
mergeAppend (GetBucketRequest x'1) (GetBucketRequest y'1) = GetBucketRequest (P'.mergeAppend x'1 y'1)
instance P'.Default GetBucketRequest where
defaultValue = GetBucketRequest P'.defaultValue
instance P'.Wire GetBucketRequest where
wireSize ft' self'@(GetBucketRequest x'1)
= case ft' of
10 -> calc'Size
11 -> P'.prependMessageSize calc'Size
_ -> P'.wireSizeErr ft' self'
where
calc'Size = (P'.wireSizeReq 1 12 x'1)
wirePut ft' self'@(GetBucketRequest x'1)
= case ft' of
10 -> put'Fields
11 -> do
P'.putSize (P'.wireSize 10 self')
put'Fields
_ -> P'.wirePutErr ft' self'
where
put'Fields
= do
P'.wirePutReq 10 12 x'1
wireGet ft'
= case ft' of
10 -> P'.getBareMessageWith update'Self
11 -> P'.getMessageWith update'Self
_ -> P'.wireGetErr ft'
where
update'Self wire'Tag old'Self
= case wire'Tag of
10 -> Prelude'.fmap (\ !new'Field -> old'Self{bucket = new'Field}) (P'.wireGet 12)
_ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> GetBucketRequest) GetBucketRequest where
getVal m' f' = f' m'
instance P'.GPB GetBucketRequest
instance P'.ReflectDescriptor GetBucketRequest where
getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList [10]) (P'.fromDistinctAscList [10])
reflectDescriptorInfo _
= Prelude'.read
"DescriptorInfo {descName = ProtoName {protobufName = FIName \".Protocol.GetBucketRequest\", haskellPrefix = [MName \"Database\",MName \"Riak\"], parentModule = [MName \"Protocol\"], baseName = MName \"GetBucketRequest\"}, descFilePath = [\"Database\",\"Riak\",\"Protocol\",\"GetBucketRequest.hs\"], isGroup = False, fields = fromList [FieldInfo {fieldName = ProtoFName {protobufName' = FIName \".Protocol.GetBucketRequest.bucket\", haskellPrefix' = [MName \"Database\",MName \"Riak\"], parentModule' = [MName \"Protocol\",MName \"GetBucketRequest\"], baseName' = FName \"bucket\"}, fieldNumber = FieldId {getFieldId = 1}, wireTag = WireTag {getWireTag = 10}, packedTag = Nothing, wireTagLength = 1, isPacked = False, isRequired = True, canRepeat = False, mightPack = False, typeCode = FieldType {getFieldType = 12}, typeName = Nothing, hsRawDefault = Nothing, hsDefault = Nothing}], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False}" | iand675/hiker | Database/Riak/Protocol/GetBucketRequest.hs | bsd-3-clause | 3,116 | 0 | 16 | 584 | 590 | 308 | 282 | 49 | 0 |
{-# language CPP #-}
-- No documentation found for Chapter "Exception"
module OpenXR.Exception (OpenXrException(..)) where
import GHC.Exception.Type (Exception(..))
import OpenXR.Core10.Enums.Result (Result)
import OpenXR.Core10.Enums.Result (Result(..))
-- | This exception is thrown from calls to marshalled OpenXR commands
-- which return a negative 'Result'.
newtype OpenXrException = OpenXrException { vulkanExceptionResult :: Result }
deriving (Eq, Ord, Read, Show)
instance Exception OpenXrException where
displayException (OpenXrException r) = show r ++ ": " ++ resultString r
-- | A human-understandable message for each 'Result'
resultString :: Result -> String
resultString = \case
r -> show r
| expipiplus1/vulkan | openxr/src/OpenXR/Exception.hs | bsd-3-clause | 719 | 0 | 8 | 107 | 156 | 93 | 63 | -1 | -1 |
{-# LANGUAGE ForeignFunctionInterface, CPP #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.ShadingLanguageInclude
-- Copyright : (c) Sven Panne 2014
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- All raw functions and tokens from the ARB_shading_language_include extension,
-- see <http://www.opengl.org/registry/specs/ARB/shading_language_include.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.ShadingLanguageInclude (
-- * Functions
glNamedString,
glDeleteNamedString,
glCompileShaderInclude,
glIsNamedString,
glGetNamedString,
glGetNamedStringiv,
-- * Tokens
gl_SHADER_INCLUDE,
gl_NAMED_STRING_LENGTH,
gl_NAMED_STRING_TYPE
) where
import Foreign.C.Types
import Foreign.Ptr
import Graphics.Rendering.OpenGL.Raw.Core31.Types
import Graphics.Rendering.OpenGL.Raw.Extensions
--------------------------------------------------------------------------------
#include "HsOpenGLRaw.h"
extensionNameString :: String
extensionNameString = "GL_ARB_shading_language_include"
EXTENSION_ENTRY(dyn_glNamedString,ptr_glNamedString,"glNamedString",glNamedString,GLenum -> GLint -> Ptr GLchar -> GLint -> Ptr GLchar -> IO ())
EXTENSION_ENTRY(dyn_glDeleteNamedString,ptr_glDeleteNamedString,"glDeleteNamedString",glDeleteNamedString,GLint -> Ptr GLchar -> IO ())
EXTENSION_ENTRY(dyn_glCompileShaderInclude,ptr_glCompileShaderInclude,"glCompileShaderInclude",glCompileShaderInclude,GLuint -> GLsizei -> Ptr (Ptr GLchar) -> Ptr GLint -> IO ())
EXTENSION_ENTRY(dyn_glIsNamedString,ptr_glIsNamedString,"glIsNamedString",glIsNamedString,GLint -> Ptr GLchar -> IO GLboolean)
EXTENSION_ENTRY(dyn_glGetNamedString,ptr_glGetNamedString,"glGetNamedString",glGetNamedString,GLint -> Ptr GLchar -> GLsizei -> Ptr GLint -> Ptr GLchar -> IO ())
EXTENSION_ENTRY(dyn_glGetNamedStringiv,ptr_glGetNamedStringiv,"glGetNamedStringiv",glGetNamedStringiv,GLint -> Ptr GLchar -> GLenum -> Ptr GLint -> IO ())
gl_SHADER_INCLUDE :: GLenum
gl_SHADER_INCLUDE = 0x8DAE
gl_NAMED_STRING_LENGTH :: GLenum
gl_NAMED_STRING_LENGTH = 0x8DE9
gl_NAMED_STRING_TYPE :: GLenum
gl_NAMED_STRING_TYPE = 0x8DEA
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/ShadingLanguageInclude.hs | bsd-3-clause | 2,366 | 0 | 14 | 243 | 399 | 231 | 168 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# OPTIONS_GHC -Wall #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett 2014
-- License : BSD3
-- Maintainer: Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Hask.Power where
import Hask.Core
import Hask.Rel
import Hask.Rep
import qualified Prelude
infixr 0 ⋔
type (⋔) = Power
class (Category ((~>) :: i -> i -> *), hom ~ Hom) => Powered (hom :: j -> j -> i) where
type Power :: i -> j -> j
flipped :: forall (a :: j) (u :: i) (b :: j) (a' :: j) (u' :: i) (b' :: j).
Iso (hom a (Power u b)) (hom a' (Power u' b')) (u `Hom` hom a b) (u' `Hom` hom a' b')
flip :: Powered hom => hom a (Power u b) ~> Hom u (hom a b)
flip = get flipped
unflip :: Powered hom => Hom u (hom a b) ~> hom a (Power u b)
unflip = beget flipped
-- flippedInternal :: forall (a :: i) (u :: i) (b :: i). CCC (Hom :: i -> i -> *) => Iso' ((b^u)^a) ((b^a)^u)
--flippedInternal = dimap (curry $ curry $ apply . first apply . associate (fmap1 swap))
-- (curry $ curry $ apply . first apply . associate (fmap1 swap))
instance Powered (->) where
type Power = (->)
flipped = dimap Prelude.flip Prelude.flip
instance Powered (|-) where
type Power = (|-)
flipped = dimap (curry $ curry $ apply . first apply . associate (fmap1 swap))
(curry $ curry $ apply . first apply . associate (fmap1 swap))
instance Powered (Lift1 (->)) where
type Power = Lift (->)
flipped = dimap (curry $ curry $ apply . first apply . associate (fmap1 swap))
(curry $ curry $ apply . first apply . associate (fmap1 swap))
--flipped = dimap (Nat $ beget _Lift . fmap1 (beget _Lift) . flip . fmap1 (get _Lift) . get _Lift)
-- (Nat $ beget _Lift . fmap1 (beget _Lift) . flip . fmap1 (get _Lift) . get _Lift)
instance Powered (Lift2 (Lift1 (->))) where
type Power = Lift (Lift (->))
flipped = dimap (curry $ curry $ apply . first apply . associate (fmap1 swap))
(curry $ curry $ apply . first apply . associate (fmap1 swap))
--flipped = dimap (Nat $ beget _Lift . Nat (beget _Lift . fmap1 (transport (beget _Lift) . beget _Lift) . flip . fmap1 (get _Lift . transport (get _Lift)) . get _Lift) . get _Lift)
-- (Nat $ beget _Lift . Nat (beget _Lift . fmap1 (transport (beget _Lift) . beget _Lift) . flip . fmap1 (get _Lift . transport (get _Lift)) . get _Lift) . get _Lift)
-- Power1 :: * -> (i -> *) -> (i -> *)
newtype Power1 v f a = Power { runPower :: v -> f a }
instance Powered (Nat :: (i -> *) -> (i -> *) -> *) where
type Power = Power1
flipped = dimap
(\k v -> Nat $ \f -> runPower (transport k f) v)
(\k -> Nat $ \a' -> Power $ \u' -> transport (k u') a')
instance Contravariant Power1 where
contramap f = nat2 $ Power . lmap f . runPower
instance Functor (Power1 v) where
fmap f = Nat $ Power . fmap1 (transport f) . runPower
instance Semimonoidal (Power1 v) where
ap2 = Nat $ \(Lift (Power va, Power vb)) -> Power $ \v -> Lift (va v, vb v)
instance Monoidal (Power1 v) where
ap0 = Nat $ \(Const ()) -> Power $ \_ -> Const ()
instance Semigroup m => Semigroup (Power1 v m) where
mult = multM
instance Monoid m => Monoid (Power1 v m) where
one = oneM
instance Semimonoidal f => Semimonoidal (Power1 v f) where
ap2 (Power vfa, Power vfb) = Power $ \v -> ap2 (vfa v, vfb v)
instance Monoidal f => Monoidal (Power1 v f) where
ap0 () = Power $ \_ -> ap0 ()
instance (Semimonoidal f, Semigroup m) => Semigroup (Power1 v f m) where
mult = multM
instance (Monoidal f, Monoid m) => Monoid (Power1 v f m) where
one = oneM
instance Functor f => Functor (Power1 v f) where
fmap f = Power . fmap1 (fmap f) . runPower
instance Corepresentable Power1 where
type Corep Power1 = Rel
_Corep = dimap (Nat $ \(Power ab) -> Lift (ab . get _Const))
(Nat $ \(Lift ab) -> Power (ab . beget _Const))
| ekmett/hask | old/src/Hask/Power.hs | bsd-3-clause | 4,423 | 2 | 15 | 1,015 | 1,428 | 759 | 669 | 75 | 1 |
-- Copyright (c) 2017 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, FlexibleContexts #-}
-- | This module defines the FlatIR language.
--
-- FlatIR is a simply-typed flat-scoped intermediate language. It
-- is designed to be reasonably close to LLVM, with instructions
-- similar to LLVM's, but without some of the difficulties of LLVM.
--
-- At the moment, the FlatIR language is under revision, and will
-- probably change quite a bit.
--
-- Things that need to be done:
-- * Add notions of vtables and lookups to the language
-- * Add variant types
-- * Redesign/modify certain instructions (Deref, Call, Cast, Alloc)
-- * Add exception handling
module IR.FlatIR.Syntax(
-- * Indexes
Id,
Label,
Fieldname,
Typename,
Globalname,
-- * Operators and options
Binop(..),
Unop(..),
-- * Core language
-- ** Types
Type(..),
TypeDef(..),
FieldDef(..),
FormDef(..),
Ptr(..),
Mutability(..),
-- ** Execution
Exp(..),
LValue(..),
Stm(..),
Bind(..),
Transfer(..),
-- ** Definitions
DeclNames(..),
Block(..),
Body(..),
Global(..),
Module(..),
-- ** Utilities
renameType
) where
import Data.Array
import Data.Graph.Inductive.Graph(Graph)
import Data.Functor
import Data.Hashable
import Data.Maybe
import Data.Intervals(Intervals)
import Data.Position.DWARFPosition(DWARFPosition)
import Data.Word
import IR.Common.Alloc
import IR.Common.Body
import IR.Common.LValue
import IR.Common.Names
import IR.Common.Ptr
import IR.Common.Operator
import IR.Common.Rename
import IR.Common.RenameType
import IR.Common.Transfer
import Prelude hiding (head, init)
--import Prelude.Extras
--import Text.Format
import Text.XML.Expat.Pickle
import Text.XML.Expat.Tree(NodeG)
import qualified Data.ByteString as Strict
-- FlatIR is a simply-typed IR intended to be close to LLVM. It is
-- intended primarily as a jumping-off point for other languages
-- targeting LLVM.
-- Programs in FlatIR are equipped with very detailed information
-- about garbage collection. This is passed through to LLVM in the
-- form of metadata.
-- FlatIR also contains a virtual call abstraction, which allows
-- polymorphic languages to compile to FlatIR without having to
-- monomorphise everything. XXX IMPLEMENT THIS
-- FlatIR will eventually contain transaction information.
-- In general, any optimization pass that would be written for
-- FlatIR should instead be written for LLVM, unless there is a very
-- compelling reason for it. Examples would be optimizations that
-- deal with GC or virtual calls (or eventually transactions).
-- | Data for a structure field.
data FieldDef tagty =
FieldDef {
-- | The name of the field
fieldDefName :: !Strict.ByteString,
-- | The mutability of the field.
fieldDefMutability :: !Mutability,
-- | The type of the field.
fieldDefTy :: Type tagty,
-- | The position in source from which this arises.
fieldDefPos :: DWARFPosition Globalname Typename
}
-- | Data for a variant.
data FormDef tagty =
FormDef {
-- | The name of the variant.
formDefName :: !Strict.ByteString,
-- | The mutability of the variant data.
formDefMutability :: !Mutability,
-- | The variant type.
formDefTy :: Type tagty,
-- | The position in source from which this arises.
formDefPos :: DWARFPosition Globalname Typename
}
-- | Types. Types are monomorphic, and correspond roughly with LLVM
-- types.
data Type tagty =
-- | A function type
FuncType {
-- | The return type of the function.
funcTyRetTy :: Type tagty,
-- | The types of the arguments.
funcTyArgTys :: [Type tagty],
-- | The position in source from which this arises.
funcTyPos :: DWARFPosition Globalname Typename
}
-- | A structure, representing both tuples and records
| StructType {
-- | Whether or not the struct layout is packed.
structPacked :: !Bool,
-- | The fields of the struct.
structFields :: !(Array Fieldname (FieldDef tagty)),
-- | The position in source from which this arises.
structPos :: DWARFPosition Globalname Typename
}
-- | A variant (sum) type, representing tagged unions
| VariantType {
-- | The forms (alternatives) of the variant.
variantTyForms :: !(Array Formname (FormDef tagty)),
-- | The position in source from which this arises.
variantTyPos :: DWARFPosition Globalname Typename
}
-- | An array. Unlike LLVM arrays, these may be variable-sized
| ArrayType {
-- | The length of the array, if known.
arrayLen :: !(Maybe Word),
-- | The type of array elements.
arrayElemTy :: Type tagty,
-- | The position in source from which this arises.
arrayPos :: DWARFPosition Globalname Typename
}
-- | Pointers, both native and GC
| PtrType {
-- | The pointer information
ptrTy :: !(Ptr tagty (Type tagty)),
-- | The position in source from which this arises.
ptrPos :: DWARFPosition Globalname Typename
}
-- | An integer, possibly signed, with a size.
| IntType {
-- | Whether or not the int is signed.
intSigned :: !Bool,
-- | The size of the int in bits.
intSize :: !Word,
-- | The possible-value intervals for the integer.
intIntervals :: !(Intervals Integer),
-- | The position in source from which this arises.
intPos :: DWARFPosition Globalname Typename
}
-- | Floating point types
| FloatType {
-- | The size of the float in bits.
floatSize :: !Word,
-- | The position in source from which this arises.
floatPos :: DWARFPosition Globalname Typename
}
-- | A defined type
| IdType {
-- | The name for this type.
idName :: !Typename,
-- | The position in source from which this arises.
idPos :: DWARFPosition Globalname Typename
}
-- | The unit type, equivalent to SML unit and C/Java void
| UnitType {
-- | The position in source from which this arises.
unitPos :: DWARFPosition Globalname Typename
}
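-- Illustrative note (not part of the original module): a simple function type
-- such as "double -> unit" would be built roughly as follows, where @pos@
-- stands for whatever 'DWARFPosition' value the front end supplies:
--
-- > FuncType { funcTyRetTy  = UnitType pos
-- >          , funcTyArgTys = [FloatType { floatSize = 64, floatPos = pos }]
-- >          , funcTyPos    = pos }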
-- | An expression
data Exp tagty =
-- | Allocate an object.
Alloc {
-- | The allocation data.
allocData :: !(Allocation tagty (Type tagty) (Exp tagty)),
-- | The position in source from which this arises.
allocPos :: DWARFPosition Globalname Typename
}
-- | A binary operation
| Binop {
-- | The operator.
binopOp :: !Binop,
-- | The left hand side.
binopLeft :: Exp tagty,
-- | The right hand side.
binopRight :: Exp tagty,
-- | The position in source from which this arises.
binopPos :: DWARFPosition Globalname Typename
}
-- | Call a function.
| Call {
-- | The function being called. Must be a function value.
callFunc :: Exp tagty,
-- | The arguments to the function.
callArgs :: [Exp tagty],
-- | The position in source from which this arises.
callPos :: DWARFPosition Globalname Typename
}
-- | A unary operation
| Unop {
-- | The operator.
unopOp :: !Unop,
-- | The operand.
unopVal :: Exp tagty,
-- | The position in source from which this arises.
unopPos :: DWARFPosition Globalname Typename
}
-- | A conversion from one type to another.
| Conv {
-- | The type to which the value is being converted.
convTy :: Type tagty,
-- | The value being converted.
convVal :: Exp tagty,
-- | The position in source from which this arises.
convPos :: DWARFPosition Globalname Typename
}
-- | Treat an expression as if it were the given type regardless of
-- its actual type.
| Cast {
-- | The type to which the value is being cast.
castTy :: Type tagty,
-- | The value being cast.
castVal :: Exp tagty,
-- | The position in source from which this arises.
castPos :: DWARFPosition Globalname Typename
}
-- | Address of an LValue.
| AddrOf {
-- | The value having its address taken.
addrofVal :: LValue (Exp tagty),
-- | The position in source from which this arises.
addrofPos :: DWARFPosition Globalname Typename
}
-- | A structure literal.
| StructLit {
-- | The literal's type, must be a struct type.
structLitTy :: Type tagty,
-- | The constant's field values
structLitFields :: !(Array Fieldname (Exp tagty)),
-- | The position in source from which this arises.
structLitPos :: DWARFPosition Globalname Typename
}
-- | A variant literal.
| VariantLit {
-- | The literal's type, must be a variant type.
variantLitTy :: Type tagty,
-- | The literal's form.
variantLitForm :: !Formname,
-- | The literal's inner value.
variantLitVal :: Exp tagty,
-- | The position in source from which this arises.
variantLitPos :: DWARFPosition Globalname Typename
}
-- | An array literal
| ArrayLit {
-- | The constant's type, must be an array type.
arrayLitTy :: Type tagty,
-- | The constant's values
arrayLitVals :: [Exp tagty],
-- | The position in source from which this arises.
arrayLitPos :: DWARFPosition Globalname Typename
}
-- | A numerical constant with a given size and signedness. XXX: add a
-- floating-point constant.
| IntLit {
-- | The constant's type, must be an integer or float type.
intLitTy :: Type tagty,
-- | The constant's value
intLitVal :: !Integer,
-- | The position in source from which this arises.
intLitPos :: DWARFPosition Globalname Typename
}
-- | An LValue.
| LValue { lvalueData :: !(LValue (Exp tagty)) }
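-- Illustrative note (not part of the original module): expressions nest in the
-- obvious way; converting some already-built expression @e@ to a 64-bit float
-- at position @pos@ looks roughly like
--
-- > Conv { convTy  = FloatType { floatSize = 64, floatPos = pos }
-- >      , convVal = e, convPos = pos }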
-- | A global value. Represents a global variable or a function.
data Global tagty gr =
-- | A function
Function {
-- | Name of the function
funcName :: !(Maybe DeclNames),
-- | Return type
funcRetTy :: Type tagty,
-- | A map from identifiers for arguments and local variables to
-- their types.
funcValTys :: !(Array Id (Type tagty)),
-- | A list of the identifiers representing arguments
funcParams :: [Id],
-- | The function's body, if it has one
funcBody :: Maybe (Body (Exp tagty) (StmElems (Exp tagty)) gr),
-- | The position in source from which this arises.
funcPos :: DWARFPosition Globalname Typename
}
-- | A global variable
| GlobalVar {
-- | The name of the variable.
gvarName :: !(Maybe DeclNames),
-- | The type of the variable.
gvarTy :: Type tagty,
-- | The initializer.
gvarInit :: Maybe (Exp tagty),
-- | The variable's mutability.
gvarMutability :: !Mutability,
-- | The position in source from which this arises.
gvarPos :: DWARFPosition Globalname Typename
}
-- | Type definitions.
data TypeDef tagty =
-- | A full, named type definition.
TypeDef {
-- | The typedef's name.
typeDefStr :: !Strict.ByteString,
-- | The type.
typeDefTy :: !(Type tagty),
-- | Position of the type definition.
typeDefPos :: DWARFPosition Globalname Typename
}
-- | A type definition to a name.
| Name {
-- | The typedef's name.
nameStr :: !Strict.ByteString,
-- | Position of the type definition.
namePos :: DWARFPosition Globalname Typename
}
-- | An anonymous type definition.
| Anon {
-- | The type.
anonTy :: !(Type tagty),
-- | Position of the type definition.
anonPos :: DWARFPosition Globalname Typename
}
-- | A module. Represents a concept similar to an LLVM module.
data Module tagty tagdescty gr =
Module {
-- | Name of the module
modName :: !Strict.ByteString,
-- | A map from 'Typename's to their proper names and possibly their
-- definitions
modTypes :: !(Array Typename (TypeDef tagty)),
-- | A map from 'Tagname's to their definitions
modTags :: !(Array Tagname (TagDesc tagdescty)),
-- | Generated tagged types (this module will generate the
-- signatures and accessor definitions for all these 'Tagname's)
modGenTags :: [Tagname],
-- | A map from 'Globalname's to the corresponding definitions
modGlobals :: !(Array Globalname (Global tagty gr)),
-- | Should be a file position, indicating the file from which
-- this arises.
modPos :: DWARFPosition Globalname Typename
}
instance Eq tagty => Eq (FieldDef tagty) where
FieldDef { fieldDefName = name1, fieldDefTy = ty1,
fieldDefMutability = mut1 } ==
FieldDef { fieldDefName = name2, fieldDefTy = ty2,
fieldDefMutability = mut2 } =
mut1 == mut2 && name1 == name2 && ty1 == ty2
instance Eq tagty => Eq (FormDef tagty) where
FormDef { formDefName = name1, formDefTy = ty1, formDefMutability = mut1 } ==
FormDef { formDefName = name2, formDefTy = ty2, formDefMutability = mut2 } =
mut1 == mut2 && name1 == name2 && ty1 == ty2
instance Eq tagty => Eq (Type tagty) where
FuncType { funcTyRetTy = retty1, funcTyArgTys = params1 } ==
FuncType { funcTyRetTy = retty2, funcTyArgTys = params2 } =
retty1 == retty2 && params1 == params2
StructType { structPacked = packed1, structFields = fields1 } ==
StructType { structPacked = packed2, structFields = fields2 } =
packed1 == packed2 && fields1 == fields2
VariantType { variantTyForms = forms1 } ==
VariantType { variantTyForms = forms2 } =
forms1 == forms2
ArrayType { arrayLen = len1, arrayElemTy = inner1 } ==
ArrayType { arrayLen = len2, arrayElemTy = inner2 } =
len1 == len2 && inner1 == inner2
PtrType { ptrTy = objtype1 } == PtrType { ptrTy = objtype2 } =
objtype1 == objtype2
IntType { intSigned = signed1, intIntervals = intervals1,
intSize = size1 } ==
IntType { intSigned = signed2, intIntervals = intervals2,
intSize = size2 } =
signed1 == signed2 && size1 == size2 && intervals1 == intervals2
IdType { idName = name1 } == IdType { idName = name2 } = name1 == name2
FloatType { floatSize = size1 } == FloatType { floatSize = size2 } =
size1 == size2
(UnitType _) == (UnitType _) = True
_ == _ = False
instance Eq tagty => Eq (Exp tagty) where
Alloc { allocData = alloc1 } == Alloc { allocData = alloc2 } =
alloc1 == alloc2
Binop { binopOp = op1, binopLeft = left1, binopRight = right1 } ==
Binop { binopOp = op2, binopLeft = left2, binopRight = right2 } =
op1 == op2 && left1 == left2 && right1 == right2
Call { callFunc = func1, callArgs = args1 } ==
Call { callFunc = func2, callArgs = args2 } =
func1 == func2 && args1 == args2
Unop { unopOp = op1, unopVal = val1 } ==
Unop { unopOp = op2, unopVal = val2 } =
op1 == op2 && val1 == val2
Conv { convTy = ty1, convVal = val1 } ==
Conv { convTy = ty2, convVal = val2 } =
ty1 == ty2 && val1 == val2
Cast { castTy = ty1, castVal = val1 } ==
Cast { castTy = ty2, castVal = val2 } =
ty1 == ty2 && val1 == val2
AddrOf { addrofVal = val1 } == AddrOf { addrofVal = val2 } = val1 == val2
StructLit { structLitTy = ty1, structLitFields = fields1 } ==
StructLit { structLitTy = ty2, structLitFields = fields2 } =
ty1 == ty2 && fields1 == fields2
VariantLit { variantLitTy = ty1, variantLitForm = form1,
variantLitVal = val1 } ==
VariantLit { variantLitTy = ty2, variantLitForm = form2,
variantLitVal = val2 } =
form1 == form2 && ty1 == ty2 && val1 == val2
ArrayLit { arrayLitTy = ty1, arrayLitVals = vals1 } ==
ArrayLit { arrayLitTy = ty2, arrayLitVals = vals2 } =
ty1 == ty2 && vals1 == vals2
IntLit { intLitTy = ty1, intLitVal = val1 } ==
IntLit { intLitTy = ty2, intLitVal = val2 } =
ty1 == ty2 && val1 == val2
(LValue lval1) == (LValue lval2) = lval1 == lval2
_ == _ = False
instance Ord tagty => Ord (FieldDef tagty) where
compare FieldDef { fieldDefName = name1, fieldDefTy = ty1,
fieldDefMutability = mut1 }
FieldDef { fieldDefName = name2, fieldDefTy = ty2,
fieldDefMutability = mut2 } =
case compare mut1 mut2 of
EQ -> case compare name1 name2 of
EQ -> compare ty1 ty2
out -> out
out -> out
instance Ord tagty => Ord (FormDef tagty) where
compare FormDef { formDefName = name1, formDefTy = ty1,
formDefMutability = mut1 }
FormDef { formDefName = name2, formDefTy = ty2,
formDefMutability = mut2 } =
case compare mut1 mut2 of
EQ -> case compare name1 name2 of
EQ -> compare ty1 ty2
out -> out
out -> out
instance Ord tagty => Ord (Type tagty) where
compare FuncType { funcTyRetTy = retty1, funcTyArgTys = params1 }
FuncType { funcTyRetTy = retty2, funcTyArgTys = params2 } =
case compare retty1 retty2 of
EQ -> compare params1 params2
out -> out
compare FuncType {} _ = LT
compare _ FuncType {} = GT
compare StructType { structPacked = packed1, structFields = fields1 }
StructType { structPacked = packed2, structFields = fields2 } =
case compare packed1 packed2 of
EQ -> compare fields1 fields2
out -> out
compare StructType {} _ = LT
compare _ StructType {} = GT
compare VariantType { variantTyForms = forms1 }
VariantType { variantTyForms = forms2 } =
compare forms1 forms2
compare VariantType {} _ = LT
compare _ VariantType {} = GT
compare ArrayType { arrayLen = len1, arrayElemTy = inner1 }
ArrayType { arrayLen = len2, arrayElemTy = inner2 } =
case compare len1 len2 of
EQ -> compare inner1 inner2
out -> out
compare ArrayType {} _ = LT
compare _ ArrayType {} = GT
compare PtrType { ptrTy = objtype1 } PtrType { ptrTy = objtype2 } =
compare objtype1 objtype2
compare PtrType {} _ = LT
compare _ PtrType {} = GT
compare IntType { intSigned = signed1, intIntervals = intervals1,
intSize = size1 }
IntType { intSigned = signed2, intIntervals = intervals2,
intSize = size2 } =
case compare signed1 signed2 of
EQ -> case compare size1 size2 of
EQ -> compare intervals1 intervals2
out -> out
out -> out
compare IntType {} _ = LT
compare _ IntType {} = GT
compare IdType { idName = name1 } IdType { idName = name2 } =
compare name1 name2
compare IdType {} _ = LT
compare _ IdType {} = GT
compare FloatType { floatSize = size1 } FloatType { floatSize = size2 } =
compare size1 size2
compare FloatType {} _ = LT
compare _ FloatType {} = GT
compare (UnitType _) (UnitType _) = EQ
instance Ord tagty => Ord (Exp tagty) where
compare Alloc { allocData = alloc1 } Alloc { allocData = alloc2 } =
compare alloc1 alloc2
compare Alloc {} _ = LT
compare _ Alloc {} = GT
compare Binop { binopOp = op1, binopLeft = left1, binopRight = right1 }
Binop { binopOp = op2, binopLeft = left2, binopRight = right2 } =
case compare op1 op2 of
EQ -> case compare left1 left2 of
EQ -> compare right1 right2
out -> out
out -> out
compare Binop {} _ = LT
compare _ Binop {} = GT
compare Call { callFunc = func1, callArgs = args1 }
Call { callFunc = func2, callArgs = args2 } =
case compare func1 func2 of
EQ -> compare args1 args2
out -> out
compare Call {} _ = LT
compare _ Call {} = GT
compare Unop { unopOp = op1, unopVal = val1 }
Unop { unopOp = op2, unopVal = val2 } =
case compare op1 op2 of
EQ -> compare val1 val2
out -> out
compare Unop {} _ = LT
compare _ Unop {} = GT
compare Conv { convTy = ty1, convVal = val1 }
Conv { convTy = ty2, convVal = val2 } =
case compare ty1 ty2 of
EQ -> compare val1 val2
out -> out
compare Conv {} _ = LT
compare _ Conv {} = GT
compare Cast { castTy = ty1, castVal = val1 }
Cast { castTy = ty2, castVal = val2 } =
case compare ty1 ty2 of
EQ -> compare val1 val2
out -> out
compare Cast {} _ = LT
compare _ Cast {} = GT
compare AddrOf { addrofVal = val1 } AddrOf { addrofVal = val2 } =
compare val1 val2
compare AddrOf {} _ = LT
compare _ AddrOf {} = GT
compare StructLit { structLitTy = ty1, structLitFields = fields1 }
StructLit { structLitTy = ty2, structLitFields = fields2 } =
case compare ty1 ty2 of
EQ -> compare fields1 fields2
out -> out
compare StructLit {} _ = LT
compare _ StructLit {} = GT
compare VariantLit { variantLitTy = ty1, variantLitForm = form1,
variantLitVal = val1 }
VariantLit { variantLitTy = ty2, variantLitForm = form2,
variantLitVal = val2 } =
case compare form1 form2 of
EQ -> case compare ty1 ty2 of
EQ -> compare val1 val2
out -> out
out -> out
compare VariantLit {} _ = LT
compare _ VariantLit {} = GT
compare ArrayLit { arrayLitTy = ty1, arrayLitVals = vals1 }
ArrayLit { arrayLitTy = ty2, arrayLitVals = vals2 } =
case compare ty1 ty2 of
EQ -> compare vals1 vals2
out -> out
compare ArrayLit {} _ = LT
compare _ ArrayLit {} = GT
compare IntLit { intLitTy = ty1, intLitVal = val1 }
IntLit { intLitTy = ty2, intLitVal = val2 } =
case compare ty1 ty2 of
EQ -> compare val1 val2
out -> out
compare IntLit {} _ = LT
compare _ IntLit {} = GT
compare LValue { lvalueData = lval1 } LValue { lvalueData = lval2 } =
compare lval1 lval2
instance Hashable tagty => Hashable (FieldDef tagty) where
hashWithSalt s FieldDef { fieldDefName = name, fieldDefTy = ty,
fieldDefMutability = mut } =
s `hashWithSalt` mut `hashWithSalt` name `hashWithSalt` ty
instance Hashable tagty => Hashable (FormDef tagty) where
hashWithSalt s FormDef { formDefName = name, formDefTy = ty,
formDefMutability = mut } =
s `hashWithSalt` mut `hashWithSalt` name `hashWithSalt` ty
instance Hashable tagty => Hashable (Type tagty) where
hashWithSalt s FuncType { funcTyRetTy = retty, funcTyArgTys = params } =
s `hashWithSalt` (0 :: Int) `hashWithSalt` retty `hashWithSalt` params
hashWithSalt s StructType { structPacked = packed, structFields = fields } =
s `hashWithSalt` (1 :: Int) `hashWithSalt`
packed `hashWithSalt` elems fields
hashWithSalt s VariantType { variantTyForms = forms } =
s `hashWithSalt` (2 :: Int) `hashWithSalt` elems forms
hashWithSalt s ArrayType { arrayLen = Nothing, arrayElemTy = inner } =
s `hashWithSalt` (3 :: Int) `hashWithSalt` (0 :: Int) `hashWithSalt` inner
hashWithSalt s ArrayType { arrayLen = Just size, arrayElemTy = inner } =
s `hashWithSalt` (3 :: Int) `hashWithSalt` size `hashWithSalt` inner
hashWithSalt s PtrType { ptrTy = objtype } =
s `hashWithSalt` (4 :: Int) `hashWithSalt` objtype
hashWithSalt s IntType { intSigned = signed, intIntervals = intervals,
intSize = size } =
s `hashWithSalt` (5 :: Int) `hashWithSalt` signed `hashWithSalt`
intervals `hashWithSalt` size
hashWithSalt s IdType { idName = name } =
s `hashWithSalt` (6 :: Int) `hashWithSalt` name
hashWithSalt s FloatType { floatSize = size } =
s `hashWithSalt` (7 :: Int) `hashWithSalt` size
hashWithSalt s UnitType {} = s `hashWithSalt` (8 :: Int)
instance Hashable tagty => Hashable (Exp tagty) where
hashWithSalt s Alloc { allocData = alloc } =
s `hashWithSalt` (0 :: Int) `hashWithSalt` alloc
hashWithSalt s Binop { binopOp = op, binopLeft = left, binopRight = right } =
s `hashWithSalt` (1 :: Int) `hashWithSalt`
op `hashWithSalt` left `hashWithSalt` right
hashWithSalt s Call { callFunc = func, callArgs = args } =
s `hashWithSalt` (2 :: Int) `hashWithSalt` func `hashWithSalt` args
hashWithSalt s Unop { unopOp = op, unopVal = val } =
s `hashWithSalt` (3 :: Int) `hashWithSalt` op `hashWithSalt` val
hashWithSalt s Conv { convTy = ty, convVal = val } =
s `hashWithSalt` (4 :: Int) `hashWithSalt` ty `hashWithSalt` val
hashWithSalt s Cast { castTy = ty, castVal = val } =
s `hashWithSalt` (5 :: Int) `hashWithSalt` ty `hashWithSalt` val
hashWithSalt s AddrOf { addrofVal = val } =
s `hashWithSalt` (6 :: Int) `hashWithSalt` val
hashWithSalt s StructLit { structLitTy = ty, structLitFields = fields } =
s `hashWithSalt` (7 :: Int) `hashWithSalt` ty `hashWithSalt` elems fields
hashWithSalt s VariantLit { variantLitTy = ty, variantLitForm = form,
variantLitVal = val } =
s `hashWithSalt` (8 :: Int) `hashWithSalt`
form `hashWithSalt` ty `hashWithSalt` val
hashWithSalt s ArrayLit { arrayLitTy = ty, arrayLitVals = vals } =
s `hashWithSalt` (9 :: Int) `hashWithSalt` ty `hashWithSalt` vals
hashWithSalt s IntLit { intLitTy = ty, intLitVal = val } =
s `hashWithSalt` (10 :: Int) `hashWithSalt` ty `hashWithSalt` val
hashWithSalt s (LValue lval) =
s `hashWithSalt` (11 :: Int) `hashWithSalt` lval
instance RenameType Typename (FieldDef tagty) where
renameType f fdef @ FieldDef { fieldDefTy = ty } =
fdef { fieldDefTy = renameType f ty }
instance RenameType Typename (FormDef tagty) where
renameType f vdef @ FormDef { formDefTy = ty } =
vdef { formDefTy = renameType f ty }
instance RenameType Typename (Type tagty) where
renameType f ty @ FuncType { funcTyRetTy = retty, funcTyArgTys = argtys } =
ty { funcTyArgTys = renameType f argtys, funcTyRetTy = renameType f retty }
renameType f ty @ StructType { structFields = fields } =
ty { structFields = fmap (renameType f) fields }
renameType f ty @ VariantType { variantTyForms = forms } =
ty { variantTyForms = fmap (renameType f) forms }
renameType f ty @ ArrayType { arrayElemTy = elemty } =
ty { arrayElemTy = renameType f elemty }
-- renameType f ty @ PtrType { ptrTy = inner } =
-- ty { ptrTy = renameType f inner }
renameType f ty @ IdType { idName = name } = ty { idName = f name }
renameType _ ty = ty
instance RenameType Typename (Exp tagty) where
renameType f a @ Alloc { allocData = alloc } =
a { allocData = renameType f alloc }
renameType f e @ Binop { binopLeft = left, binopRight = right } =
e { binopLeft = renameType f left, binopRight = renameType f right }
renameType f e @ Call { callFunc = func, callArgs = args } =
e { callFunc = renameType f func, callArgs = renameType f args }
renameType f e @ Conv { convTy = ty, convVal = val } =
e { convTy = renameType f ty, convVal = renameType f val }
renameType f e @ Cast { castTy = ty, castVal = val } =
e { castTy = renameType f ty, castVal = renameType f val }
renameType f e @ Unop { unopVal = val } = e { unopVal = renameType f val }
renameType f e @ AddrOf { addrofVal = val } =
e { addrofVal = renameType f val }
renameType f e @ StructLit { structLitFields = fields, structLitTy = ty } =
e { structLitFields = renameTypeArray f fields,
structLitTy = renameType f ty }
renameType f e @ VariantLit { variantLitVal = val, variantLitTy = ty } =
e { variantLitVal = renameType f val, variantLitTy = renameType f ty }
renameType f e @ ArrayLit { arrayLitVals = vals, arrayLitTy = ty } =
e { arrayLitVals = renameType f vals, arrayLitTy = renameType f ty }
renameType f e @ IntLit { intLitTy = ty } =
e { intLitTy = renameType f ty }
renameType f (LValue l) = LValue (renameType f l)
instance Rename Id (Exp tagty) where
rename f a @ Alloc { allocData = alloc } = a { allocData = rename f alloc }
rename f e @ Binop { binopLeft = left, binopRight = right } =
e { binopLeft = rename f left, binopRight = rename f right }
rename f e @ Call { callFunc = func, callArgs = args } =
e { callFunc = rename f func, callArgs = rename f args }
rename f e @ Conv { convVal = val } = e { convVal = rename f val }
rename f e @ Cast { castVal = val } = e { castVal = rename f val }
rename f e @ Unop { unopVal = val } = e { unopVal = rename f val }
rename f e @ AddrOf { addrofVal = val } = e { addrofVal = rename f val }
rename f e @ StructLit { structLitFields = fields } =
e { structLitFields = renameArray f fields }
rename f e @ VariantLit { variantLitVal = val } =
e { variantLitVal = rename f val }
rename f e @ ArrayLit { arrayLitVals = vals } =
e { arrayLitVals = rename f vals }
rename f (LValue l) = LValue (rename f l)
rename _ e = e
funcTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
funcTypePickler =
let
revfunc FuncType { funcTyRetTy = retty, funcTyArgTys = argtys,
funcTyPos = pos } = (argtys, retty, pos)
revfunc _ = error "Can't convert to FuncType"
in
xpWrap (\(argtys, retty, pos) -> FuncType { funcTyRetTy = retty,
funcTyArgTys = argtys,
funcTyPos = pos },
revfunc)
(xpElemNodes (gxFromString "FuncType")
(xpTriple (xpElemNodes (gxFromString "args")
(xpList xpickle))
(xpElemNodes (gxFromString "ret") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
XmlPickler [NodeG [] tag text] (Fieldname, FieldDef typetag) where
xpickle =
xpWrap (\((idx, fname, mut), (ty, pos)) ->
(idx, FieldDef { fieldDefName = gxToByteString fname,
fieldDefMutability = mut, fieldDefTy = ty,
fieldDefPos = pos }),
\(idx, FieldDef { fieldDefName = fname, fieldDefMutability = mut,
fieldDefTy = ty, fieldDefPos = pos }) ->
((idx, gxFromByteString fname, mut), (ty, pos)))
(xpElem (gxFromString "field")
(xpTriple xpickle (xpAttr (gxFromString "name") xpText)
xpickle)
(xpPair (xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
fieldsPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Array Fieldname (FieldDef typetag))
fieldsPickler =
xpWrap (\l -> array (toEnum 0, toEnum (length l - 1)) l, assocs)
(xpElemNodes (gxFromString "fields") (xpList xpickle))
structTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
structTypePickler =
let
revfunc StructType { structPacked = packed, structFields = fields,
structPos = pos } = (packed, (fields, pos))
revfunc _ = error "Can't convert to StructType"
in
xpWrap (\(packed, (fields, pos)) -> StructType { structPacked = packed,
structFields = fields,
structPos = pos },
revfunc)
(xpElem (gxFromString "StructType")
(xpAttr (gxFromString "packed") xpPrim)
(xpPair (xpElemNodes (gxFromString "fields") fieldsPickler)
(xpElemNodes (gxFromString "pos") xpickle)))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
XmlPickler [NodeG [] tag text] (Formname, FormDef typetag) where
xpickle =
xpWrap (\((idx, fname, mut), (ty, pos)) ->
(idx, FormDef { formDefName = gxToByteString fname,
formDefMutability = mut, formDefTy = ty,
formDefPos = pos }),
\(idx, FormDef { formDefMutability = mut, formDefPos = pos,
formDefName = fname, formDefTy = ty }) ->
((idx, gxFromByteString fname, mut), (ty, pos)))
(xpElem (gxFromString "form")
(xpTriple xpickle (xpAttr (gxFromString "name") xpText)
xpickle)
(xpPair (xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
formsPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Array Formname (FormDef typetag))
formsPickler =
xpWrap (\l -> array (toEnum 0, toEnum (length l - 1)) l, assocs)
(xpElemNodes (gxFromString "forms") (xpList xpickle))
variantTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
variantTypePickler =
let
revfunc VariantType { variantTyForms = forms, variantTyPos = pos } =
(forms, pos)
revfunc _ = error "Can't convert to VariantType"
in
xpWrap (\(forms, pos) -> VariantType { variantTyForms = forms,
variantTyPos = pos },
revfunc)
(xpElemNodes (gxFromString "VariantType")
(xpPair (xpElemNodes (gxFromString "forms")
formsPickler)
(xpElemNodes (gxFromString "pos") xpickle)))
arrayTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
arrayTypePickler =
let
revfunc ArrayType { arrayElemTy = elemty, arrayLen = len,
arrayPos = pos } = (len, (elemty, pos))
revfunc _ = error "Can't convert to ArrayType"
in
xpWrap (\(len, (elemty, pos)) -> ArrayType { arrayElemTy = elemty,
arrayLen = len,
arrayPos = pos },
revfunc)
(xpElem (gxFromString "ArrayType")
(xpOption (xpAttr (gxFromString "len") xpPrim))
(xpPair (xpElemNodes (gxFromString "elem") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
ptrTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
ptrTypePickler =
let
revfunc PtrType { ptrTy = ptrty, ptrPos = pos } = (ptrty, pos)
revfunc _ = error "Can't convert to PtrType"
in
xpWrap (\(ptrty, pos) -> PtrType { ptrTy = ptrty, ptrPos = pos },
revfunc)
(xpElemNodes (gxFromString "PtrType")
(xpPair (xpElemNodes (gxFromString "inner") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
intTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
intTypePickler =
let
revfunc IntType { intSize = size, intSigned = signed,
intIntervals = intervals, intPos = pos } =
((signed, size), (intervals, pos))
revfunc _ = error "Can't convert to IntType"
in
xpWrap (\((signed, size), (intervals, pos)) ->
IntType { intSize = size, intSigned = signed,
intIntervals = intervals, intPos = pos }, revfunc)
(xpElem (gxFromString "IntType")
(xpPair (xpAttr (gxFromString "signed") xpPrim)
(xpAttr (gxFromString "size") xpPrim))
(xpPair (xpElemNodes (gxFromString "intervals") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
floatTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
floatTypePickler =
let
revfunc FloatType { floatSize = size, floatPos = pos } = (size, pos)
revfunc _ = error "Can't convert to FloatType"
in
xpWrap (\(size, pos) -> FloatType { floatSize = size, floatPos = pos },
revfunc)
(xpElem (gxFromString "FloatType")
(xpAttr (gxFromString "size") xpPrim)
(xpElemNodes (gxFromString "pos") xpickle))
idTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
idTypePickler =
let
revfunc IdType { idName = tyname, idPos = pos } = (tyname, pos)
revfunc _ = error "Can't convert to IdType"
in
xpWrap (\(tyname, pos) -> IdType { idName = tyname, idPos = pos }, revfunc)
(xpElem (gxFromString "IdType") xpickle
(xpElemNodes (gxFromString "pos") xpickle))
unitTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
unitTypePickler =
let
revfunc (UnitType pos) = pos
revfunc _ = error "Can't convert to UnitType"
in
xpWrap (UnitType, revfunc)
(xpElemNodes (gxFromString "UnitType")
(xpElemNodes (gxFromString "pos") xpickle))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
XmlPickler [NodeG [] tag text] (Type typetag) where
xpickle =
let
picker FuncType {} = 0
picker StructType {} = 1
picker VariantType {} = 2
picker ArrayType {} = 3
picker PtrType {} = 4
picker IntType {} = 5
picker FloatType {} = 6
picker IdType {} = 7
picker UnitType {} = 8
in
xpAlt picker [funcTypePickler, structTypePickler, variantTypePickler,
arrayTypePickler, ptrTypePickler, intTypePickler,
floatTypePickler, idTypePickler, unitTypePickler ]
binopPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
binopPickler =
let
revfunc Binop { binopOp = op, binopLeft = left,
binopRight = right, binopPos = pos } =
(op, (left, right, pos))
revfunc _ = error "Can't convert to Binop"
in
xpWrap (\(op, (left, right, pos)) ->
Binop { binopOp = op, binopLeft = left,
binopRight = right, binopPos = pos }, revfunc)
(xpElem (gxFromString "Binop") xpickle
(xpTriple (xpElemNodes (gxFromString "left") xpickle)
(xpElemNodes (gxFromString "right") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
callPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
callPickler =
let
revfunc Call { callFunc = func, callArgs = args, callPos = pos } =
(func, args, pos)
revfunc _ = error "Can't convert to Call"
in
xpWrap (\(func, args, pos) -> Call { callFunc = func, callArgs = args,
callPos = pos }, revfunc)
(xpElemNodes (gxFromString "Call")
(xpTriple (xpElemNodes (gxFromString "func") xpickle)
(xpElemNodes (gxFromString "args")
(xpList xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
unopPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
unopPickler =
let
revfunc Unop { unopOp = op, unopVal = val, unopPos = pos } =
(op, (val, pos))
revfunc _ = error "Can't convert to Unop"
in
xpWrap (\(op, (val, pos)) -> Unop { unopOp = op, unopVal = val,
unopPos = pos }, revfunc)
(xpElem (gxFromString "Unop") xpickle
(xpPair (xpElemNodes (gxFromString "val") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
convPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
convPickler =
let
revfunc Conv { convVal = val, convTy = ty, convPos = pos } = (val, ty, pos)
revfunc _ = error "Can't convert to Conv"
in
xpWrap (\(val, ty, pos) -> Conv { convVal = val, convTy = ty,
convPos = pos }, revfunc)
(xpElemNodes (gxFromString "Conv")
(xpTriple (xpElemNodes (gxFromString "val") xpickle)
(xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
castPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
castPickler =
let
revfunc Cast { castVal = val, castTy = ty, castPos = pos } = (val, ty, pos)
revfunc _ = error "Can't convert to Cast"
in
xpWrap (\(val, ty, pos) -> Cast { castVal = val, castTy = ty,
castPos = pos }, revfunc)
(xpElemNodes (gxFromString "Cast")
(xpTriple (xpElemNodes (gxFromString "val") xpickle)
(xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
addrofPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
addrofPickler =
let
revfunc AddrOf { addrofVal = val, addrofPos = pos } = (val, pos)
revfunc _ = error "Can't convert to AddrOf"
in
xpWrap (\(val, pos) -> AddrOf { addrofVal = val, addrofPos = pos },
revfunc)
(xpElemNodes (gxFromString "AddrOf")
(xpPair (xpElemNodes (gxFromString "val") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
structLitPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
structLitPickler =
let
revfunc StructLit { structLitTy = ty, structLitFields = fields,
structLitPos = pos } = (ty, fields, pos)
revfunc _ = error "Can't convert to StructLit"
fieldValsPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Array Fieldname (Exp typetag))
fieldValsPickler =
xpWrap (\l -> array (toEnum 0, toEnum (length l - 1)) l, assocs)
(xpList (xpElem (gxFromString "field") xpickle xpickle))
in
xpWrap (\(ty, fields, pos) ->
StructLit { structLitTy = ty, structLitFields = fields,
structLitPos = pos }, revfunc)
(xpElemNodes (gxFromString "StructLit")
(xpTriple (xpElemNodes (gxFromString "ty") xpickle)
(xpElemNodes (gxFromString "fields")
fieldValsPickler)
(xpElemNodes (gxFromString "pos") xpickle)))
variantLitPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
variantLitPickler =
let
revfunc VariantLit { variantLitTy = ty, variantLitVal = val,
variantLitForm = form, variantLitPos = pos } =
(form, (ty, val, pos))
revfunc _ = error "Can't convert to VariantLit"
in
xpWrap (\(form, (ty, val, pos)) ->
VariantLit { variantLitTy = ty, variantLitVal = val,
variantLitForm = form, variantLitPos = pos }, revfunc)
(xpElem (gxFromString "VariantLit") xpickle
(xpTriple (xpElemNodes (gxFromString "ty") xpickle)
(xpElemNodes (gxFromString "val") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
arrayLitPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
arrayLitPickler =
let
revfunc ArrayLit { arrayLitTy = ty, arrayLitVals = vals,
arrayLitPos = pos } = (ty, vals, pos)
revfunc _ = error "Can't convert to ArrayLit"
in
xpWrap (\(ty, vals, pos) -> ArrayLit { arrayLitTy = ty, arrayLitVals = vals,
arrayLitPos = pos }, revfunc)
(xpElemNodes (gxFromString "ArrayLit")
(xpTriple (xpElemNodes (gxFromString "ty") xpickle)
(xpElemNodes (gxFromString "vals")
(xpList xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
intLitPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
intLitPickler =
let
revfunc IntLit { intLitTy = ty, intLitVal = val, intLitPos = pos } =
(val, (ty, pos))
revfunc _ = error "Can't convert to IntLit"
in
xpWrap (\(val, (ty, pos)) -> IntLit { intLitTy = ty, intLitVal = val,
intLitPos = pos }, revfunc)
(xpElem (gxFromString "IntLit")
(xpAttr (gxFromString "val") xpPrim)
(xpPair (xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
lvaluePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
lvaluePickler =
let
revfunc (LValue lval) = lval
revfunc _ = error "Can't convert to LValue"
in
xpWrap (LValue, revfunc) (xpElemNodes (gxFromString "LValue") xpickle)
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
XmlPickler [NodeG [] tag text] (Exp typetag) where
xpickle =
let
picker Alloc {} = 0
picker Binop {} = 1
picker Call {} = 2
picker Unop {} = 3
picker Conv {} = 4
picker Cast {} = 5
picker AddrOf {} = 6
picker StructLit {} = 7
picker VariantLit {} = 8
picker ArrayLit {} = 9
picker IntLit {} = 10
picker LValue {} = 11
in
xpAlt picker [undefined, binopPickler, callPickler, unopPickler,
convPickler, castPickler, addrofPickler,
structLitPickler, variantLitPickler, arrayLitPickler,
intLitPickler, lvaluePickler ]
functionPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text, Graph gr,
XmlPickler [NodeG [] tag text] tagty) =>
PU [NodeG [] tag text] (Global tagty gr)
functionPickler =
let
revfunc Function { funcName = fname, funcRetTy = retty, funcValTys = valtys,
funcParams = params, funcBody = body, funcPos = pos } =
(fname, (retty, valtys, params, body, pos))
revfunc _ = error "Can't convert to Function"
valtysPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] tagty) =>
PU [NodeG [] tag text] (Array Id (Type tagty))
valtysPickler =
xpWrap (\l -> array (toEnum 0, toEnum (length l - 1)) l, assocs)
(xpList (xpElem (gxFromString "valty") xpickle xpickle))
in
xpWrap (\(fname, (retty, valtys, params, body, pos)) ->
Function { funcName = fname, funcRetTy = retty,
funcValTys = valtys, funcParams = params,
funcBody = body, funcPos = pos }, revfunc)
(xpElem (gxFromString "Function") (xpOption xpickle)
(xp5Tuple (xpElemNodes (gxFromString "retty") xpickle)
(xpElemNodes (gxFromString "valtys") valtysPickler)
(xpElemNodes (gxFromString "params")
(xpList xpickle))
(xpOption (xpElemNodes (gxFromString "body")
xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
globalvarPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] tagty) =>
PU [NodeG [] tag text] (Global tagty gr)
globalvarPickler =
let
revfunc GlobalVar { gvarName = gname, gvarTy = ty, gvarInit = init,
gvarMutability = mut, gvarPos = pos } =
((gname, mut), (ty, init, pos))
revfunc _ = error "Can't convert to GlobalVar"
in
xpWrap (\((gname, mut), (ty, init, pos)) ->
GlobalVar { gvarName = gname, gvarTy = ty, gvarInit = init,
gvarMutability = mut, gvarPos = pos }, revfunc)
(xpElem (gxFromString "GlobalVar") (xpPair (xpOption xpickle) xpickle)
(xpTriple (xpElemNodes (gxFromString "type") xpickle)
(xpOption (xpElemNodes (gxFromString "init")
xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
Graph gr, XmlPickler [NodeG [] tag text] tagty) =>
XmlPickler [NodeG [] tag text] (Global tagty gr) where
xpickle =
let
picker Function {} = 0
picker GlobalVar {} = 1
in
xpAlt picker [functionPickler, globalvarPickler]
{-
-- This mess is a good example of what I mean about format and a
-- naming function.
instance Graph gr => Format (Module gr) where
format (Module { modName = name, modTypes = types, modGlobals = globals,
modGCHeaders = gcheaders, modGenGCs = gcgen }) =
let
-- These functions here cause a heap of trouble. We want to
-- look up actual names, so they have to get moved inside the
-- format module call. This propagates downward and winds up
-- dragging almost everything inside.
formatTypename :: Typename -> Doc
formatTypename ty = "%" <> fst (types ! ty)
formatGCHeader :: GCHeader -> Doc
formatGCHeader hdr =
let
(ty, mut, mob) = gcheaders ! hdr
in
mut <+> mob <+> fst (types ! ty)
formatGlobalname :: Globalname -> Doc
formatGlobalname fname = "@" <>
(case globals ! fname of
Function { funcName = funcname } -> funcname
GlobalVar { gvarName = gvarname } -> gvarname)
formatType :: Type -> Doc
formatType (FuncType retty params) =
parenList (formatType retty) (map formatType params)
formatType (StructType packed fields) =
let
mapfun (str, mut, ty) =
mut <+> str <+> colon <+> formatType ty
fielddocs =
nest 2 (sep (punctuate comma (map mapfun (elems fields))))
in
if packed
then sep [format "<{", fielddocs, format "}>"]
else sep [lbrace, fielddocs, rbrace ]
formatType (PtrType (Native inner)) = formatType inner <> "*"
formatType (PtrType (GC ptrclass hdr)) =
ptrclass <+> formatGCHeader hdr
formatType (ArrayType (Just size) inner) =
formatType inner <> brackets size
formatType (ArrayType Nothing inner) = formatType inner <> "[]"
formatType (IntType True size) = "i" <> size
formatType (IntType False size) = "ui" <> size
formatType (IdType i) = formatTypename i
formatType (FloatType size) = format "f" <> size
formatType UnitType = format "unit"
formatExp (Call f args) =
parenList (formatExp f) (map formatExp args)
formatExp (GCAlloc header Nothing Nothing) =
"gcalloc" <+> formatGCHeader header
formatExp (GCAlloc header (Just size) Nothing) =
"gcalloc" <+> formatGCHeader header <+> brackets (formatExp size)
formatExp (GCAlloc header Nothing (Just gen)) =
"gcalloc" <+> formatGCHeader header <+> "gen" <+> formatExp gen
formatExp (GCAlloc header (Just size) (Just gen)) =
"gcalloc" <+> formatGCHeader header <+>
brackets (formatExp size) <+> "gen" <+> formatExp gen
formatExp (Binop op l r) =
parens (sep [ format op, formatExp l <> comma, formatExp r ])
formatExp (Unop op e) = parens (hang (format op) 2 (formatExp e))
formatExp (Conv ty inner) =
parens (sep [ format "conv", formatExp inner,
format "to", formatType ty ])
formatExp (Cast ty inner) =
parens (sep [ format "cast", formatExp inner,
format "to", formatType ty ])
formatExp (AddrOf l) = "addrof" <+> formatLValue l
formatExp (LValue l) = formatLValue l
formatExp (StructLit ty fields) =
let
headerdoc = "const" <+> formatType ty
in
braceBlock headerdoc (punctuate comma (map formatExp (elems fields)))
formatExp (ArrayLit ty inits) =
let
headerdoc = "const" <+> formatType ty
in
braceBlock headerdoc (punctuate comma (map formatExp inits))
formatExp (IntLit ty n) = hang (formatType ty) 2 (format n)
formatLValue :: LValue -> Doc
formatLValue (Deref e) = "*" <+> formatExp e
formatLValue (Index e i) = formatExp e <+> brackets (formatExp i)
formatLValue (Field (LValue (Deref e)) field) =
formatExp e <> "->" <> field
formatLValue (Field e field) = formatExp e <> "." <> field
formatLValue (Global g) = formatGlobalname g
formatLValue (Var v) = format v
formatStm :: Stm -> Doc
formatStm (Move dst src) =
formatLValue dst <+> "<-" <+> formatExp src
formatStm (Do e) = formatExp e
formatTransfer :: Transfer -> Doc
formatTransfer (Goto l) = "goto" <+> l
formatTransfer (Case e cases def) =
let
mapfun (i, l) = i <> colon <+> l
in
braceBlock ("case" <+> formatExp e)
(("default" <> colon <+> def) : map mapfun cases)
formatTransfer (Ret (Just e)) = "ret" <+> formatExp e
formatTransfer (Ret Nothing) = format "ret"
formatTransfer Unreachable = format "unreachable"
formatBlock (Block stms trans) =
vcat ((map formatStm stms) ++ [formatTransfer trans])
formatGlobal :: Graph gr => Global gr -> Doc
formatGlobal (Function { funcName = fname, funcRetTy = retty,
funcParams = argnames, funcValTys = vartypes,
funcBody = body }) =
let
argfun i = i <+> colon <+> formatType (vartypes ! i)
varfun (i, ty) = i <+> colon <+> formatType ty
header = parenList ("function" <+> fname) (map argfun argnames)
vardocs = map varfun (assocs vartypes)
fcontent =
case body of
Just (Body (Label entry) graph) ->
let
getnode = fromJust . lab graph
blockfun node =
("L" <> node <> colon) $$
nest 2 (formatBlock (getnode node))
in
vardocs ++ (map blockfun (dfs [entry] graph))
Nothing -> vardocs
in
braceBlock (header <+> colon <+> formatType retty) fcontent
formatGlobal (GlobalVar { gvarName = gname, gvarTy = ty,
gvarInit = Just body }) =
hang (hang ("global" <+> formatType ty) 2 gname) 2 (formatExp body)
formatGlobal (GlobalVar { gvarName = gname, gvarTy = ty,
gvarInit = Nothing }) =
hang ("global" <+> formatType ty) 2 gname
typefunc (tyname, Just ty) =
hang ("type" <+> tyname <+> equals) 2 (formatType ty)
typefunc (tyname, Nothing) = "type" <+> tyname
gcheaderfunc (GCHeader ind, (ty, mob, mut)) =
"gc_header_" <> ind <+> equals <+> mut <+> mob <+> fst (types ! ty)
gcgenfunc hdr = "gen" <+> formatGCHeader hdr
typesdocs = map typefunc (elems types)
gchdrdocs = map gcheaderfunc (assocs gcheaders)
gcgendocs = map gcgenfunc gcgen
globalsdocs = map formatGlobal (elems globals)
content = typesdocs ++ (space : gchdrdocs) ++
(space : gcgendocs) ++ (space : globalsdocs)
in
braceBlock ("module" <+> name) content
instance Graph gr => Show (Module gr) where
show = show . format
-}
| emc2/chill | src/IR/FlatIR/Syntax.hs | bsd-3-clause | 60,607 | 28 | 17 | 18,793 | 14,673 | 8,055 | 6,618 | 1,018 | 2 |
{-# OPTIONS #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Py.ParserMonad
-- Copyright : (c) 2009 Bernie Pope
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : ghc
--
-- Monad support for Python parser and lexer.
-----------------------------------------------------------------------------
module Language.Py.ParserMonad
( P
, execParser
, execParserKeepComments
, runParser
, thenP
, returnP
, setLocation
, getLocation
, getInput
, setInput
, getLastToken
, setLastToken
, setLastEOL
, getLastEOL
, ParseError (..)
, ParseState (..)
, initialState
, pushStartCode
, popStartCode
, getStartCode
, getIndent
, pushIndent
, popIndent
, getIndentStackDepth
, getParen
, pushParen
, popParen
, getParenStackDepth
, addComment
, getComments
, spanError
) where
import Language.Py.SrcLocation (SrcLocation (..), SrcSpan (..), Span (..))
import Language.Py.Token (Token (..))
import Language.Py.ParseError (ParseError (..))
import Control.Applicative ((<$>))
import Control.Monad.State.Class
import Control.Monad.State.Strict as State
import Control.Monad.Error as Error
import Control.Monad.Error.Class
import Control.Monad.Identity as Identity
import Control.Monad.Trans as Trans
import Language.Py.Pretty
internalError :: String -> P a
internalError = throwError . StrError
spanError :: Span a => a -> String -> P b
spanError x str = throwError $ StrError $ unwords [prettyText $ getSpan x, str]
data ParseState = ParseState
{ location :: !SrcLocation -- position at current input location
, input :: !String -- the current input
, previousToken :: !Token -- the previous token
, startCodeStack :: [Int] -- a stack of start codes for the state of the lexer
, indentStack :: [Int] -- a stack of source column positions of indentation levels
, parenStack :: [Token] -- a stack of parens and brackets for indentation handling
, lastEOL :: !SrcSpan -- location of the most recent end-of-line encountered
, comments :: [Token] -- accumulated comments
} deriving Show
initToken :: Token
initToken = NewlineToken SpanEmpty
initialState :: SrcLocation -> String -> [Int] -> ParseState
initialState initLoc inp scStack = ParseState
{ location = initLoc
, input = inp
, previousToken = initToken
, startCodeStack = scStack
, indentStack = [1]
, parenStack = []
, lastEOL = SpanEmpty
, comments = []
}
type P a = StateT ParseState (Either ParseError) a
execParser :: P a -> ParseState -> Either ParseError a
execParser = evalStateT
execParserKeepComments :: P a -> ParseState -> Either ParseError (a, [Token])
execParserKeepComments parser
= evalStateT (parser >>= \x -> getComments >>= \c -> return (x, c))
runParser :: P a -> ParseState -> Either ParseError (a, ParseState)
runParser = runStateT
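-- Illustrative note (not part of the original module): a parser action is run
-- against a state built with 'initialState'; e.g., given a starting location
-- @loc :: SrcLocation@ and an initial lexer start code @sc@,
--
-- > execParser getLocation (initialState loc "x = 1\n" [sc])
--
-- evaluates to @Right loc@, since nothing has consumed any input yet.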
{-# INLINE returnP #-}
returnP :: a -> P a
returnP = return
{-# INLINE thenP #-}
thenP :: P a -> (a -> P b) -> P b
thenP = (>>=)
{-
failP :: SrcSpan -> [String] -> P a
failP span strs = throwError (prettyText span ++ ": " ++ unwords strs)
-}
setLastEOL :: SrcSpan -> P ()
setLastEOL span = modify $ \s -> s { lastEOL = span }
getLastEOL :: P SrcSpan
getLastEOL = gets lastEOL
setLocation :: SrcLocation -> P ()
setLocation loc = modify $ \s -> s { location = loc }
getLocation :: P SrcLocation
getLocation = gets location
getInput :: P String
getInput = gets input
setInput :: String -> P ()
setInput inp = modify $ \s -> s { input = inp }
getLastToken :: P Token
getLastToken = gets previousToken
setLastToken :: Token -> P ()
setLastToken tok = modify $ \s -> s { previousToken = tok }
pushStartCode :: Int -> P ()
pushStartCode code = do
oldStack <- gets startCodeStack
modify $ \s -> s { startCodeStack = code : oldStack }
popStartCode :: P ()
popStartCode = do
oldStack <- gets startCodeStack
case oldStack of
[] -> internalError "fatal error in lexer: attempt to pop empty start code stack"
_:rest -> modify $ \s -> s { startCodeStack = rest }
getStartCode :: P Int
getStartCode = do
oldStack <- gets startCodeStack
case oldStack of
[] -> internalError "fatal error in lexer: start code stack empty on getStartCode"
code:_ -> return code
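-- Illustrative note (not part of the original module): pushes and pops are
-- expected to stay balanced; a lexer rule entering a temporary start code
-- could be bracketed as
--
-- > pushStartCode stringCode >> lexStringBody >>= \tok ->
-- > popStartCode >> returnP tok
--
-- where @stringCode@ and @lexStringBody@ are hypothetical names standing in
-- for an Alex start code and its lexing action.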
pushIndent :: Int -> P ()
pushIndent indent = do
oldStack <- gets indentStack
modify $ \s -> s { indentStack = indent : oldStack }
popIndent :: P ()
popIndent = do
oldStack <- gets indentStack
case oldStack of
[] -> internalError "fatal error in lexer: attempt to pop empty indentation stack"
_:rest -> modify $ \s -> s { indentStack = rest }
getIndent :: P Int
getIndent = do
oldStack <- gets indentStack
case oldStack of
[] -> internalError "fatal error in lexer: indent stack empty on getIndent"
indent:_ -> return indent
getIndentStackDepth :: P Int
getIndentStackDepth = gets (length . indentStack)
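-- Illustrative note (not part of the original module): the indent stack is the
-- usual Python-style bookkeeping. At the start of a logical line the lexer
-- compares the current column with 'getIndent'; if it is larger it calls
-- 'pushIndent' and emits an INDENT token, and while it is smaller it calls
-- 'popIndent' and emits DEDENT tokens.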
pushParen :: Token -> P ()
pushParen symbol = do
oldStack <- gets parenStack
modify $ \s -> s { parenStack = symbol : oldStack }
popParen :: P ()
popParen = do
oldStack <- gets parenStack
case oldStack of
[] -> internalError "fatal error in lexer: attempt to pop empty paren stack"
_:rest -> modify $ \s -> s { parenStack = rest }
getParen :: P (Maybe Token)
getParen = do
oldStack <- gets parenStack
case oldStack of
[] -> return Nothing
symbol:_ -> return $ Just symbol
getParenStackDepth :: P Int
getParenStackDepth = gets (length . parenStack)
addComment :: Token -> P ()
addComment c = do
oldComments <- gets comments
modify $ \s -> s { comments = c : oldComments }
getComments :: P [Token]
getComments = reverse <$> gets comments
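-- | A minimal usage sketch added for illustration (not part of the original
-- module): run a parser action against a fresh 'ParseState'.  The initial
-- source location and start-code stack are assumed to be supplied by the
-- caller; their construction lives elsewhere in the package.
_runParserFrom :: P a -> SrcLocation -> [Int] -> String -> Either ParseError a
_runParserFrom p initLoc startCodes inp =
   execParser p (initialState initLoc inp startCodes)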
| codeq/language-py | src/Language/Py/ParserMonad.hs | bsd-3-clause | 5,757 | 0 | 13 | 1,169 | 1,610 | 887 | 723 | 166 | 2 |
-- |
-- Module : Crypto.PubKey.DSA
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : Good
--
-- An implementation of the Digital Signature Algorithm (DSA)
{-# LANGUAGE DeriveDataTypeable #-}
module Crypto.PubKey.DSA
( Params(..)
, Signature(..)
, PublicKey(..)
, PrivateKey(..)
, PublicNumber
, PrivateNumber
-- * Generation
, generatePrivate
, calculatePublic
-- * Signature primitive
, sign
, signWith
-- * Verification primitive
, verify
-- * Key pair
, KeyPair(..)
, toPublicKey
, toPrivateKey
) where
import Crypto.Random.Types
import Data.Bits (testBit)
import Data.Data
import Data.Maybe
import Crypto.Number.Basic (numBits)
import Crypto.Number.ModArithmetic (expFast, expSafe, inverse)
import Crypto.Number.Serialize
import Crypto.Number.Generate
import Crypto.Internal.ByteArray (ByteArrayAccess(length), convert, index, dropView, takeView)
import Crypto.Internal.Imports
import Crypto.Hash
import Prelude hiding (length)
-- | DSA Public Number, usually embedded in DSA Public Key
type PublicNumber = Integer
-- | DSA Private Number, usually embedded in DSA Private Key
type PrivateNumber = Integer
-- | Represent DSA parameters, namely P, G, and Q.
data Params = Params
{ params_p :: Integer -- ^ DSA p
, params_g :: Integer -- ^ DSA g
, params_q :: Integer -- ^ DSA q
} deriving (Show,Read,Eq,Data,Typeable)
instance NFData Params where
rnf (Params p g q) = p `seq` g `seq` q `seq` ()
-- | Represent a DSA signature, namely R and S.
data Signature = Signature
{ sign_r :: Integer -- ^ DSA r
, sign_s :: Integer -- ^ DSA s
} deriving (Show,Read,Eq,Data,Typeable)
instance NFData Signature where
rnf (Signature r s) = r `seq` s `seq` ()
-- | Represent a DSA public key.
data PublicKey = PublicKey
{ public_params :: Params -- ^ DSA parameters
, public_y :: PublicNumber -- ^ DSA public Y
} deriving (Show,Read,Eq,Data,Typeable)
instance NFData PublicKey where
rnf (PublicKey params y) = y `seq` params `seq` ()
-- | Represent a DSA private key.
--
-- Only x needs to be secret.
-- The DSA parameters are publicly shared with the other side.
data PrivateKey = PrivateKey
{ private_params :: Params -- ^ DSA parameters
, private_x :: PrivateNumber -- ^ DSA private X
} deriving (Show,Read,Eq,Data,Typeable)
instance NFData PrivateKey where
rnf (PrivateKey params x) = x `seq` params `seq` ()
-- | Represent a DSA key pair
data KeyPair = KeyPair Params PublicNumber PrivateNumber
deriving (Show,Read,Eq,Data,Typeable)
instance NFData KeyPair where
rnf (KeyPair params y x) = x `seq` y `seq` params `seq` ()
-- | Public key of a DSA Key pair
toPublicKey :: KeyPair -> PublicKey
toPublicKey (KeyPair params pub _) = PublicKey params pub
-- | Private key of a DSA Key pair
toPrivateKey :: KeyPair -> PrivateKey
toPrivateKey (KeyPair params _ priv) = PrivateKey params priv
-- | Generate a private number with no specific property.
-- This number is usually called X in DSA texts.
generatePrivate :: MonadRandom m => Params -> m PrivateNumber
generatePrivate (Params _ _ q) = generateMax q
-- | Calculate the public number from the parameters and the private key
calculatePublic :: Params -> PrivateNumber -> PublicNumber
calculatePublic (Params p g _) x = expSafe g x p
-- | Sign a message using the private key and an explicit k number.
signWith :: (ByteArrayAccess msg, HashAlgorithm hash)
=> Integer -- ^ k random number
-> PrivateKey -- ^ private key
-> hash -- ^ hash function
-> msg -- ^ message to sign
-> Maybe Signature
signWith k pk hashAlg msg
| r == 0 || s == 0 = Nothing
| otherwise = Just $ Signature r s
where -- parameters
(Params p g q) = private_params pk
x = private_x pk
-- compute r,s
kInv = fromJust $ inverse k q
hm = os2ip $ hashWith hashAlg msg
r = expSafe g k p `mod` q
s = (kInv * (hm + x * r)) `mod` q
-- | Sign a message using the private key.
sign :: (ByteArrayAccess msg, HashAlgorithm hash, MonadRandom m) => PrivateKey -> hash -> msg -> m Signature
sign pk hashAlg msg = do
k <- generateMax q
case signWith k pk hashAlg msg of
Nothing -> sign pk hashAlg msg
Just sig -> return sig
where
(Params _ _ q) = private_params pk
-- | Verify a signature for a message using the public key.
verify :: (ByteArrayAccess msg, HashAlgorithm hash) => hash -> PublicKey -> Signature -> msg -> Bool
verify hashAlg pk (Signature r s) m
-- Reject the signature if either 0 < r < q or 0 < s < q is not satisfied.
| r <= 0 || r >= q || s <= 0 || s >= q = False
| otherwise = v == r
where (Params p g q) = public_params pk
y = public_y pk
hm = os2ip . truncateHash $ hashWith hashAlg m
w = fromJust $ inverse s q
u1 = (hm*w) `mod` q
u2 = (r*w) `mod` q
v = ((expFast g u1 p) * (expFast y u2 p)) `mod` p `mod` q
-- if the hash is larger than the size of q, truncate it; FIXME: deal with the case of a q not evenly divisible by 8
truncateHash h = if numBits (os2ip h) > numBits q then takeView h (numBits q `div` 8) else dropView h 0
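-- | A minimal usage sketch added for illustration (not part of the original
-- module): given pre-agreed DSA 'Params' (parameter generation is out of
-- scope here), derive a fresh key pair, sign a message with SHA-256 and
-- check the signature.  The function name is a hypothetical example.
_demoSignVerify :: ByteArrayAccess msg => Params -> msg -> IO Bool
_demoSignVerify ps msg = do
    x <- generatePrivate ps                         -- secret number X
    let priv = PrivateKey ps x
        pub  = PublicKey ps (calculatePublic ps x)  -- Y = g^x mod p
    sig <- sign priv SHA256 msg                     -- k is drawn internally
    return (verify SHA256 pub sig msg)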
| tekul/cryptonite | Crypto/PubKey/DSA.hs | bsd-3-clause | 5,630 | 0 | 14 | 1,598 | 1,430 | 800 | 630 | 101 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Provide ability to upload tarballs to Hackage.
module Stack.Upload
( -- * Upload
upload
, uploadBytes
, uploadRevision
-- * Credentials
, HackageCreds
, loadCreds
) where
import Stack.Prelude
import Data.Aeson (FromJSON (..),
ToJSON (..),
decode', encode,
object, withObject,
(.:), (.=))
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy as L
import qualified Data.Conduit.Binary as CB
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import qualified Data.Text.IO as TIO
import Network.HTTP.Client (Response,
RequestBody(RequestBodyLBS),
Request)
import Network.HTTP.Simple (withResponse,
getResponseStatusCode,
getResponseBody,
setRequestHeader,
parseRequest,
httpNoBody)
import Network.HTTP.Client.MultipartFormData (formDataBody, partFileRequestBody,
partBS, partLBS)
import Network.HTTP.Client.TLS (getGlobalManager,
applyDigestAuth,
displayDigestAuthException)
import Stack.Types.Config
import Stack.Types.PackageIdentifier (PackageIdentifier, packageIdentifierString,
packageIdentifierName)
import Stack.Types.PackageName (packageNameString)
import System.Directory (createDirectoryIfMissing,
removeFile)
import System.FilePath ((</>), takeFileName)
import System.IO (hFlush, stdout, putStrLn, putStr, getLine, print) -- TODO remove putStrLn, use logInfo
import System.IO.Echo (withoutInputEcho)
-- | Username and password to log into Hackage.
--
-- Since 0.1.0.0
data HackageCreds = HackageCreds
{ hcUsername :: !Text
, hcPassword :: !Text
, hcCredsFile :: !FilePath
}
deriving Show
instance ToJSON HackageCreds where
toJSON (HackageCreds u p _) = object
[ "username" .= u
, "password" .= p
]
instance FromJSON (FilePath -> HackageCreds) where
parseJSON = withObject "HackageCreds" $ \o -> HackageCreds
<$> o .: "username"
<*> o .: "password"
-- | Load Hackage credentials, either from a save file or the command
-- line.
--
-- Since 0.1.0.0
loadCreds :: Config -> IO HackageCreds
loadCreds config = do
fp <- credsFile config
elbs <- tryIO $ L.readFile fp
case either (const Nothing) Just elbs >>= decode' of
Nothing -> fromPrompt fp
Just mkCreds -> do
unless (configSaveHackageCreds config) $ do
putStrLn "WARNING: You've set save-hackage-creds to false"
putStrLn "However, credentials were found at:"
putStrLn $ " " ++ fp
return $ mkCreds fp
where
fromPrompt fp = do
putStr "Hackage username: "
hFlush stdout
username <- TIO.getLine
password <- promptPassword
let hc = HackageCreds
{ hcUsername = username
, hcPassword = password
, hcCredsFile = fp
}
when (configSaveHackageCreds config) $ do
let prompt = "Save hackage credentials to file at " ++ fp ++ " [y/n]? "
putStr prompt
input <- loopPrompt prompt
putStrLn "NOTE: Avoid this prompt in the future by using: save-hackage-creds: false"
hFlush stdout
case input of
"y" -> do
L.writeFile fp (encode hc)
putStrLn "Saved!"
hFlush stdout
_ -> return ()
return hc
loopPrompt :: String -> IO String
loopPrompt p = do
input <- TIO.getLine
case input of
"y" -> return "y"
"n" -> return "n"
_ -> do
putStr p
loopPrompt p
credsFile :: Config -> IO FilePath
credsFile config = do
let dir = toFilePath (configStackRoot config) </> "upload"
createDirectoryIfMissing True dir
return $ dir </> "credentials.json"
-- | Lifted from cabal-install, Distribution.Client.Upload
promptPassword :: IO Text
promptPassword = do
putStr "Hackage password: "
hFlush stdout
-- save/restore the terminal echoing status (no echoing for entering the password)
passwd <- withoutInputEcho $ fmap T.pack getLine
putStrLn ""
return passwd
applyCreds :: HackageCreds -> Request -> IO Request
applyCreds creds req0 = do
manager <- getGlobalManager
ereq <- applyDigestAuth
(encodeUtf8 $ hcUsername creds)
(encodeUtf8 $ hcPassword creds)
req0
manager
case ereq of
Left e -> do
putStrLn "WARNING: No HTTP digest prompt found, this will probably fail"
case fromException e of
Just e' -> putStrLn $ displayDigestAuthException e'
Nothing -> print e
return req0
Right req -> return req
-- | Upload a single tarball with the given 'HackageCreds'. Instead of
-- sending a file like 'upload', this sends a lazy bytestring.
--
-- Since 0.1.2.1
uploadBytes :: HackageCreds
-> String -- ^ tar file name
-> L.ByteString -- ^ tar file contents
-> IO ()
uploadBytes creds tarName bytes = do
let req1 = setRequestHeader "Accept" ["text/plain"]
"https://hackage.haskell.org/packages/"
formData = [partFileRequestBody "package" tarName (RequestBodyLBS bytes)]
req2 <- formDataBody formData req1
req3 <- applyCreds creds req2
putStr $ "Uploading " ++ tarName ++ "... "
hFlush stdout
withResponse req3 $ \res ->
case getResponseStatusCode res of
200 -> putStrLn "done!"
401 -> do
putStrLn "authentication failure"
handleIO (const $ return ()) (removeFile (hcCredsFile creds))
throwString "Authentication failure uploading to server"
403 -> do
putStrLn "forbidden upload"
putStrLn "Usually means: you've already uploaded this package/version combination"
putStrLn "Ignoring error and continuing, full message from Hackage below:\n"
printBody res
503 -> do
putStrLn "service unavailable"
putStrLn "This error some times gets sent even though the upload succeeded"
putStrLn "Check on Hackage to see if your pacakge is present"
printBody res
code -> do
putStrLn $ "unhandled status code: " ++ show code
printBody res
throwString $ "Upload failed on " ++ tarName
printBody :: Response (ConduitM () S.ByteString IO ()) -> IO ()
printBody res = runConduit $ getResponseBody res .| CB.sinkHandle stdout
-- | Upload a single tarball with the given 'HackageCreds'.
--
-- Since 0.1.0.0
upload :: HackageCreds -> FilePath -> IO ()
upload creds fp = uploadBytes creds (takeFileName fp) =<< L.readFile fp
uploadRevision :: HackageCreds
-> PackageIdentifier
-> L.ByteString
-> IO ()
uploadRevision creds ident cabalFile = do
req0 <- parseRequest $ concat
[ "https://hackage.haskell.org/package/"
, packageIdentifierString ident
, "/"
, packageNameString $ packageIdentifierName ident
, ".cabal/edit"
]
req1 <- formDataBody
[ partLBS "cabalfile" cabalFile
, partBS "publish" "on"
]
req0
req2 <- applyCreds creds req1
void $ httpNoBody req2
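-- | A minimal usage sketch added for illustration (not part of the original
-- module): load (or prompt for) Hackage credentials from the given 'Config'
-- and upload a local sdist tarball.  The tarball path is a hypothetical
-- example.
_demoUpload :: Config -> IO ()
_demoUpload config = do
  creds <- loadCreds config
  upload creds "dist/my-package-0.1.0.0.tar.gz"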
| MichielDerhaeg/stack | src/Stack/Upload.hs | bsd-3-clause | 8,635 | 0 | 19 | 3,263 | 1,647 | 833 | 814 | 191 | 5 |
module Models where
import Data.Monoid
import Language.Haskell.TH
import qualified Data.Text as Text
import Database.Persist.Quasi
import Database.Persist.Quasi.Internal
import Database.Persist.TH
import Database.Persist.Sql
-- TODO: we use lookupName and reify etc., which break in IO; we somehow need
-- to test this out another way.
mkPersist' :: [UnboundEntityDef] -> IO [Dec]
mkPersist' = runQ . mkPersist sqlSettings
parseReferences' :: String -> IO Exp
parseReferences' = runQ . parseReferencesQ
parseReferencesQ :: String -> Q Exp
parseReferencesQ = parseReferences lowerCaseSettings . Text.pack
-- | Build model definitions: number of models, then number of fields per model
mkModels :: Int -> Int -> String
mkModels = mkModelsWithFieldModifier id
mkNullableModels :: Int -> Int -> String
mkNullableModels = mkModelsWithFieldModifier maybeFields
mkModelsWithFieldModifier :: (String -> String) -> Int -> Int -> String
mkModelsWithFieldModifier k i f =
unlines . fmap unlines . take i . map mkModel . zip [0..] . cycle $
[ "Model"
, "Foobar"
, "User"
, "King"
, "Queen"
, "Dog"
, "Cat"
]
where
mkModel :: (Int, String) -> [String]
mkModel (i', m) =
(m <> show i') : indent 4 (map k (mkFields f))
indent :: Int -> [String] -> [String]
indent i = map (replicate i ' ' ++)
mkFields :: Int -> [String]
mkFields i = take i $ map mkField $ zip [0..] $ cycle
[ "Bool"
, "Int"
, "String"
, "Double"
, "Text"
]
where
mkField :: (Int, String) -> String
mkField (i', typ) = "field" <> show i' <> "\t\t" <> typ
maybeFields :: String -> String
maybeFields = (++ " Maybe")
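-- | A minimal usage sketch added for illustration (not part of the original
-- module): build the quasi-quoter input for two entities with three nullable
-- fields each and push it through 'parseReferences''; printing the resulting
-- TH expression is just a cheap way to force it.
_demoModels :: IO ()
_demoModels = do
  let src = mkNullableModels 2 3
  e <- parseReferences' src
  print e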
| yesodweb/persistent | persistent/bench/Models.hs | mit | 1,640 | 0 | 12 | 379 | 498 | 275 | 223 | 44 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeOperators #-}
module DTypes.Classes.DTraversable
( DTraversable (..)
, dtraverse'
, dsequenceA'
, dtoList
) where
import DTypes.Classes.DFunctor
import DTypes.Compose
import DTypes.Trafo
import Data.Functor.Identity (Identity (..))
#if MIN_VERSION_base(4,8,0)
import Control.Applicative (Const (..))
#else
import Control.Applicative (Applicative (..), (<$>), Const (..))
import Data.Traversable (Traversable (..))
#endif
class DFunctor d => DTraversable (d :: (k -> *) -> *) where
{-# MINIMAL dtraverse | dsequenceA #-}
dtraverse :: Applicative g => (f ==> Compose g h) -> d f -> g (d h)
dtraverse f = dsequenceA . dfmap f
dsequenceA :: Applicative g => d (Compose g h) -> g (d h)
dsequenceA = dtraverse id
-- TODO: more functions
instance (Traversable f, DTraversable d) => DTraversable (Compose f d) where
dsequenceA (Compose x) = Compose <$> traverse dsequenceA x
dtraverse' :: (DTraversable d, Applicative g) => (f ==> g) -> d f -> g (d Identity)
dtraverse' f = dtraverse (Compose . fmap Identity . f)
dsequenceA' :: (DTraversable d, Applicative f) => d f -> f (d Identity)
dsequenceA' = dsequenceA . dfmap (Compose . fmap Identity)
dtoList
:: DTraversable (d :: (* -> *) -> *)
=> d (Const a)
-> [a]
dtoList = getConst . dtraverse' (Const . (:[]) . getConst) -- robot monkey!
{-
dFoldMap
:: (Monoid m, DTraversable d)
=> (forall a. f a -> m) -> d f -> m
dFoldMap f = getConst . dTraverse (Const . f)
dFold
:: (Monoid m, DTraversable d)
=> d (Const m) -> m
dFold = getConst . dSequenceA
dToList
:: DTraversable d
=> d (Const a) -> [a]
dToList = dFoldMap (pure . getConst)
-}
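{- A usage sketch (illustrative only, not part of the original module):
   given some record-of-functors type @D@ with a 'DTraversable' instance,
   per-field labels can be collected with
     dtoList (dfmap (\_ -> Const "field") d) :: [String]
   where 'dfmap' replaces every field by a 'Const' value of one common type. -}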
| timjb/ftypes | src/DTypes/Classes/DTraversable.hs | mit | 1,725 | 0 | 12 | 336 | 475 | 255 | 220 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
module Tinc.Cache (
Cache
, CachedPackage(..)
, readCache
, findReusablePackages
, cachedExecutables
, populateCache
#ifdef TEST
, PopulateCacheAction(..)
, populateCacheAction
, PackageLocation(..)
, readPackageGraph
, readAddSourceHashes
, addAddSourceHashes
, listSandboxes
#endif
) where
import Control.Monad.Catch
import Control.Monad
import Control.Monad.IO.Class
import Data.List
import qualified Data.Map as Map
import Data.Yaml
import System.Directory hiding (getDirectoryContents, withCurrentDirectory)
import System.FilePath
import System.IO.Temp
import Data.Function
import Tinc.Fail
import Tinc.GhcInfo
import Tinc.GhcPkg
import Tinc.Package
import Tinc.PackageGraph
import Tinc.Process
import Tinc.Sandbox
import Tinc.SourceDependency
import Tinc.Types
import Util
data CachedPackage = CachedPackage {
cachedPackageName :: Package
, cachedPackageConfig :: Path PackageConfig
} deriving (Eq, Show)
cachedExecutables :: CachedPackage -> IO [FilePath]
cachedExecutables (CachedPackage package (Path config)) = do
exists <- doesDirectoryExist binDir
if exists
then listDirectoryContents binDir >>= mapM canonicalizePath
else return []
where
binDir = dropFileName config </> ".." </> "bin" </> showPackage package
findReusablePackages :: Cache -> [Package] -> [CachedPackage]
findReusablePackages (Cache globalPackages packageGraphs) installPlan = reusablePackages
where
reusablePackages :: [CachedPackage]
reusablePackages = nubBy ((==) `on` cachedPackageName) (concatMap findReusable packageGraphs)
findReusable :: PackageGraph PackageLocation -> [CachedPackage]
findReusable packageGraph =
[CachedPackage p c | (p, PackageConfig c) <- calculateReusablePackages packages packageGraph]
where
packages = nubBy ((==) `on` packageName) (installPlan ++ map fromSimplePackage globalPackages)
data Cache = Cache {
_cacheGlobalPackages :: [SimplePackage]
, _cachePackageGraphs :: [PackageGraph PackageLocation]
}
data PackageLocation = GlobalPackage | PackageConfig (Path PackageConfig)
deriving (Eq, Ord, Show)
fromSimplePackage :: SimplePackage -> Package
fromSimplePackage (SimplePackage name version) = Package name (Version version Nothing)
readPackageGraph :: (MonadIO m, Fail m, GhcPkg m) => [SimplePackage] -> Path PackageDb -> Path PackageDb -> m (PackageGraph PackageLocation)
readPackageGraph globalPackages globalPackageDb packageDb = do
packageConfigs <- liftIO $ cachedListPackages packageDb
let globalValues = map (, GlobalPackage) globalPackages
let values = map (fmap PackageConfig) packageConfigs
dot <- readDotFile
fromDot (globalValues ++ values) dot >>= liftIO . addAddSourceHashes packageDb
where
dotFile = path packageDb </> "packages.dot"
readDotFile = do
cachedIOAfter (liftIO $ touchPackageCache packageDb) dotFile $ do
readGhcPkg [globalPackageDb, packageDb] ["dot"]
addSourceHashesFile :: FilePath
addSourceHashesFile = "add-source.yaml"
readAddSourceHashes :: Path PackageDb -> IO [SourceDependency]
readAddSourceHashes packageDb = do
let file = path packageDb </> addSourceHashesFile
exists <- doesFileExist file
if exists
then decodeFileEither file >>= either (dieLoc . show) return
else return []
writeAddSourceHashes :: Path PackageDb -> [SourceDependency] -> IO ()
writeAddSourceHashes packageDb addSourceHashes
| null addSourceHashes = return ()
| otherwise = do
encodeFile (path packageDb </> addSourceHashesFile) addSourceHashes
touchPackageCache packageDb
addAddSourceHash :: Map.Map String String -> SimplePackage -> PackageLocation -> Package
addAddSourceHash hashes (SimplePackage name version) location = case location of
PackageConfig _ -> maybe package (\ hash -> Package name (Version version $ Just hash)) (Map.lookup (packageName package) hashes)
GlobalPackage -> package
where
package = Package name (Version version Nothing)
addAddSourceHashes :: Path PackageDb -> SimplePackageGraph PackageLocation -> IO (PackageGraph PackageLocation)
addAddSourceHashes packageDb graph = do
hashes <- mkMap <$> readAddSourceHashes packageDb
return $ mapIndex (addAddSourceHash hashes) graph
where
mkMap :: [SourceDependency] -> Map.Map String String
mkMap hashes = Map.fromList (map (\ (SourceDependency name hash) -> (name, hash)) hashes)
readCache :: GhcInfo -> Path CacheDir -> IO Cache
readCache ghcInfo cacheDir = do
globalPackages <- listGlobalPackages
sandboxes <- listSandboxes cacheDir
cache <- forM sandboxes $ \ sandbox -> do
packageDbPath <- findPackageDb sandbox
readPackageGraph globalPackages (ghcInfoGlobalPackageDb ghcInfo) packageDbPath
return (Cache globalPackages cache)
validMarker :: FilePath
validMarker = "tinc.valid.v3"
listSandboxes :: Path CacheDir -> IO [Path Sandbox]
listSandboxes (Path cacheDir) = map Path <$> listEntries
where
isValidCacheEntry :: FilePath -> IO Bool
isValidCacheEntry p = doesFileExist (p </> validMarker)
listEntries :: IO [FilePath]
listEntries = listDirectories cacheDir >>= filterM isValidCacheEntry
data PopulateCacheAction = PopulateCacheAction {
populateCacheActionInstallPlan :: [Package]
, populateCacheActionAddSource :: [Path SourceDependency]
, populateCacheActionWriteAddSourceHashes :: [SourceDependency]
} deriving (Eq, Show)
populateCacheAction :: Path SourceDependencyCache -> [Package] -> [CachedPackage] -> Either [CachedPackage] PopulateCacheAction
populateCacheAction sourceDependencyCache missing reusable
| null missing = Left reusable
| otherwise = Right PopulateCacheAction {
populateCacheActionInstallPlan = installPlan
, populateCacheActionAddSource = addSource
, populateCacheActionWriteAddSourceHashes = [SourceDependency name hash | Package name (Version _ (Just hash)) <- (missing ++ map cachedPackageName reusable)]
}
where
installPlan :: [Package]
installPlan = missing ++ [p | p@(Package _ (Version _ Nothing)) <- map cachedPackageName reusable]
addSource :: [Path SourceDependency]
addSource = map (sourceDependencyPath sourceDependencyCache) [SourceDependency name hash | Package name (Version _ (Just hash)) <- missing]
populateCache :: (MonadIO m, MonadMask m, Fail m, MonadProcess m) => Path CacheDir -> Path SourceDependencyCache -> [Package] -> [CachedPackage] -> m [CachedPackage]
populateCache cacheDir sourceDependencyCache missing reusable = either return populate (populateCacheAction sourceDependencyCache missing reusable)
where
populate PopulateCacheAction{..} = do
sandbox <- liftIO $ newCacheEntry cacheDir
withCurrentDirectory sandbox $ do
packageDb <- initSandbox populateCacheActionAddSource (map cachedPackageConfig reusable)
liftIO $ do
writeAddSourceHashes packageDb populateCacheActionWriteAddSourceHashes
writeFile validMarker ""
callProcessM "cabal" ("v1-install" : "--bindir=$prefix/bin/$pkgid" : map showPackage populateCacheActionInstallPlan)
map (uncurry CachedPackage)
. ignore_add_source_hashes_for_now_as_we_currently_do_not_need_them
<$> cachedListPackages packageDb
ignore_add_source_hashes_for_now_as_we_currently_do_not_need_them = map (\ (a, b) -> (fromSimplePackage a, b))
newCacheEntry :: Path CacheDir -> IO FilePath
newCacheEntry cacheDir = do
basename <- takeBaseName <$> getCurrentDirectory
createTempDirectory (path cacheDir) (basename ++ "-")
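-- | A minimal usage sketch added for illustration (not part of the original
-- module): read the package cache for the current GHC and pick out the
-- packages of an install plan that can be reused as-is.  The 'GhcInfo',
-- cache directory and install plan are assumed to come from the surrounding
-- tinc machinery.
_demoReusable :: GhcInfo -> Path CacheDir -> [Package] -> IO [CachedPackage]
_demoReusable ghcInfo cacheDir plan = do
  cache <- readCache ghcInfo cacheDir
  return (findReusablePackages cache plan)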
| haskell-tinc/tinc | src/Tinc/Cache.hs | bsd-3-clause | 7,770 | 0 | 16 | 1,363 | 2,063 | 1,057 | 1,006 | 144 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving, ConstraintKinds, PatternGuards, TupleSections #-}
module Idris.ParseExpr where
import Prelude hiding (pi)
import Text.Trifecta.Delta
import Text.Trifecta hiding (span, stringLiteral, charLiteral, natural, symbol, char, string, whiteSpace, Err)
import Text.Parser.LookAhead
import Text.Parser.Expression
import qualified Text.Parser.Token as Tok
import qualified Text.Parser.Char as Chr
import qualified Text.Parser.Token.Highlight as Hi
import Idris.AbsSyntax
import Idris.ParseHelpers
import Idris.ParseOps
import Idris.DSL
import Idris.Core.TT
import Control.Applicative
import Control.Monad
import Control.Monad.State.Strict
import Data.Function (on)
import Data.Maybe
import qualified Data.List.Split as Spl
import Data.List
import Data.Monoid
import Data.Char
import qualified Data.HashSet as HS
import qualified Data.Text as T
import qualified Data.ByteString.UTF8 as UTF8
import Debug.Trace
-- | Allow implicit type declarations
allowImp :: SyntaxInfo -> SyntaxInfo
allowImp syn = syn { implicitAllowed = True }
-- | Disallow implicit type declarations
disallowImp :: SyntaxInfo -> SyntaxInfo
disallowImp syn = syn { implicitAllowed = False }
{-| Parses an expression as a whole
@
FullExpr ::= Expr EOF_t;
@
-}
fullExpr :: SyntaxInfo -> IdrisParser PTerm
fullExpr syn = do x <- expr syn
eof
i <- get
return $ debindApp syn (desugar syn i x)
tryFullExpr :: SyntaxInfo -> IState -> String -> Either Err PTerm
tryFullExpr syn st input =
case runparser (fullExpr syn) st "" input of
Success tm -> Right tm
Failure e -> Left (Msg (show e))
{- | Parses an expression
@
Expr ::= Pi
@
-}
expr :: SyntaxInfo -> IdrisParser PTerm
expr = pi
{- | Parses an expression with possible operator applied
@
OpExpr ::= {- Expression Parser with Operators based on Expr' -};
@
-}
opExpr :: SyntaxInfo -> IdrisParser PTerm
opExpr syn = do i <- get
buildExpressionParser (table (idris_infixes i)) (expr' syn)
{- | Parses either an internally defined expression or
a user-defined one
@
Expr' ::= "External (User-defined) Syntax"
| InternalExpr;
@
-}
expr' :: SyntaxInfo -> IdrisParser PTerm
expr' syn = try (externalExpr syn)
<|> internalExpr syn
<?> "expression"
{- | Parses a user-defined expression -}
externalExpr :: SyntaxInfo -> IdrisParser PTerm
externalExpr syn = do i <- get
(FC fn start _) <- getFC
expr <- extensions syn (syntaxRulesList $ syntax_rules i)
(FC _ _ end) <- getFC
let outerFC = FC fn start end
return (mapPTermFC (fixFC outerFC) (fixFCH fn outerFC) expr)
<?> "user-defined expression"
where -- Fix non-highlighting FCs by approximating with the span of the syntax application
fixFC outer inner | inner `fcIn` outer = inner
| otherwise = outer
-- Fix highlighting FCs by making them useless, to avoid spurious highlights
fixFCH fn outer inner | inner `fcIn` outer = inner
| otherwise = FileFC fn
{- | Parses a simple user-defined expression -}
simpleExternalExpr :: SyntaxInfo -> IdrisParser PTerm
simpleExternalExpr syn = do i <- get
extensions syn (filter isSimple (syntaxRulesList $ syntax_rules i))
where
isSimple (Rule (Expr x:xs) _ _) = False
isSimple (Rule (SimpleExpr x:xs) _ _) = False
isSimple (Rule [Keyword _] _ _) = True
isSimple (Rule [Symbol _] _ _) = True
isSimple (Rule (_:xs) _ _) = case last xs of
Keyword _ -> True
Symbol _ -> True
_ -> False
isSimple _ = False
{- | Tries to parse a user-defined expression given a list of syntactic extensions -}
extensions :: SyntaxInfo -> [Syntax] -> IdrisParser PTerm
extensions syn rules = extension syn [] (filter isValid rules)
<?> "user-defined expression"
where
isValid :: Syntax -> Bool
isValid (Rule _ _ AnySyntax) = True
isValid (Rule _ _ PatternSyntax) = inPattern syn
isValid (Rule _ _ TermSyntax) = not (inPattern syn)
isValid (DeclRule _ _) = False
data SynMatch = SynTm PTerm | SynBind FC Name -- ^ the FC is for highlighting information
deriving Show
extension :: SyntaxInfo -> [Maybe (Name, SynMatch)] -> [Syntax] -> IdrisParser PTerm
extension syn ns rules =
choice $ flip map (groupBy (ruleGroup `on` syntaxSymbols) rules) $ \rs ->
case head rs of -- can never be []
Rule (symb:_) _ _ -> try $ do
n <- extensionSymbol symb
extension syn (n : ns) [Rule ss t ctx | (Rule (_:ss) t ctx) <- rs]
-- If we have more than one Rule in this bucket, our grammar is
-- nondeterministic.
Rule [] ptm _ -> return (flatten (updateSynMatch (mapMaybe id ns) ptm))
where
ruleGroup [] [] = True
ruleGroup (s1:_) (s2:_) = s1 == s2
ruleGroup _ _ = False
extensionSymbol :: SSymbol -> IdrisParser (Maybe (Name, SynMatch))
extensionSymbol (Keyword n) = do fc <- reservedFC (show n)
highlightP fc AnnKeyword
return Nothing
extensionSymbol (Expr n) = do tm <- expr syn
return $ Just (n, SynTm tm)
extensionSymbol (SimpleExpr n) = do tm <- simpleExpr syn
return $ Just (n, SynTm tm)
extensionSymbol (Binding n) = do (b, fc) <- name
return $ Just (n, SynBind fc b)
extensionSymbol (Symbol s) = do fc <- symbolFC s
highlightP fc AnnKeyword
return Nothing
flatten :: PTerm -> PTerm -- flatten application
flatten (PApp fc (PApp _ f as) bs) = flatten (PApp fc f (as ++ bs))
flatten t = t
updateSynMatch = update
where
updateB :: [(Name, SynMatch)] -> (Name, FC) -> (Name, FC)
updateB ns (n, fc) = case lookup n ns of
Just (SynBind tfc t) -> (t, tfc)
_ -> (n, fc)
update :: [(Name, SynMatch)] -> PTerm -> PTerm
update ns (PRef fc hls n) = case lookup n ns of
Just (SynTm t) -> t
_ -> PRef fc hls n
update ns (PPatvar fc n) = uncurry (flip PPatvar) $ updateB ns (n, fc)
update ns (PLam fc n nfc ty sc)
= let (n', nfc') = updateB ns (n, nfc)
in PLam fc n' nfc' (update ns ty) (update (dropn n ns) sc)
update ns (PPi p n fc ty sc)
= let (n', nfc') = updateB ns (n, fc)
in PPi (updTacImp ns p) n' nfc'
(update ns ty) (update (dropn n ns) sc)
update ns (PLet fc n nfc ty val sc)
= let (n', nfc') = updateB ns (n, nfc)
in PLet fc n' nfc' (update ns ty)
(update ns val) (update (dropn n ns) sc)
update ns (PApp fc t args)
= PApp fc (update ns t) (map (fmap (update ns)) args)
update ns (PAppBind fc t args)
= PAppBind fc (update ns t) (map (fmap (update ns)) args)
update ns (PMatchApp fc n) = let (n', nfc') = updateB ns (n, fc)
in PMatchApp nfc' n'
update ns (PIfThenElse fc c t f)
= PIfThenElse fc (update ns c) (update ns t) (update ns f)
update ns (PCase fc c opts)
= PCase fc (update ns c) (map (pmap (update ns)) opts)
update ns (PRewrite fc eq tm mty)
= PRewrite fc (update ns eq) (update ns tm) (fmap (update ns) mty)
update ns (PPair fc hls p l r) = PPair fc hls p (update ns l) (update ns r)
update ns (PDPair fc hls p l t r)
= PDPair fc hls p (update ns l) (update ns t) (update ns r)
update ns (PAs fc n t) = PAs fc (fst $ updateB ns (n, NoFC)) (update ns t)
update ns (PAlternative ms a as) = PAlternative ms a (map (update ns) as)
update ns (PHidden t) = PHidden (update ns t)
update ns (PGoal fc r n sc) = PGoal fc (update ns r) n (update ns sc)
update ns (PDoBlock ds) = PDoBlock $ map (upd ns) ds
where upd :: [(Name, SynMatch)] -> PDo -> PDo
upd ns (DoExp fc t) = DoExp fc (update ns t)
upd ns (DoBind fc n nfc t) = DoBind fc n nfc (update ns t)
upd ns (DoLet fc n nfc ty t) = DoLet fc n nfc (update ns ty) (update ns t)
upd ns (DoBindP fc i t ts)
= DoBindP fc (update ns i) (update ns t)
(map (\(l,r) -> (update ns l, update ns r)) ts)
upd ns (DoLetP fc i t) = DoLetP fc (update ns i) (update ns t)
update ns (PIdiom fc t) = PIdiom fc $ update ns t
update ns (PMetavar fc n) = uncurry (flip PMetavar) $ updateB ns (n, fc)
update ns (PProof tacs) = PProof $ map (updTactic ns) tacs
update ns (PTactics tacs) = PTactics $ map (updTactic ns) tacs
update ns (PDisamb nsps t) = PDisamb nsps $ update ns t
update ns (PUnifyLog t) = PUnifyLog $ update ns t
update ns (PNoImplicits t) = PNoImplicits $ update ns t
update ns (PQuasiquote tm mty) = PQuasiquote (update ns tm) (fmap (update ns) mty)
update ns (PUnquote t) = PUnquote $ update ns t
update ns (PQuoteName n res fc) = let (n', fc') = (updateB ns (n, fc))
in PQuoteName n' res fc'
update ns (PRunElab fc t nsp) = PRunElab fc (update ns t) nsp
update ns (PConstSugar fc t) = PConstSugar fc $ update ns t
-- PConstSugar probably can't contain anything substitutable, but it's hard to track
update ns t = t
updTactic :: [(Name, SynMatch)] -> PTactic -> PTactic
-- handle all the ones with Names explicitly, then use fmap for the rest with PTerms
updTactic ns (Intro ns') = Intro $ map (fst . updateB ns . (, NoFC)) ns'
updTactic ns (Focus n) = Focus . fst $ updateB ns (n, NoFC)
updTactic ns (Refine n bs) = Refine (fst $ updateB ns (n, NoFC)) bs
updTactic ns (Claim n t) = Claim (fst $ updateB ns (n, NoFC)) (update ns t)
updTactic ns (MatchRefine n) = MatchRefine (fst $ updateB ns (n, NoFC))
updTactic ns (LetTac n t) = LetTac (fst $ updateB ns (n, NoFC)) (update ns t)
updTactic ns (LetTacTy n ty tm) = LetTacTy (fst $ updateB ns (n, NoFC)) (update ns ty) (update ns tm)
updTactic ns (ProofSearch rec prover depth top psns hints) = ProofSearch rec prover depth
(fmap (fst . updateB ns . (, NoFC)) top) (map (fst . updateB ns . (, NoFC)) psns) (map (fst . updateB ns . (, NoFC)) hints)
updTactic ns (Try l r) = Try (updTactic ns l) (updTactic ns r)
updTactic ns (TSeq l r) = TSeq (updTactic ns l) (updTactic ns r)
updTactic ns (GoalType s tac) = GoalType s $ updTactic ns tac
updTactic ns (TDocStr (Left n)) = TDocStr . Left . fst $ updateB ns (n, NoFC)
updTactic ns t = fmap (update ns) t
updTacImp ns (TacImp o st scr) = TacImp o st (update ns scr)
updTacImp _ x = x
dropn :: Name -> [(Name, a)] -> [(Name, a)]
dropn n [] = []
dropn n ((x,t) : xs) | n == x = xs
| otherwise = (x,t):dropn n xs
{- | Parses a (normal) built-in expression
@
InternalExpr ::=
UnifyLog
| RecordType
| SimpleExpr
| Lambda
| QuoteGoal
| Let
| If
| RewriteTerm
| CaseExpr
| DoBlock
| App
;
@
-}
internalExpr :: SyntaxInfo -> IdrisParser PTerm
internalExpr syn =
unifyLog syn
<|> runElab syn
<|> disamb syn
<|> noImplicits syn
<|> recordType syn
<|> if_ syn
<|> lambda syn
<|> quoteGoal syn
<|> let_ syn
<|> rewriteTerm syn
<|> doBlock syn
<|> caseExpr syn
<|> app syn
<?> "expression"
{- | Parses the "impossible" keyword
@
Impossible ::= 'impossible'
@
-}
impossible :: IdrisParser PTerm
impossible = do fc <- reservedFC "impossible"
highlightP fc AnnKeyword
return PImpossible
{- | Parses a case expression
@
CaseExpr ::=
'case' Expr 'of' OpenBlock CaseOption+ CloseBlock;
@
-}
caseExpr :: SyntaxInfo -> IdrisParser PTerm
caseExpr syn = do kw1 <- reservedFC "case"; fc <- getFC
scr <- expr syn; kw2 <- reservedFC "of";
opts <- indentedBlock1 (caseOption syn)
highlightP kw1 AnnKeyword
highlightP kw2 AnnKeyword
return (PCase fc scr opts)
<?> "case expression"
{- | Parses a case in a case expression
@
CaseOption ::=
Expr (Impossible | '=>' Expr) Terminator
;
@
-}
caseOption :: SyntaxInfo -> IdrisParser (PTerm, PTerm)
caseOption syn = do lhs <- expr (syn { inPattern = True })
r <- impossible <|> symbol "=>" *> expr syn
return (lhs, r)
<?> "case option"
warnTacticDeprecation :: FC -> IdrisParser ()
warnTacticDeprecation fc =
do ist <- get
let cmdline = opt_cmdline (idris_options ist)
unless (NoOldTacticDeprecationWarnings `elem` cmdline) $
put ist { parserWarnings =
(fc, Msg "This style of tactic proof is deprecated. See %runElab for the replacement.") : parserWarnings ist }
{- | Parses a proof block
@
ProofExpr ::=
'proof' OpenBlock Tactic'* CloseBlock
;
@
-}
proofExpr :: SyntaxInfo -> IdrisParser PTerm
proofExpr syn = do kw <- reservedFC "proof"
ts <- indentedBlock1 (tactic syn)
highlightP kw AnnKeyword
warnTacticDeprecation kw
return $ PProof ts
<?> "proof block"
{- | Parses a tactics block
@
TacticsExpr :=
'tactics' OpenBlock Tactic'* CloseBlock
;
@
-}
tacticsExpr :: SyntaxInfo -> IdrisParser PTerm
tacticsExpr syn = do kw <- reservedFC "tactics"
ts <- indentedBlock1 (tactic syn)
highlightP kw AnnKeyword
warnTacticDeprecation kw
return $ PTactics ts
<?> "tactics block"
{- | Parses a simple expression
@
SimpleExpr ::=
{- External (User-defined) Simple Expression -}
| '?' Name
| % 'instance'
| 'Refl' ('{' Expr '}')?
| ProofExpr
| TacticsExpr
| FnName
| Idiom
| List
| Alt
| Bracketed
| Constant
| Type
| 'Void'
| Quasiquote
| NameQuote
| Unquote
| '_'
;
@
-}
simpleExpr :: SyntaxInfo -> IdrisParser PTerm
simpleExpr syn =
try (simpleExternalExpr syn)
<|> do (x, FC f (l, c) end) <- try (lchar '?' *> name)
return (PMetavar (FC f (l, c-1) end) x)
<|> do lchar '%'; fc <- getFC; reserved "instance"; return (PResolveTC fc)
<|> do reserved "elim_for"; fc <- getFC; t <- fst <$> fnName; return (PRef fc [] (SN $ ElimN t))
<|> proofExpr syn
<|> tacticsExpr syn
<|> try (do reserved "Type"; symbol "*"; return $ PUniverse AllTypes)
<|> do reserved "AnyType"; return $ PUniverse AllTypes
<|> PType <$> reservedFC "Type"
<|> do reserved "UniqueType"; return $ PUniverse UniqueType
<|> do reserved "NullType"; return $ PUniverse NullType
<|> do (c, cfc) <- constant
fc <- getFC
return (modifyConst syn fc (PConstant cfc c))
<|> do symbol "'"; fc <- getFC; str <- fst <$> name
return (PApp fc (PRef fc [] (sUN "Symbol_"))
[pexp (PConstant NoFC (Str (show str)))])
<|> do (x, fc) <- fnName
if inPattern syn
then option (PRef fc [fc] x)
(do reservedOp "@"
s <- simpleExpr syn
fcIn <- getFC
return (PAs fcIn x s))
else return (PRef fc [fc] x)
<|> idiom syn
<|> listExpr syn
<|> alt syn
<|> do reservedOp "!"
s <- simpleExpr syn
fc <- getFC
return (PAppBind fc s [])
<|> bracketed (disallowImp syn)
<|> quasiquote syn
<|> namequote syn
<|> unquote syn
<|> do lchar '_'; return Placeholder
<?> "expression"
{- |Parses an expression in parentheses
@
Bracketed ::= '(' Bracketed'
@
-}
bracketed :: SyntaxInfo -> IdrisParser PTerm
bracketed syn = do (FC fn (sl, sc) _) <- getFC
lchar '(' <?> "parenthesized expression"
bracketed' (FC fn (sl, sc) (sl, sc+1)) syn
{- |Parses the rest of an expression in parentheses
@
Bracketed' ::=
')'
| Expr ')'
| ExprList ')'
| Expr '**' Expr ')'
| Operator Expr ')'
| Expr Operator ')'
| Name ':' Expr '**' Expr ')'
;
@
-}
bracketed' :: FC -> SyntaxInfo -> IdrisParser PTerm
bracketed' open syn =
do (FC f start (l, c)) <- getFC
lchar ')'
return $ PTrue (spanFC open (FC f start (l, c+1))) TypeOrTerm
<|> try (do (ln, lnfc) <- name
colonFC <- lcharFC ':'
lty <- expr syn
starsFC <- reservedOpFC "**"
fc <- getFC
r <- expr syn
close <- lcharFC ')'
return (PDPair fc [open, colonFC, starsFC, close] TypeOrTerm (PRef lnfc [] ln) lty r))
<|> try (do fc <- getFC; o <- operator; e <- expr syn; lchar ')'
-- No prefix operators! (bit of a hack here...)
if (o == "-" || o == "!")
then fail "minus not allowed in section"
else return $ PLam fc (sMN 1000 "ARG") NoFC Placeholder
(PApp fc (PRef fc [] (sUN o)) [pexp (PRef fc [] (sMN 1000 "ARG")),
pexp e]))
<|> try (do l <- simpleExpr syn
op <- option Nothing (do o <- operator
lchar ')'
return (Just o))
fc0 <- getFC
case op of
Nothing -> bracketedExpr syn open l
Just o -> return $ PLam fc0 (sMN 1000 "ARG") NoFC Placeholder
(PApp fc0 (PRef fc0 [] (sUN o)) [pexp l,
pexp (PRef fc0 [] (sMN 1000 "ARG"))]))
<|> do l <- expr syn
bracketedExpr syn open l
-- | Parse the contents of parentheses, after an expression has been parsed.
bracketedExpr :: SyntaxInfo -> FC -> PTerm -> IdrisParser PTerm
bracketedExpr syn openParenFC e =
do lchar ')'; return e
<|> do exprs <- many (do comma <- lcharFC ','
r <- expr syn
return (r, comma))
closeParenFC <- lcharFC ')'
let hilite = [openParenFC, closeParenFC] ++ map snd exprs
return $ PPair openParenFC hilite TypeOrTerm e (mergePairs exprs)
<|> do starsFC <- reservedOpFC "**"
r <- expr syn
closeParenFC <- lcharFC ')'
return (PDPair starsFC [openParenFC, starsFC, closeParenFC] TypeOrTerm e Placeholder r)
<?> "end of bracketed expression"
where mergePairs :: [(PTerm, FC)] -> PTerm
mergePairs [(t, fc)] = t
mergePairs ((t, fc):rs) = PPair fc [] TypeOrTerm t (mergePairs rs)
-- A bit of a hack here: if the integer doesn't fit in an Int, treat it as a
-- big integer, otherwise try fromInteger and the constants as alternatives.
-- A better solution would be to fix fromInteger to work with Integer, as the
-- name suggests, rather than Int.
{-| Finds optimal type for integer constant -}
modifyConst :: SyntaxInfo -> FC -> PTerm -> PTerm
modifyConst syn fc (PConstant inFC (BI x))
| not (inPattern syn)
= PConstSugar inFC $ -- wrap in original location for highlighting
PAlternative [] FirstSuccess
(PApp fc (PRef fc [] (sUN "fromInteger")) [pexp (PConstant NoFC (BI (fromInteger x)))]
: consts)
| otherwise = PConstSugar inFC $
PAlternative [] FirstSuccess consts
where
consts = [ PConstant inFC (BI x)
, PConstant inFC (I (fromInteger x))
, PConstant inFC (B8 (fromInteger x))
, PConstant inFC (B16 (fromInteger x))
, PConstant inFC (B32 (fromInteger x))
, PConstant inFC (B64 (fromInteger x))
]
modifyConst syn fc x = x
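-- For example, the literal @42@ in a term position becomes (roughly)
--
-- > PConstSugar _ (PAlternative [] FirstSuccess
-- >   (PApp _ (PRef _ _ "fromInteger") [pexp (PConstant _ (BI 42))] : consts))
--
-- where @consts@ holds the plain 'BI', 'I' and fixed-width alternatives, so
-- elaboration can pick whichever numeric type fits the context.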
{- | Parses an alternative expression
@
Alt ::= '(|' Expr_List '|)';
Expr_List ::=
Expr'
| Expr' ',' Expr_List
;
@
-}
alt :: SyntaxInfo -> IdrisParser PTerm
alt syn = do symbol "(|"; alts <- sepBy1 (expr' syn) (lchar ','); symbol "|)"
return (PAlternative [] FirstSuccess alts)
{- | Parses a possibly hidden simple expression
@
HSimpleExpr ::=
'.' SimpleExpr
| SimpleExpr
;
@
-}
hsimpleExpr :: SyntaxInfo -> IdrisParser PTerm
hsimpleExpr syn =
do lchar '.'
e <- simpleExpr syn
return $ PHidden e
<|> simpleExpr syn
<?> "expression"
{- | Parses a unification log expression
UnifyLog ::=
'%' 'unifyLog' SimpleExpr
;
-}
unifyLog :: SyntaxInfo -> IdrisParser PTerm
unifyLog syn = do (FC fn (sl, sc) kwEnd) <- try (lchar '%' *> reservedFC "unifyLog")
tm <- simpleExpr syn
highlightP (FC fn (sl, sc-1) kwEnd) AnnKeyword
return (PUnifyLog tm)
<?> "unification log expression"
{- | Parses a new-style tactics expression
RunTactics ::=
'%' 'runElab' SimpleExpr
;
-}
runElab :: SyntaxInfo -> IdrisParser PTerm
runElab syn = do (FC fn (sl, sc) kwEnd) <- try (lchar '%' *> reservedFC "runElab")
fc <- getFC
tm <- simpleExpr syn
highlightP (FC fn (sl, sc-1) kwEnd) AnnKeyword
return $ PRunElab fc tm (syn_namespace syn)
<?> "new-style tactics expression"
{- | Parses a disambiguation expression
Disamb ::=
    'with' NameList Expr
;
-}
disamb :: SyntaxInfo -> IdrisParser PTerm
disamb syn = do kw <- reservedFC "with"
ns <- sepBy1 (fst <$> name) (lchar ',')
tm <- expr' syn
highlightP kw AnnKeyword
return (PDisamb (map tons ns) tm)
<?> "namespace disambiguation expression"
where tons (NS n s) = txt (show n) : s
tons n = [txt (show n)]
{- | Parses a no implicits expression
@
NoImplicits ::=
'%' 'noImplicits' SimpleExpr
;
@
-}
noImplicits :: SyntaxInfo -> IdrisParser PTerm
noImplicits syn = do try (lchar '%' *> reserved "noImplicits")
tm <- simpleExpr syn
return (PNoImplicits tm)
<?> "no implicits expression"
{- | Parses a function application expression
@
App ::=
'mkForeign' Arg Arg*
| MatchApp
| SimpleExpr Arg*
;
MatchApp ::=
SimpleExpr '<==' FnName
;
@
-}
app :: SyntaxInfo -> IdrisParser PTerm
app syn = do f <- simpleExpr syn
(do try $ reservedOp "<=="
fc <- getFC
ff <- fst <$> fnName
return (PLet fc (sMN 0 "match") NoFC
f
(PMatchApp fc ff)
(PRef fc [] (sMN 0 "match")))
<?> "matching application expression") <|> (do
fc <- getFC
i <- get
args <- many (do notEndApp; arg syn)
case args of
[] -> return f
_ -> return (flattenFromInt fc f args))
<?> "function application"
where
-- bit of a hack to deal with the situation where we're applying a
-- literal to an argument, which we may want for obscure applications
-- of fromInteger, and this will help disambiguate better.
-- We know, at least, it won't be one of the constants!
flattenFromInt fc (PAlternative _ x alts) args
| Just i <- getFromInt alts
= PApp fc (PRef fc [] (sUN "fromInteger")) (i : args)
flattenFromInt fc f args = PApp fc f args
getFromInt ((PApp _ (PRef _ _ n) [a]) : _) | n == sUN "fromInteger" = Just a
getFromInt (_ : xs) = getFromInt xs
getFromInt _ = Nothing
{-| Parses a function argument
@
Arg ::=
ImplicitArg
| ConstraintArg
| SimpleExpr
;
@
-}
arg :: SyntaxInfo -> IdrisParser PArg
arg syn = implicitArg syn
<|> constraintArg syn
<|> do e <- simpleExpr syn
return (pexp e)
<?> "function argument"
{-| Parses an implicit function argument
@
ImplicitArg ::=
'{' Name ('=' Expr)? '}'
;
@
-}
implicitArg :: SyntaxInfo -> IdrisParser PArg
implicitArg syn = do lchar '{'
(n, nfc) <- name
fc <- getFC
v <- option (PRef nfc [nfc] n) (do lchar '='
expr syn)
lchar '}'
return (pimp n v True)
<?> "implicit function argument"
{-| Parses a constraint argument (for selecting a named type class instance)
> ConstraintArg ::=
> '@{' Expr '}'
> ;
-}
constraintArg :: SyntaxInfo -> IdrisParser PArg
constraintArg syn = do symbol "@{"
e <- expr syn
symbol "}"
return (pconst e)
<?> "constraint argument"
{-| Parses a quasiquote expression (for building reflected terms using the elaborator)
> Quasiquote ::= '`(' Expr ')'
-}
quasiquote :: SyntaxInfo -> IdrisParser PTerm
quasiquote syn = do startFC <- symbolFC "`("
e <- expr syn { syn_in_quasiquote = (syn_in_quasiquote syn) + 1 ,
inPattern = False }
g <- optional $
do fc <- symbolFC ":"
ty <- expr syn { inPattern = False } -- don't allow antiquotes
return (ty, fc)
endFC <- symbolFC ")"
mapM_ (uncurry highlightP) [(startFC, AnnKeyword), (endFC, AnnKeyword), (spanFC startFC endFC, AnnQuasiquote)]
case g of
Just (_, fc) -> highlightP fc AnnKeyword
_ -> return ()
return $ PQuasiquote e (fst <$> g)
<?> "quasiquotation"
{-| Parses an unquoting inside a quasiquotation (for building reflected terms using the elaborator)
> Unquote ::= '~' Expr
-}
unquote :: SyntaxInfo -> IdrisParser PTerm
unquote syn = do guard (syn_in_quasiquote syn > 0)
startFC <- symbolFC "~"
e <- simpleExpr syn { syn_in_quasiquote = syn_in_quasiquote syn - 1 }
endFC <- getFC
highlightP startFC AnnKeyword
highlightP (spanFC startFC endFC) AnnAntiquote
return $ PUnquote e
<?> "unquotation"
{-| Parses a quotation of a name (for using the elaborator to resolve boring details)
> NameQuote ::= '`{' Name '}'
-}
namequote :: SyntaxInfo -> IdrisParser PTerm
namequote syn = do (startFC, res) <-
try (do fc <- symbolFC "`{{"
return (fc, False)) <|>
(do fc <- symbolFC "`{"
return (fc, True))
(n, nfc) <- fnName
endFC <- if res then symbolFC "}" else symbolFC "}}"
mapM_ (uncurry highlightP)
[ (startFC, AnnKeyword)
, (endFC, AnnKeyword)
, (spanFC startFC endFC, AnnQuasiquote)
]
return $ PQuoteName n res nfc
<?> "quoted name"
{-| Parses a record field setter expression
@
RecordType ::=
'record' '{' FieldTypeList '}';
@
@
FieldTypeList ::=
FieldType
| FieldType ',' FieldTypeList
;
@
@
FieldType ::=
FnName '=' Expr
;
@
-}
recordType :: SyntaxInfo -> IdrisParser PTerm
recordType syn =
do kw <- reservedFC "record"
lchar '{'
fgs <- fieldGetOrSet
lchar '}'
fc <- getFC
rec <- optional (simpleExpr syn)
highlightP kw AnnKeyword
case fgs of
Left fields ->
case rec of
Nothing ->
return (PLam fc (sMN 0 "fldx") NoFC Placeholder
(applyAll fc fields (PRef fc [] (sMN 0 "fldx"))))
Just v -> return (applyAll fc fields v)
Right fields ->
case rec of
Nothing ->
return (PLam fc (sMN 0 "fldx") NoFC Placeholder
(getAll fc (reverse fields)
(PRef fc [] (sMN 0 "fldx"))))
Just v -> return (getAll fc (reverse fields) v)
<?> "record setting expression"
where fieldSet :: IdrisParser ([Name], PTerm)
fieldSet = do ns <- fieldGet
lchar '='
e <- expr syn
return (ns, e)
<?> "field setter"
fieldGet :: IdrisParser [Name]
fieldGet = sepBy1 (fst <$> fnName) (symbol "->")
fieldGetOrSet :: IdrisParser (Either [([Name], PTerm)] [Name])
fieldGetOrSet = try (do fs <- sepBy1 fieldSet (lchar ',')
return (Left fs))
<|> do f <- fieldGet
return (Right f)
applyAll :: FC -> [([Name], PTerm)] -> PTerm -> PTerm
applyAll fc [] x = x
applyAll fc ((ns, e) : es) x
= applyAll fc es (doUpdate fc ns e x)
doUpdate fc [n] e get
= PApp fc (PRef fc [] (mkType n)) [pexp e, pexp get]
doUpdate fc (n : ns) e get
= PApp fc (PRef fc [] (mkType n))
[pexp (doUpdate fc ns e (PApp fc (PRef fc [] n) [pexp get])),
pexp get]
getAll :: FC -> [Name] -> PTerm -> PTerm
getAll fc [n] e = PApp fc (PRef fc [] n) [pexp e]
getAll fc (n:ns) e = PApp fc (PRef fc [] n) [pexp (getAll fc ns e)]
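-- Roughly, @record { a = v } x@ elaborates to @set_a v x@, a nested update
-- @record { a->b = v } x@ to @set_a (set_b v (a x)) x@, and the accessor form
-- @record { a->b } x@ to @b (a x)@ (see 'mkType' below for the @set_@ names).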
-- | Builds the name of the generated setter function (@set_*@) for a record field
mkType :: Name -> Name
mkType (UN n) = sUN ("set_" ++ str n)
mkType (MN 0 n) = sMN 0 ("set_" ++ str n)
mkType (NS n s) = NS (mkType n) s
{- | Parses a type signature
@
TypeSig ::=
':' Expr
;
@
@
TypeExpr ::= ConstraintList? Expr;
@
-}
typeExpr :: SyntaxInfo -> IdrisParser PTerm
typeExpr syn = do cs <- if implicitAllowed syn then constraintList syn else return []
sc <- expr syn
return (bindList (PPi constraint) cs sc)
<?> "type signature"
{- | Parses a lambda expression
@
Lambda ::=
'\\' TypeOptDeclList LambdaTail
| '\\' SimpleExprList LambdaTail
;
@
@
SimpleExprList ::=
SimpleExpr
| SimpleExpr ',' SimpleExprList
;
@
@
LambdaTail ::=
Impossible
| '=>' Expr
@
-}
lambda :: SyntaxInfo -> IdrisParser PTerm
lambda syn = do lchar '\\' <?> "lambda expression"
((do xt <- try $ tyOptDeclList syn
fc <- getFC
sc <- lambdaTail
return (bindList (PLam fc) xt sc))
<|>
(do ps <- sepBy (do fc <- getFC
e <- simpleExpr (syn { inPattern = True })
return (fc, e))
(lchar ',')
sc <- lambdaTail
return (pmList (zip [0..] ps) sc)))
<?> "lambda expression"
where pmList :: [(Int, (FC, PTerm))] -> PTerm -> PTerm
pmList [] sc = sc
pmList ((i, (fc, x)) : xs) sc
= PLam fc (sMN i "lamp") NoFC Placeholder
(PCase fc (PRef fc [] (sMN i "lamp"))
[(x, pmList xs sc)])
lambdaTail :: IdrisParser PTerm
lambdaTail = impossible <|> symbol "=>" *> expr syn
{- | Parses a term rewrite expression
@
RewriteTerm ::=
'rewrite' Expr ('==>' Expr)? 'in' Expr
;
@
-}
rewriteTerm :: SyntaxInfo -> IdrisParser PTerm
rewriteTerm syn = do kw <- reservedFC "rewrite"
fc <- getFC
prf <- expr syn
giving <- optional (do symbol "==>"; expr' syn)
kw' <- reservedFC "in"; sc <- expr syn
highlightP kw AnnKeyword
highlightP kw' AnnKeyword
return (PRewrite fc
(PApp fc (PRef fc [] (sUN "sym")) [pexp prf]) sc
giving)
<?> "term rewrite expression"
{- |Parses a let binding
@
Let ::=
'let' Name TypeSig'? '=' Expr 'in' Expr
| 'let' Expr' '=' Expr' 'in' Expr
TypeSig' ::=
':' Expr'
;
@
-}
let_ :: SyntaxInfo -> IdrisParser PTerm
let_ syn = try (do kw <- reservedFC "let"
ls <- indentedBlock (let_binding syn)
kw' <- reservedFC "in"; sc <- expr syn
highlightP kw AnnKeyword; highlightP kw' AnnKeyword
return (buildLets ls sc))
<?> "let binding"
where buildLets [] sc = sc
buildLets ((fc, PRef nfc _ n, ty, v, []) : ls) sc
= PLet fc n nfc ty v (buildLets ls sc)
buildLets ((fc, pat, ty, v, alts) : ls) sc
= PCase fc v ((pat, buildLets ls sc) : alts)
let_binding syn = do fc <- getFC;
pat <- expr' (syn { inPattern = True })
ty <- option Placeholder (do lchar ':'; expr' syn)
lchar '='
v <- expr syn
ts <- option [] (do lchar '|'
sepBy1 (do_alt syn) (lchar '|'))
return (fc,pat,ty,v,ts)
{- | Parses a conditional expression
@
If ::= 'if' Expr 'then' Expr 'else' Expr
@
-}
if_ :: SyntaxInfo -> IdrisParser PTerm
if_ syn = (do ifFC <- reservedFC "if"
fc <- getFC
c <- expr syn
thenFC <- reservedFC "then"
t <- expr syn
elseFC <- reservedFC "else"
f <- expr syn
mapM_ (flip highlightP AnnKeyword) [ifFC, thenFC, elseFC]
return (PIfThenElse fc c t f))
<?> "conditional expression"
{- | Parses a quote goal
@
QuoteGoal ::=
'quoteGoal' Name 'by' Expr 'in' Expr
;
@
-}
quoteGoal :: SyntaxInfo -> IdrisParser PTerm
quoteGoal syn = do kw1 <- reservedFC "quoteGoal"; n <- fst <$> name;
kw2 <- reservedFC "by"
r <- expr syn
kw3 <- reservedFC "in"
fc <- getFC
sc <- expr syn
mapM_ (flip highlightP AnnKeyword) [kw1, kw2, kw3]
return (PGoal fc r n sc)
<?> "quote goal expression"
{- | Parses a dependent type signature
@
Pi ::= PiOpts Static? Pi'
@
@
Pi' ::=
OpExpr ('->' Pi)?
| '(' TypeDeclList ')' '->' Pi
| '{' TypeDeclList '}' '->' Pi
| '{' 'auto' TypeDeclList '}' '->' Pi
| '{' 'default' SimpleExpr TypeDeclList '}' '->' Pi
;
@
-}
bindsymbol opts st syn
= do symbol "->"
return (Exp opts st False)
explicitPi opts st syn
= do xt <- try (lchar '(' *> typeDeclList syn <* lchar ')')
binder <- bindsymbol opts st syn
sc <- expr syn
return (bindList (PPi binder) xt sc)
autoImplicit opts st syn
= do kw <- reservedFC "auto"
when (st == Static) $ fail "auto implicits can not be static"
xt <- typeDeclList syn
lchar '}'
symbol "->"
sc <- expr syn
highlightP kw AnnKeyword
return (bindList (PPi
(TacImp [] Dynamic (PTactics [ProofSearch True True 100 Nothing [] []]))) xt sc)
defaultImplicit opts st syn = do
kw <- reservedFC "default"
when (st == Static) $ fail "default implicits can not be static"
ist <- get
script' <- simpleExpr syn
let script = debindApp syn . desugar syn ist $ script'
xt <- typeDeclList syn
lchar '}'
symbol "->"
sc <- expr syn
highlightP kw AnnKeyword
return (bindList (PPi (TacImp [] Dynamic script)) xt sc)
normalImplicit opts st syn = do
xt <- typeDeclList syn <* lchar '}'
symbol "->"
cs <- constraintList syn
sc <- expr syn
let (im,cl)
= if implicitAllowed syn
then (Imp opts st False Nothing,
constraint)
else (Imp opts st False (Just (Impl False)),
Imp opts st False (Just (Impl True)))
return (bindList (PPi im) xt
(bindList (PPi cl) cs sc))
implicitPi opts st syn =
autoImplicit opts st syn
<|> defaultImplicit opts st syn
<|> normalImplicit opts st syn
unboundPi opts st syn = do
x <- opExpr syn
(do binder <- bindsymbol opts st syn
sc <- expr syn
return (PPi binder (sUN "__pi_arg") NoFC x sc))
<|> return x
pi :: SyntaxInfo -> IdrisParser PTerm
pi syn =
do opts <- piOpts syn
st <- static
explicitPi opts st syn
<|> try (do lchar '{'; implicitPi opts st syn)
<|> unboundPi opts st syn
<?> "dependent type signature"
{- | Parses Possible Options for Pi Expressions
@
PiOpts ::= '.'?
@
-}
piOpts :: SyntaxInfo -> IdrisParser [ArgOpt]
piOpts syn | implicitAllowed syn =
lchar '.' *> return [InaccessibleArg]
<|> return []
piOpts syn = return []
{- | Parses a type constraint list
@
ConstraintList ::=
'(' Expr_List ')' '=>'
| Expr '=>'
;
@
-}
constraintList :: SyntaxInfo -> IdrisParser [(Name, FC, PTerm)]
constraintList syn = try (constraintList1 syn)
<|> return []
constraintList1 :: SyntaxInfo -> IdrisParser [(Name, FC, PTerm)]
constraintList1 syn = try (do lchar '('
tys <- sepBy1 nexpr (lchar ',')
lchar ')'
reservedOp "=>"
return tys)
<|> try (do t <- opExpr (disallowImp syn)
reservedOp "=>"
return [(defname, NoFC, t)])
<?> "type constraint list"
where nexpr = try (do (n, fc) <- name; lchar ':'
e <- expr syn
return (n, fc, e))
<|> do e <- expr syn
return (defname, NoFC, e)
defname = sMN 0 "constrarg"
{- | Parses a type declaration list
@
TypeDeclList ::=
FunctionSignatureList
| NameList TypeSig
;
@
@
FunctionSignatureList ::=
Name TypeSig
| Name TypeSig ',' FunctionSignatureList
;
@
-}
typeDeclList :: SyntaxInfo -> IdrisParser [(Name, FC, PTerm)]
typeDeclList syn = try (sepBy1 (do (x, xfc) <- fnName
lchar ':'
t <- typeExpr (disallowImp syn)
return (x, xfc, t))
(lchar ','))
<|> do ns <- sepBy1 name (lchar ',')
lchar ':'
t <- typeExpr (disallowImp syn)
return (map (\(x, xfc) -> (x, xfc, t)) ns)
<?> "type declaration list"
{- | Parses a type declaration list with optional parameters
@
TypeOptDeclList ::=
NameOrPlaceholder TypeSig?
| NameOrPlaceholder TypeSig? ',' TypeOptDeclList
;
@
@
NameOrPlaceHolder ::= Name | '_';
@
-}
tyOptDeclList :: SyntaxInfo -> IdrisParser [(Name, FC, PTerm)]
tyOptDeclList syn = sepBy1 (do (x, fc) <- nameOrPlaceholder
t <- option Placeholder (do lchar ':'
expr syn)
return (x, fc, t))
(lchar ',')
<?> "type declaration list"
where nameOrPlaceholder :: IdrisParser (Name, FC)
nameOrPlaceholder = fnName
<|> do symbol "_"
return (sMN 0 "underscore", NoFC)
<?> "name or placeholder"
{- | Parses a list literal expression e.g. [1,2,3] or a comprehension [ (x, y) | x <- xs , y <- ys ]
@
ListExpr ::=
'[' ']'
| '[' Expr '|' DoList ']'
| '[' ExprList ']'
;
@
@
DoList ::=
Do
| Do ',' DoList
;
@
@
ExprList ::=
Expr
| Expr ',' ExprList
;
@
-}
listExpr :: SyntaxInfo -> IdrisParser PTerm
listExpr syn = do (FC f (l, c) _) <- getFC
lchar '['; fc <- getFC;
(try . token $ do (char ']' <?> "end of list expression")
(FC _ _ (l', c')) <- getFC
return (mkNil (FC f (l, c) (l', c'))))
<|> (do x <- expr syn <?> "expression"
(do try (lchar '|') <?> "list comprehension"
qs <- sepBy1 (do_ syn) (lchar ',')
lchar ']'
return (PDoBlock (map addGuard qs ++
[DoExp fc (PApp fc (PRef fc [] (sUN "return"))
[pexp x])]))) <|>
(do xs <- many (do (FC fn (sl, sc) _) <- getFC
lchar ',' <?> "list element"
let commaFC = FC fn (sl, sc) (sl, sc + 1)
elt <- expr syn
return (elt, commaFC))
(FC fn (sl, sc) _) <- getFC
lchar ']' <?> "end of list expression"
let rbrackFC = FC fn (sl, sc) (sl, sc+1)
return (mkList fc rbrackFC ((x, (FC f (l, c) (l, c+1))) : xs))))
<?> "list expression"
where
mkNil :: FC -> PTerm
mkNil fc = PRef fc [fc] (sUN "Nil")
mkList :: FC -> FC -> [(PTerm, FC)] -> PTerm
mkList errFC nilFC [] = PRef nilFC [nilFC] (sUN "Nil")
mkList errFC nilFC ((x, fc) : xs) = PApp errFC (PRef fc [fc] (sUN "::")) [pexp x, pexp (mkList errFC nilFC xs)]
addGuard :: PDo -> PDo
addGuard (DoExp fc e) = DoExp fc (PApp fc (PRef fc [] (sUN "guard"))
[pexp e])
addGuard x = x
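-- Roughly, a comprehension @[ f x | x <- xs, p x ]@ becomes the do block
-- @do { x <- xs; guard (p x); return (f x) }@: expression qualifiers are
-- wrapped in 'guard' and the head expression is returned last.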
{- | Parses a do-block
@
Do' ::= Do KeepTerminator;
@
@
DoBlock ::=
'do' OpenBlock Do'+ CloseBlock
;
@
-}
doBlock :: SyntaxInfo -> IdrisParser PTerm
doBlock syn
= do kw <- reservedFC "do"
ds <- indentedBlock1 (do_ syn)
highlightP kw AnnKeyword
return (PDoBlock ds)
<?> "do block"
{- | Parses an expression inside a do block
@
Do ::=
'let' Name TypeSig'? '=' Expr
| 'let' Expr' '=' Expr
| Name '<-' Expr
| Expr' '<-' Expr
| Expr
;
@
-}
do_ :: SyntaxInfo -> IdrisParser PDo
do_ syn
= try (do kw <- reservedFC "let"
(i, ifc) <- name
ty <- option Placeholder (do lchar ':'
expr' syn)
reservedOp "="
fc <- getFC
e <- expr syn
highlightP kw AnnKeyword
return (DoLet fc i ifc ty e))
<|> try (do kw <- reservedFC "let"
i <- expr' syn
reservedOp "="
fc <- getFC
sc <- expr syn
highlightP kw AnnKeyword
return (DoLetP fc i sc))
<|> try (do (i, ifc) <- name
symbol "<-"
fc <- getFC
e <- expr syn;
option (DoBind fc i ifc e)
(do lchar '|'
ts <- sepBy1 (do_alt syn) (lchar '|')
return (DoBindP fc (PRef ifc [ifc] i) e ts)))
<|> try (do i <- expr' syn
symbol "<-"
fc <- getFC
e <- expr syn;
option (DoBindP fc i e [])
(do lchar '|'
ts <- sepBy1 (do_alt syn) (lchar '|')
return (DoBindP fc i e ts)))
<|> do e <- expr syn
fc <- getFC
return (DoExp fc e)
<?> "do block expression"
do_alt syn = do l <- expr' syn
option (Placeholder, l)
(do symbol "=>"
r <- expr' syn
return (l, r))
{- | Parses an expression in idiom brackets
@
Idiom ::= '[|' Expr '|]';
@
-}
idiom :: SyntaxInfo -> IdrisParser PTerm
idiom syn
= do symbol "[|"
fc <- getFC
e <- expr syn
symbol "|]"
return (PIdiom fc e)
<?> "expression in idiom brackets"
{- |Parses a constant or literal expression
@
Constant ::=
'Integer'
| 'Int'
| 'Char'
| 'Double'
| 'String'
| 'Bits8'
| 'Bits16'
| 'Bits32'
| 'Bits64'
| Float_t
| Natural_t
| VerbatimString_t
| String_t
| Char_t
;
@
-}
constants :: [(String, Idris.Core.TT.Const)]
constants =
[ ("Integer", AType (ATInt ITBig))
, ("Int", AType (ATInt ITNative))
, ("Char", AType (ATInt ITChar))
, ("Double", AType ATFloat)
, ("String", StrType)
, ("prim__WorldType", WorldType)
, ("prim__TheWorld", TheWorld)
, ("Bits8", AType (ATInt (ITFixed IT8)))
, ("Bits16", AType (ATInt (ITFixed IT16)))
, ("Bits32", AType (ATInt (ITFixed IT32)))
, ("Bits64", AType (ATInt (ITFixed IT64)))
]
-- | Parse a constant and its source span
constant :: IdrisParser (Idris.Core.TT.Const, FC)
constant = choice [ do fc <- reservedFC name; return (ty, fc)
| (name, ty) <- constants
]
<|> do (f, fc) <- try float; return (Fl f, fc)
<|> do (i, fc) <- natural; return (BI i, fc)
<|> do (s, fc) <- verbatimStringLiteral; return (Str s, fc)
<|> do (s, fc) <- stringLiteral; return (Str s, fc)
<|> do (c, fc) <- try charLiteral; return (Ch c, fc) -- Currently ambiguous with symbols
<?> "constant or literal"
{- | Parses a verbatim multi-line string literal (triple-quoted)
@
VerbatimString_t ::=
'\"\"\"' ~'\"\"\"' '\"\"\"'
;
@
-}
verbatimStringLiteral :: MonadicParsing m => m (String, FC)
verbatimStringLiteral = token $ do (FC f start _) <- getFC
try $ string "\"\"\""
str <- manyTill anyChar $ try (string "\"\"\"")
(FC _ _ end) <- getFC
return (str, FC f start end)
{- | Parses a static modifier
@
Static ::=
'[' static ']'
;
@
-}
static :: IdrisParser Static
static = do reserved "[static]"; return Static
<|> return Dynamic
<?> "static modifier"
{- | Parses a tactic script
@
Tactic ::= 'intro' NameList?
| 'intros'
| 'refine' Name Imp+
| 'mrefine' Name
| 'rewrite' Expr
| 'induction' Expr
| 'equiv' Expr
| 'let' Name ':' Expr' '=' Expr
| 'let' Name '=' Expr
| 'focus' Name
| 'exact' Expr
| 'applyTactic' Expr
| 'reflect' Expr
| 'fill' Expr
| 'try' Tactic '|' Tactic
| '{' TacticSeq '}'
| 'compute'
| 'trivial'
| 'solve'
| 'attack'
| 'state'
| 'term'
| 'undo'
| 'qed'
| 'abandon'
| ':' 'q'
;
Imp ::= '?' | '_';
TacticSeq ::=
Tactic ';' Tactic
| Tactic ';' TacticSeq
;
@
-}
-- | A specification of the arguments that tactics can take
data TacticArg = NameTArg -- ^ Names: n1, n2, n3, ... n
| ExprTArg
| AltsTArg
| StringLitTArg
-- The FIXMEs are Issue #1766 in the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1766
-- | A list of available tactics and their argument requirements
tactics :: [([String], Maybe TacticArg, SyntaxInfo -> IdrisParser PTactic)]
tactics =
[ (["intro"], Nothing, const $ -- FIXME syntax for intro (fresh name)
do ns <- sepBy (spaced (fst <$> name)) (lchar ','); return $ Intro ns)
, noArgs ["intros"] Intros
, noArgs ["unfocus"] Unfocus
, (["refine"], Just ExprTArg, const $
do n <- spaced (fst <$> fnName)
imps <- many imp
return $ Refine n imps)
, (["claim"], Nothing, \syn ->
do n <- indentPropHolds gtProp *> (fst <$> name)
goal <- indentPropHolds gtProp *> expr syn
return $ Claim n goal)
, (["mrefine"], Just ExprTArg, const $
do n <- spaced (fst <$> fnName)
return $ MatchRefine n)
, expressionTactic ["rewrite"] Rewrite
, expressionTactic ["case"] CaseTac
, expressionTactic ["induction"] Induction
, expressionTactic ["equiv"] Equiv
, (["let"], Nothing, \syn -> -- FIXME syntax for let
do n <- (indentPropHolds gtProp *> (fst <$> name))
(do indentPropHolds gtProp *> lchar ':'
ty <- indentPropHolds gtProp *> expr' syn
indentPropHolds gtProp *> lchar '='
t <- indentPropHolds gtProp *> expr syn
i <- get
return $ LetTacTy n (desugar syn i ty) (desugar syn i t))
<|> (do indentPropHolds gtProp *> lchar '='
t <- indentPropHolds gtProp *> expr syn
i <- get
return $ LetTac n (desugar syn i t)))
, (["focus"], Just ExprTArg, const $
do n <- spaced (fst <$> name)
return $ Focus n)
, expressionTactic ["exact"] Exact
, expressionTactic ["applyTactic"] ApplyTactic
, expressionTactic ["byReflection"] ByReflection
, expressionTactic ["reflect"] Reflect
, expressionTactic ["fill"] Fill
, (["try"], Just AltsTArg, \syn ->
do t <- spaced (tactic syn)
lchar '|'
t1 <- spaced (tactic syn)
return $ Try t t1)
, noArgs ["compute"] Compute
, noArgs ["trivial"] Trivial
, noArgs ["unify"] DoUnify
, (["search"], Nothing, const $
do depth <- option 10 $ fst <$> natural
return (ProofSearch True True (fromInteger depth) Nothing [] []))
, noArgs ["instance"] TCInstance
, noArgs ["solve"] Solve
, noArgs ["attack"] Attack
, noArgs ["state", ":state"] ProofState
, noArgs ["term", ":term"] ProofTerm
, noArgs ["undo", ":undo"] Undo
, noArgs ["qed", ":qed"] Qed
, noArgs ["abandon", ":q"] Abandon
, noArgs ["skip"] Skip
, noArgs ["sourceLocation"] SourceFC
, expressionTactic [":e", ":eval"] TEval
, expressionTactic [":t", ":type"] TCheck
, expressionTactic [":search"] TSearch
, (["fail"], Just StringLitTArg, const $
do msg <- fst <$> stringLiteral
return $ TFail [Idris.Core.TT.TextPart msg])
, ([":doc"], Just ExprTArg, const $
do whiteSpace
doc <- (Right . fst <$> constant) <|> (Left . fst <$> fnName)
eof
return (TDocStr doc))
]
where
expressionTactic names tactic = (names, Just ExprTArg, \syn ->
do t <- spaced (expr syn)
i <- get
return $ tactic (desugar syn i t))
noArgs names tactic = (names, Nothing, const (return tactic))
spaced parser = indentPropHolds gtProp *> parser
imp :: IdrisParser Bool
imp = do lchar '?'; return False
<|> do lchar '_'; return True
tactic :: SyntaxInfo -> IdrisParser PTactic
tactic syn = choice [ do choice (map reserved names); parser syn
| (names, _, parser) <- tactics ]
<|> do lchar '{'
t <- tactic syn;
lchar ';';
ts <- sepBy1 (tactic syn) (lchar ';')
lchar '}'
return $ TSeq t (mergeSeq ts)
<|> ((lchar ':' >> empty) <?> "prover command")
<?> "tactic"
where
mergeSeq :: [PTactic] -> PTactic
mergeSeq [t] = t
mergeSeq (t:ts) = TSeq t (mergeSeq ts)
-- | Parses a tactic as a whole
fullTactic :: SyntaxInfo -> IdrisParser PTactic
fullTactic syn = do t <- tactic syn
eof
return t
| NightRa/Idris-dev | src/Idris/ParseExpr.hs | bsd-3-clause | 52,579 | 1 | 37 | 19,378 | 15,961 | 7,834 | 8,127 | -1 | -1 |
{-# LANGUAGE Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Array.Lens
-- Copyright : (C) 2012-16 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : MPTCs, Rank2Types, LiberalTypeSynonyms
--
----------------------------------------------------------------------------
module Data.Array.Lens
(
-- * Setters
ixmapped
) where
import Control.Lens
import Data.Array.IArray hiding (index)
-- | This 'setter' can be used to derive a new 'IArray' from an old 'IArray' by
-- applying a function to each of the indices to look it up in the old 'IArray'.
--
-- This is a /contravariant/ 'Setter'.
--
-- @
-- 'ixmap' ≡ 'over' '.' 'ixmapped'
-- 'ixmapped' ≡ 'setting' '.' 'ixmap'
-- 'over' ('ixmapped' b) f arr '!' i ≡ arr '!' f i
-- 'bounds' ('over' ('ixmapped' b) f arr) ≡ b
-- @
ixmapped :: (IArray a e, Ix i, Ix j) => (i,i) -> IndexPreservingSetter (a j e) (a i e) i j
ixmapped = setting . ixmap
{-# INLINE ixmapped #-}
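-- Added usage sketch (hypothetical, not part of the original module; assumes
-- 'Data.Array.listArray' and an @Array Int Char@ are available):
--
-- @
-- arr = listArray (0, 2) "abc" :: Array Int Char
-- over (ixmapped (1, 3)) (subtract 1) arr ! 2 == 'b'   -- looks up arr ! (2 - 1)
-- @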
| ddssff/lens | src/Data/Array/Lens.hs | bsd-3-clause | 1,120 | 0 | 9 | 201 | 125 | 81 | 44 | 9 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.LineSegments
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- This module corresponds to section 3.4 (Line Segments) of the OpenGL 2.1
-- specs.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.LineSegments (
-- * Line Rasterization
lineWidth,
-- * Line Stipple
lineStipple,
-- * Line Antialiasing
lineSmooth,
-- * Implementation-Dependent Limits
aliasedLineWidthRange, smoothLineWidthRange, smoothLineWidthGranularity
) where
import Control.Monad
import Graphics.Rendering.OpenGL.GL.Capability
import Graphics.Rendering.OpenGL.GL.QueryUtils
import Graphics.Rendering.OpenGL.GL.StateVar
import Graphics.Rendering.OpenGL.Raw
--------------------------------------------------------------------------------
-- | 'lineWidth' contains the rasterized width of both aliased and antialiased
-- lines. The initial value is 1. Using a line width other than 1 has different
-- effects, depending on whether line antialiasing is enabled (see
-- 'lineSmooth'). Line antialiasing is initially disabled.
--
-- If line antialiasing is disabled, the actual width is determined by rounding
-- the supplied width to the nearest integer. (If the rounding results in the
-- value 0, it is as if the line width were 1.) If /delta x/ >= /delta y/, /i/
-- pixels are filled in each column that is rasterized, where /i/ is the
-- rounded value of 'lineWidth'. Otherwise, /i/ pixels are filled in each row
-- that is rasterized.
--
-- If antialiasing is enabled, line rasterization produces a fragment for each
-- pixel square that intersects the region lying within the rectangle having
-- width equal to the current line width, length equal to the actual length of
-- the line, and centered on the mathematical line segment. The coverage value
-- for each fragment is the window coordinate area of the intersection of the
-- rectangular region with the corresponding pixel square. This value is saved
-- and used in the final rasterization step.
--
-- Not all widths can be supported when line antialiasing is enabled. If an
-- unsupported width is requested, the nearest supported width is used. Only
-- width 1 is guaranteed to be supported; others depend on the implementation.
-- Likewise, there is a range for aliased line widths as well. To query the
-- range of supported widths of antialiased lines and the size difference
-- between supported widths within the range, query 'smoothLineWidthRange' and
-- 'smoothLineWidthGranularity', respectively. For aliased lines, query the
-- supported range with 'aliasedLineWidthRange'.
--
-- The line width specified when 'lineWidth' is set is always returned when it
-- is queried. Clamping and rounding for aliased and antialiased lines have no
-- effect on the specified value.
--
-- A non-antialiased line width may be clamped to an implementation-dependent
-- maximum. Query 'aliasedLineWidthRange' to determine the maximum width.
--
-- An 'Graphics.Rendering.OpenGL.GLU.Errors.InvalidValue' is generated if
-- 'lineWidth' is set to a value less than or equal to zero.
--
-- An 'Graphics.Rendering.OpenGL.GLU.Errors.InvalidOperation' is generated if
-- 'lineWidth' is set during
-- 'Graphics.Rendering.OpenGL.GL.BeginEnd.renderPrimitive'.
lineWidth :: StateVar GLfloat
lineWidth = makeStateVar (getFloat1 id GetLineWidth) glLineWidth
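-- Added usage sketch (hypothetical, not part of the original module; assumes
-- the '$=' and 'get' operations on 'StateVar' are in scope):
--
-- @
-- lineWidth $= 2.5        -- request 2.5-pixel-wide lines
-- w <- get lineWidth      -- read the current setting back
-- @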
--------------------------------------------------------------------------------
-- | Line stippling masks out certain fragments produced by rasterization; those
-- fragments will not be drawn. The masking is achieved by using three
-- parameters: the repeat count (1st element of the 'lineStipple' pair, clamped
-- to the range [ 1 .. 256 ]), the 16-bit line stipple pattern (2nd element),
-- and an integer stipple counter /s/.
--
-- The counter /s/ is reset to 0 before the first action during
-- 'Graphics.Rendering.OpenGL.GL.BeginEnd.renderPrimitive' is called and before
-- each line segment during
-- 'Graphics.Rendering.OpenGL.GL.BeginEnd.renderPrimitive' is generated. It is
-- incremented after each fragment of a unit width aliased line segment is
-- generated or after each /i/ fragments of an /i/ width line segment are
-- generated. The /i/ fragments associated with count /s/ are masked out if
-- @'Data.Bits.testBit' /pattern/ (( /s/ \/ /factor/ ) /mod/ 16)@ is 'False',
-- otherwise these fragments are sent to the frame buffer. Bit zero of the
-- pattern is the least significant bit, i.e. it is used first.
--
-- Antialiased lines are treated as a sequence of rectangles of height 1 for
-- purposes of stippling. Whether rectangle /s/ is rasterized or not depends on
-- the fragment rule described for aliased lines, counting rectangles rather
-- than groups of fragments.
--
-- The initial value of 'lineStipple' is 'Nothing', i.e. line stippling is
-- disabled.
--
-- An 'Graphics.Rendering.OpenGL.GLU.Errors.InvalidOperation' is generated if
-- 'lineStipple' is set during
-- 'Graphics.Rendering.OpenGL.GL.BeginEnd.renderPrimitive'.
lineStipple :: StateVar (Maybe (GLint, GLushort))
lineStipple =
makeStateVarMaybe
(return CapLineStipple)
(liftM2 (,) (getInteger1 id GetLineStippleRepeat)
(getInteger1 fromIntegral GetLineStipplePattern))
(uncurry glLineStipple)
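-- Added usage sketch (hypothetical, not part of the original module):
--
-- @
-- lineStipple $= Just (1, 0x00FF)   -- dashed lines: 8 pixels on, 8 off
-- lineStipple $= Nothing            -- disable stippling again
-- @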
--------------------------------------------------------------------------------
-- | Controls whether line antialiasing is enabled. The initial state is
-- 'Graphics.Rendering.OpenGL.GL.Capability.Disabled'.
lineSmooth :: StateVar Capability
lineSmooth = makeCapability CapLineSmooth
--------------------------------------------------------------------------------
-- | The smallest and largest supported width of aliased lines.
aliasedLineWidthRange :: GettableStateVar (GLfloat, GLfloat)
aliasedLineWidthRange =
makeGettableStateVar $ getFloat2 (,) GetAliasedLineWidthRange
-- | The smallest and largest supported width of antialiased lines.
smoothLineWidthRange :: GettableStateVar (GLfloat, GLfloat)
smoothLineWidthRange =
makeGettableStateVar $ getFloat2 (,) GetSmoothLineWidthRange
-- | The antialiased line width granularity, i.e. the size difference between
-- supported widths.
smoothLineWidthGranularity :: GettableStateVar GLfloat
smoothLineWidthGranularity =
makeGettableStateVar $ getFloat1 id GetSmoothLineWidthGranularity
| hesiod/OpenGL | src/Graphics/Rendering/OpenGL/GL/LineSegments.hs | bsd-3-clause | 6,619 | 0 | 9 | 965 | 362 | 248 | 114 | 30 | 1 |
------------------------------------------------------------------------------
--
-- Haskell: The Craft of Functional Programming, 3e
-- Simon Thompson
-- (c) Addison-Wesley, 1996-2011.
--
-- Chapter 10
--
-------------------------------------------------------------------------
-- Generalization: patterns of computation
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
module Chapter10 where
import Prelude hiding (map,filter,zipWith,foldr1,foldr,concat,and)
import Pictures hiding (flipV,beside)
import qualified Chapter7
-- Higher-order functions: functions as arguments
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- Mapping a function along a list.
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
map,map' :: (a -> b) -> [a] -> [b]
map' f xs = [ f x | x <- xs ] -- (map.0)
map f [] = [] -- (map.1)
map f (x:xs) = f x : map f xs -- (map.2)
-- Examples using map.
-- Double all the elements of a list ...
doubleAll :: [Integer] -> [Integer]
doubleAll xs = map double xs
where
double x = 2*x
-- ... convert characters to their numeric codes ...
convertChrs :: [Char] -> [Int]
convertChrs xs = map fromEnum xs
-- ... flip a Picture in a vertical mirror.
flipV :: Picture -> Picture
flipV xs = map reverse xs
-- Modelling properties as functions
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- Is an integer even?
isEven :: Integer -> Bool
isEven n = (n `mod` 2 == 0)
-- Is a list sorted?
isSorted :: [Integer] -> Bool
isSorted xs = (xs == iSort xs)
-- Filtering -- the filter function
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
filter :: (a -> Bool) -> [a] -> [a]
filter p [] = [] -- (filter.1)
filter p (x:xs)
| p x = x : filter p xs -- (filter.2)
| otherwise = filter p xs -- (filter.3)
-- A list comprehension also serves to define filter,
filter' p xs = [ x | x <- xs , p x ] -- (filter.0)
-- Combining zip and map -- the zipWith function
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
zipWith :: (a -> b -> c) -> [a] -> [b] -> [c]
zipWith f (x:xs) (y:ys) = f x y : zipWith f xs ys
zipWith f _ _ = []
beside :: Picture -> Picture -> Picture
beside pic1 pic2 = zipWith (++) pic1 pic2
-- Folding and primitive recursion
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- Folding an operation into a non-empty list
foldr1 :: (a -> a -> a) -> [a] -> a
foldr1 f [x] = x -- (foldr1.1)
foldr1 f (x:xs) = f x (foldr1 f xs) -- (foldr1.2)
-- Examples using foldr1
foldEx1 = foldr1 (+) [3,98,1]
foldEx2 = foldr1 (||) [False,True,False]
foldEx3 = foldr1 (++) ["Freak ", "Out" , "", "!"]
foldEx4 = foldr1 min [6]
foldEx5 = foldr1 (*) [1 .. 6]
-- Folding into an arbitrary list: using a starting value on the empty list.
foldr f s [] = s -- (foldr.1)
foldr f s (x:xs) = f x (foldr f s xs) -- (foldr.2)
-- Concatenating a list using foldr.
concat :: [[a]] -> [a]
concat xs = foldr (++) [] xs
-- Conjoining a list of Bool using foldr.
and :: [Bool] -> Bool
and bs = foldr (&&) True bs
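-- Added examples of foldr in action (foldEx6 and foldEx7 are illustrative
-- names, not from the original text).
foldEx6 = foldr (+) 0 [1,2,3] -- = 1 + (2 + (3 + 0)) = 6
foldEx7 = foldr (:) [] "abc" -- rebuilds the list: "abc"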
-- Can define foldr1 using foldr:
-- foldr1 f (x:xs) = foldr f x xs -- (foldr1.0)
-- Folding in general -- foldr again
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- The type of foldr is more general than you would initially expect...
foldr :: (a -> b -> b) -> b -> [a] -> b
rev :: [a] -> [a]
rev xs = foldr snoc [] xs
snoc :: a -> [a] -> [a]
snoc x xs = xs ++ [x]
-- Sorting a list using foldr
iSort :: [Integer] -> [Integer]
iSort xs = foldr Chapter7.ins [] xs
-- From the exercises: a mystery function ...
mystery xs = foldr (++) [] (map sing xs)
sing x = [x]
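-- Added illustration (mysteryEx is not from the original text): sing wraps
-- each element in a singleton list and foldr (++) [] concatenates them back,
-- so mystery returns its argument unchanged.
mysteryEx = mystery [1,2,3] -- = [1,2,3]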
-- Generalizing: splitting up lists
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- Getting the first word from the front of a String ...
getWord :: String -> String
getWord [] = [] -- (getWord.1)
getWord (x:xs)
| elem x Chapter7.whitespace = [] -- (getWord.2)
| otherwise = x : getWord xs -- (getWord.3)
-- ... which generalizes to a function which gets items from the front of a list
-- until an item has the required property.
getUntil :: (a -> Bool) -> [a] -> [a]
getUntil p [] = []
getUntil p (x:xs)
| p x = []
| otherwise = x : getUntil p xs
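-- Added illustration (getUntilEx is not from the original text):
getUntilEx = getUntil (== ' ') "hello world" -- = "hello"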
-- The original getWord function defined from getUntil
-- getWord xs
-- = getUntil p xs
-- where
-- p x = elem x whitespace
| c089/haskell-craft3e | Chapter10.hs | mit | 4,289 | 8 | 9 | 973 | 1,216 | 682 | 534 | 63 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module Stack.Constants.Config
( distDirFromDir
, workDirFromDir
, distRelativeDir
, imageStagingDir
, projectDockerSandboxDir
, configCacheFile
, configCabalMod
, buildCachesDir
, testSuccessFile
, testBuiltFile
, hpcRelativeDir
, hpcDirFromDir
, objectInterfaceDirL
, templatesDir
) where
import Stack.Prelude
import Stack.Constants
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.PackageIdentifier
import Path
-- | Output .o/.hi directory.
objectInterfaceDirL :: HasBuildConfig env => Getting r env (Path Abs Dir)
objectInterfaceDirL = to $ \env -> -- FIXME is this idiomatic lens code?
let workDir = view workDirL env
root = view projectRootL env
in root </> workDir </> $(mkRelDir "odir/")
-- | The directory containing the files used for dirtiness check of source files.
buildCachesDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs Dir)
buildCachesDir dir =
liftM
(</> $(mkRelDir "stack-build-caches"))
(distDirFromDir dir)
-- | The filename used to mark tests as having succeeded
testSuccessFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory
-> m (Path Abs File)
testSuccessFile dir =
liftM
(</> $(mkRelFile "stack-test-success"))
(distDirFromDir dir)
-- | The filename used to mark tests as having built
testBuiltFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory
-> m (Path Abs File)
testBuiltFile dir =
liftM
(</> $(mkRelFile "stack-test-built"))
(distDirFromDir dir)
-- | The filename used for dirtiness check of config.
configCacheFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs File)
configCacheFile dir =
liftM
(</> $(mkRelFile "stack-config-cache"))
(distDirFromDir dir)
-- | The filename used for modification check of .cabal
configCabalMod :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs File)
configCabalMod dir =
liftM
(</> $(mkRelFile "stack-cabal-mod"))
(distDirFromDir dir)
-- | Directory for HPC work.
hpcDirFromDir
:: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs Dir)
hpcDirFromDir fp =
liftM (fp </>) hpcRelativeDir
-- | Relative location of directory for HPC work.
hpcRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> m (Path Rel Dir)
hpcRelativeDir =
liftM (</> $(mkRelDir "hpc")) distRelativeDir
-- | Package's build artifacts directory.
distDirFromDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs Dir)
distDirFromDir fp =
liftM (fp </>) distRelativeDir
-- | Package's working directory.
workDirFromDir :: (MonadReader env m, HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs Dir)
workDirFromDir fp = view $ workDirL.to (fp </>)
-- | Directory for project templates.
templatesDir :: Config -> Path Abs Dir
templatesDir config = view stackRootL config </> $(mkRelDir "templates")
-- | Relative location of build artifacts.
distRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> m (Path Rel Dir)
distRelativeDir = do
cabalPkgVer <- view cabalVersionL
platform <- platformGhcRelDir
wc <- view $ actualCompilerVersionL.to whichCompiler
-- Cabal version, suffixed with "_ghcjs" if we're using GHCJS.
envDir <-
parseRelDir $
(if wc == Ghcjs then (++ "_ghcjs") else id) $
packageIdentifierString $
PackageIdentifier cabalPackageName cabalPkgVer
platformAndCabal <- useShaPathOnWindows (platform </> envDir)
workDir <- view workDirL
return $
workDir </>
$(mkRelDir "dist") </>
platformAndCabal
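-- Added note (illustrative only, not from the original module): the resulting
-- relative path typically looks something like
-- @.stack-work/dist/x86_64-linux/Cabal-1.24.2.0@, where the platform and
-- Cabal-version segments (and any hashing on Windows) vary per setup.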
-- | Docker sandbox from project root.
projectDockerSandboxDir :: (MonadReader env m, HasConfig env)
=> Path Abs Dir -- ^ Project root
-> m (Path Abs Dir) -- ^ Docker sandbox
projectDockerSandboxDir projectRoot = do
workDir <- view workDirL
return $ projectRoot </> workDir </> $(mkRelDir "docker/")
-- | Image staging dir from project root.
imageStagingDir :: (MonadReader env m, HasConfig env, MonadThrow m)
=> Path Abs Dir -- ^ Project root
-> Int -- ^ Index of image
-> m (Path Abs Dir) -- ^ Docker sandbox
imageStagingDir projectRoot imageIdx = do
workDir <- view workDirL
idxRelDir <- parseRelDir (show imageIdx)
return $ projectRoot </> workDir </> $(mkRelDir "image") </> idxRelDir
| anton-dessiatov/stack | src/Stack/Constants/Config.hs | bsd-3-clause | 5,034 | 0 | 13 | 1,231 | 1,240 | 641 | 599 | 116 | 2 |
module Stack.Options.TestParser where
import Data.Maybe
import Data.Monoid.Extra
import Options.Applicative
import Options.Applicative.Args
import Options.Applicative.Builder.Extra
import Stack.Options.Utils
import Stack.Types.Config
-- | Parser for test arguments.
-- FIXME hide args
testOptsParser :: Bool -> Parser TestOptsMonoid
testOptsParser hide0 =
TestOptsMonoid
<$> firstBoolFlags
"rerun-tests"
"running already successful tests"
hide
<*> fmap
(fromMaybe [])
(optional
(argsOption
(long "test-arguments" <>
metavar "TEST_ARGS" <>
help "Arguments passed in to the test suite program" <>
hide)))
<*> optionalFirst
(switch
(long "coverage" <>
help "Generate a code coverage report" <>
hide))
<*> optionalFirst
(switch
(long "no-run-tests" <>
help "Disable running of tests. (Tests will still be built.)" <>
hide))
where hide = hideMods hide0
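-- Added note (illustrative only): the flags parsed above correspond to
-- command-line usage such as
--
-- > stack test --coverage --test-arguments "--match Foo"
--
-- where the argument given to --test-arguments is passed through to the
-- test-suite executable.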
| AndreasPK/stack | src/Stack/Options/TestParser.hs | bsd-3-clause | 1,314 | 0 | 17 | 571 | 204 | 107 | 97 | 34 | 1 |
{-# LANGUAGE CPP, Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude, MagicHash, StandaloneDeriving, BangPatterns,
KindSignatures, DataKinds, ConstraintKinds,
MultiParamTypeClasses, FunctionalDependencies #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
-- ip :: IP x a => a is strictly speaking ambiguous, but IP is magic
{-# LANGUAGE UndecidableSuperClasses #-}
-- Because of the type-variable superclasses for tuples
{-# OPTIONS_GHC -Wno-unused-imports #-}
-- -Wno-unused-imports needed for the GHC.Tuple import below. Sigh.
{-# OPTIONS_GHC -Wno-unused-top-binds #-}
-- -Wno-unused-top-binds is there (I hope) to stop Haddock complaining
-- about the constraint tuples being defined but not used
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Classes
-- Copyright : (c) The University of Glasgow, 1992-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- Basic classes.
--
-----------------------------------------------------------------------------
module GHC.Classes(
-- * Implicit parameters
IP(..),
-- * Equality and ordering
Eq(..),
Ord(..),
-- ** Monomorphic equality operators
-- | See GHC.Classes#matching_overloaded_methods_in_rules
eqInt, neInt,
eqWord, neWord,
eqChar, neChar,
eqFloat, eqDouble,
-- ** Monomorphic comparison operators
gtInt, geInt, leInt, ltInt, compareInt, compareInt#,
gtWord, geWord, leWord, ltWord, compareWord, compareWord#,
-- * Functions over Bool
(&&), (||), not,
-- * Integer arithmetic
divInt#, modInt#
) where
-- GHC.Magic is used in some derived instances
import GHC.Magic ()
import GHC.IntWord64
import GHC.Prim
import GHC.Tuple
import GHC.Types
#include "MachDeps.h"
infix 4 ==, /=, <, <=, >=, >
infixr 3 &&
infixr 2 ||
default () -- Double isn't available yet
-- | The syntax @?x :: a@ is desugared into @IP "x" a@
-- IP is declared very early, so that libraries can take
-- advantage of the implicit-call-stack feature
class IP (x :: Symbol) a | x -> a where
ip :: a
{- $matching_overloaded_methods_in_rules
Matching on class methods (e.g. @(==)@) in rewrite rules tends to be a bit
fragile. For instance, consider this motivating example from the @bytestring@
library,
> break :: (Word8 -> Bool) -> ByteString -> (ByteString, ByteString)
> breakByte :: Word8 -> ByteString -> (ByteString, ByteString)
> {-# RULES "break -> breakByte" forall a. break (== x) = breakByte x #-}
Here we have two functions, with @breakByte@ providing an optimized
implementation of @break@ where the predicate is merely testing for equality
with a known @Word8@. As written, however, this rule will be quite fragile as
the @(==)@ class operation rule may rewrite the predicate before our @break@
rule has a chance to fire.
For this reason, most of the primitive types in @base@ have 'Eq' and 'Ord'
instances defined in terms of helper functions with inlinings delayed to phase
1. For instance, @Word8@\'s @Eq@ instance looks like,
> instance Eq Word8 where
> (==) = eqWord8
> (/=) = neWord8
>
> eqWord8, neWord8 :: Word8 -> Word8 -> Bool
> eqWord8 (W8# x) (W8# y) = ...
> neWord8 (W8# x) (W8# y) = ...
> {-# INLINE [1] eqWord8 #-}
> {-# INLINE [1] neWord8 #-}
This allows us to save our @break@ rule above by rewriting it to instead match
against @eqWord8@,
> {-# RULES "break -> breakByte" forall a. break (`eqWord8` x) = breakByte x #-}
Currently this is only done for '(==)', '(/=)', '(<)', '(<=)', '(>)', and '(>=)'
for the types in "GHC.Word" and "GHC.Int".
-}
-- | The 'Eq' class defines equality ('==') and inequality ('/=').
-- All the basic datatypes exported by the "Prelude" are instances of 'Eq',
-- and 'Eq' may be derived for any datatype whose constituents are also
-- instances of 'Eq'.
--
-- Minimal complete definition: either '==' or '/='.
--
class Eq a where
(==), (/=) :: a -> a -> Bool
{-# INLINE (/=) #-}
{-# INLINE (==) #-}
x /= y = not (x == y)
x == y = not (x /= y)
{-# MINIMAL (==) | (/=) #-}
deriving instance Eq ()
deriving instance (Eq a, Eq b) => Eq (a, b)
deriving instance (Eq a, Eq b, Eq c) => Eq (a, b, c)
deriving instance (Eq a, Eq b, Eq c, Eq d) => Eq (a, b, c, d)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e) => Eq (a, b, c, d, e)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f)
=> Eq (a, b, c, d, e, f)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g)
=> Eq (a, b, c, d, e, f, g)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h)
=> Eq (a, b, c, d, e, f, g, h)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i)
=> Eq (a, b, c, d, e, f, g, h, i)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j)
=> Eq (a, b, c, d, e, f, g, h, i, j)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j, Eq k)
=> Eq (a, b, c, d, e, f, g, h, i, j, k)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j, Eq k, Eq l)
=> Eq (a, b, c, d, e, f, g, h, i, j, k, l)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j, Eq k, Eq l, Eq m)
=> Eq (a, b, c, d, e, f, g, h, i, j, k, l, m)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j, Eq k, Eq l, Eq m, Eq n)
=> Eq (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j, Eq k, Eq l, Eq m, Eq n, Eq o)
=> Eq (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
instance (Eq a) => Eq [a] where
{-# SPECIALISE instance Eq [[Char]] #-}
{-# SPECIALISE instance Eq [Char] #-}
{-# SPECIALISE instance Eq [Int] #-}
[] == [] = True
(x:xs) == (y:ys) = x == y && xs == ys
_xs == _ys = False
deriving instance Eq Bool
deriving instance Eq Ordering
instance Eq Word where
(==) = eqWord
(/=) = neWord
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] eqWord #-}
{-# INLINE [1] neWord #-}
eqWord, neWord :: Word -> Word -> Bool
(W# x) `eqWord` (W# y) = isTrue# (x `eqWord#` y)
(W# x) `neWord` (W# y) = isTrue# (x `neWord#` y)
-- See GHC.Classes#matching_overloaded_methods_in_rules
instance Eq Char where
(==) = eqChar
(/=) = neChar
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] eqChar #-}
{-# INLINE [1] neChar #-}
eqChar, neChar :: Char -> Char -> Bool
(C# x) `eqChar` (C# y) = isTrue# (x `eqChar#` y)
(C# x) `neChar` (C# y) = isTrue# (x `neChar#` y)
instance Eq Float where
(==) = eqFloat
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] eqFloat #-}
eqFloat :: Float -> Float -> Bool
(F# x) `eqFloat` (F# y) = isTrue# (x `eqFloat#` y)
instance Eq Double where
(==) = eqDouble
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] eqDouble #-}
eqDouble :: Double -> Double -> Bool
(D# x) `eqDouble` (D# y) = isTrue# (x ==## y)
instance Eq Int where
(==) = eqInt
(/=) = neInt
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] eqInt #-}
{-# INLINE [1] neInt #-}
eqInt, neInt :: Int -> Int -> Bool
(I# x) `eqInt` (I# y) = isTrue# (x ==# y)
(I# x) `neInt` (I# y) = isTrue# (x /=# y)
#if WORD_SIZE_IN_BITS < 64
instance Eq TyCon where
(==) (TyCon hi1 lo1 _ _) (TyCon hi2 lo2 _ _)
= isTrue# (hi1 `eqWord64#` hi2) && isTrue# (lo1 `eqWord64#` lo2)
instance Ord TyCon where
compare (TyCon hi1 lo1 _ _) (TyCon hi2 lo2 _ _)
| isTrue# (hi1 `gtWord64#` hi2) = GT
| isTrue# (hi1 `ltWord64#` hi2) = LT
| isTrue# (lo1 `gtWord64#` lo2) = GT
| isTrue# (lo1 `ltWord64#` lo2) = LT
| True = EQ
#else
instance Eq TyCon where
(==) (TyCon hi1 lo1 _ _) (TyCon hi2 lo2 _ _)
= isTrue# (hi1 `eqWord#` hi2) && isTrue# (lo1 `eqWord#` lo2)
instance Ord TyCon where
compare (TyCon hi1 lo1 _ _) (TyCon hi2 lo2 _ _)
| isTrue# (hi1 `gtWord#` hi2) = GT
| isTrue# (hi1 `ltWord#` hi2) = LT
| isTrue# (lo1 `gtWord#` lo2) = GT
| isTrue# (lo1 `ltWord#` lo2) = LT
| True = EQ
#endif
-- | The 'Ord' class is used for totally ordered datatypes.
--
-- Instances of 'Ord' can be derived for any user-defined
-- datatype whose constituent types are in 'Ord'. The declared order
-- of the constructors in the data declaration determines the ordering
-- in derived 'Ord' instances. The 'Ordering' datatype allows a single
-- comparison to determine the precise ordering of two objects.
--
-- Minimal complete definition: either 'compare' or '<='.
-- Using 'compare' can be more efficient for complex types.
--
class (Eq a) => Ord a where
compare :: a -> a -> Ordering
(<), (<=), (>), (>=) :: a -> a -> Bool
max, min :: a -> a -> a
compare x y = if x == y then EQ
-- NB: must be '<=' not '<' to validate the
-- above claim about the minimal things that
-- can be defined for an instance of Ord:
else if x <= y then LT
else GT
x < y = case compare x y of { LT -> True; _ -> False }
x <= y = case compare x y of { GT -> False; _ -> True }
x > y = case compare x y of { GT -> True; _ -> False }
x >= y = case compare x y of { LT -> False; _ -> True }
-- These two default methods use '<=' rather than 'compare'
-- because the latter is often more expensive
max x y = if x <= y then y else x
min x y = if x <= y then x else y
{-# MINIMAL compare | (<=) #-}
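-- Added illustration (hypothetical type, not part of this module): thanks to
-- the default methods above, defining only 'compare' is enough:
--
-- > newtype Inches = Inches Int deriving Eq
-- > instance Ord Inches where
-- >   compare (Inches x) (Inches y) = compare x y
-- >
-- > -- max (Inches 2) (Inches 5) == Inches 5, via the default 'max'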
deriving instance Ord ()
deriving instance (Ord a, Ord b) => Ord (a, b)
deriving instance (Ord a, Ord b, Ord c) => Ord (a, b, c)
deriving instance (Ord a, Ord b, Ord c, Ord d) => Ord (a, b, c, d)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e) => Ord (a, b, c, d, e)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f)
=> Ord (a, b, c, d, e, f)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g)
=> Ord (a, b, c, d, e, f, g)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h)
=> Ord (a, b, c, d, e, f, g, h)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i)
=> Ord (a, b, c, d, e, f, g, h, i)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j)
=> Ord (a, b, c, d, e, f, g, h, i, j)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j, Ord k)
=> Ord (a, b, c, d, e, f, g, h, i, j, k)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j, Ord k, Ord l)
=> Ord (a, b, c, d, e, f, g, h, i, j, k, l)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j, Ord k, Ord l, Ord m)
=> Ord (a, b, c, d, e, f, g, h, i, j, k, l, m)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j, Ord k, Ord l, Ord m, Ord n)
=> Ord (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j, Ord k, Ord l, Ord m, Ord n, Ord o)
=> Ord (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
instance (Ord a) => Ord [a] where
{-# SPECIALISE instance Ord [[Char]] #-}
{-# SPECIALISE instance Ord [Char] #-}
{-# SPECIALISE instance Ord [Int] #-}
compare [] [] = EQ
compare [] (_:_) = LT
compare (_:_) [] = GT
compare (x:xs) (y:ys) = case compare x y of
EQ -> compare xs ys
other -> other
deriving instance Ord Bool
deriving instance Ord Ordering
-- We don't use deriving for Ord Char, because for Ord the derived
-- instance defines only compare, which takes two primops. Then
-- '>' uses compare, and therefore takes two primops instead of one.
instance Ord Char where
(C# c1) > (C# c2) = isTrue# (c1 `gtChar#` c2)
(C# c1) >= (C# c2) = isTrue# (c1 `geChar#` c2)
(C# c1) <= (C# c2) = isTrue# (c1 `leChar#` c2)
(C# c1) < (C# c2) = isTrue# (c1 `ltChar#` c2)
instance Ord Float where
(F# x) `compare` (F# y)
= if isTrue# (x `ltFloat#` y) then LT
else if isTrue# (x `eqFloat#` y) then EQ
else GT
(F# x) < (F# y) = isTrue# (x `ltFloat#` y)
(F# x) <= (F# y) = isTrue# (x `leFloat#` y)
(F# x) >= (F# y) = isTrue# (x `geFloat#` y)
(F# x) > (F# y) = isTrue# (x `gtFloat#` y)
instance Ord Double where
(D# x) `compare` (D# y)
= if isTrue# (x <## y) then LT
else if isTrue# (x ==## y) then EQ
else GT
(D# x) < (D# y) = isTrue# (x <## y)
(D# x) <= (D# y) = isTrue# (x <=## y)
(D# x) >= (D# y) = isTrue# (x >=## y)
(D# x) > (D# y) = isTrue# (x >## y)
instance Ord Int where
compare = compareInt
(<) = ltInt
(<=) = leInt
(>=) = geInt
(>) = gtInt
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] gtInt #-}
{-# INLINE [1] geInt #-}
{-# INLINE [1] ltInt #-}
{-# INLINE [1] leInt #-}
gtInt, geInt, ltInt, leInt :: Int -> Int -> Bool
(I# x) `gtInt` (I# y) = isTrue# (x ># y)
(I# x) `geInt` (I# y) = isTrue# (x >=# y)
(I# x) `ltInt` (I# y) = isTrue# (x <# y)
(I# x) `leInt` (I# y) = isTrue# (x <=# y)
compareInt :: Int -> Int -> Ordering
(I# x#) `compareInt` (I# y#) = compareInt# x# y#
compareInt# :: Int# -> Int# -> Ordering
compareInt# x# y#
| isTrue# (x# <# y#) = LT
| isTrue# (x# ==# y#) = EQ
| True = GT
instance Ord Word where
compare = compareWord
(<) = ltWord
(<=) = leWord
(>=) = geWord
(>) = gtWord
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] gtWord #-}
{-# INLINE [1] geWord #-}
{-# INLINE [1] ltWord #-}
{-# INLINE [1] leWord #-}
gtWord, geWord, ltWord, leWord :: Word -> Word -> Bool
(W# x) `gtWord` (W# y) = isTrue# (x `gtWord#` y)
(W# x) `geWord` (W# y) = isTrue# (x `geWord#` y)
(W# x) `ltWord` (W# y) = isTrue# (x `ltWord#` y)
(W# x) `leWord` (W# y) = isTrue# (x `leWord#` y)
compareWord :: Word -> Word -> Ordering
(W# x#) `compareWord` (W# y#) = compareWord# x# y#
compareWord# :: Word# -> Word# -> Ordering
compareWord# x# y#
| isTrue# (x# `ltWord#` y#) = LT
| isTrue# (x# `eqWord#` y#) = EQ
| True = GT
-- OK, so they're technically not part of a class...:
-- Boolean functions
-- | Boolean \"and\"
(&&) :: Bool -> Bool -> Bool
True && x = x
False && _ = False
-- | Boolean \"or\"
(||) :: Bool -> Bool -> Bool
True || _ = True
False || x = x
-- | Boolean \"not\"
not :: Bool -> Bool
not True = False
not False = True
------------------------------------------------------------------------
-- These don't really belong here, but we don't have a better place to
-- put them
-- These functions have built-in rules.
{-# NOINLINE [0] divInt# #-}
{-# NOINLINE [0] modInt# #-}
divInt# :: Int# -> Int# -> Int#
x# `divInt#` y#
-- Be careful NOT to overflow if we do any additional arithmetic
-- on the arguments... the following previous version of this
-- code has problems with overflow:
-- | (x# ># 0#) && (y# <# 0#) = ((x# -# y#) -# 1#) `quotInt#` y#
-- | (x# <# 0#) && (y# ># 0#) = ((x# -# y#) +# 1#) `quotInt#` y#
= if isTrue# (x# ># 0#) && isTrue# (y# <# 0#) then ((x# -# 1#) `quotInt#` y#) -# 1#
else if isTrue# (x# <# 0#) && isTrue# (y# ># 0#) then ((x# +# 1#) `quotInt#` y#) -# 1#
else x# `quotInt#` y#
modInt# :: Int# -> Int# -> Int#
x# `modInt#` y#
= if isTrue# (x# ># 0#) && isTrue# (y# <# 0#) ||
isTrue# (x# <# 0#) && isTrue# (y# ># 0#)
then if isTrue# (r# /=# 0#) then r# +# y# else 0#
else r#
where
!r# = x# `remInt#` y#
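-- Added illustration (boxed-level view, not part of the original module):
-- these primops round toward negative infinity, which is what 'div' and
-- 'mod' on Int provide, in contrast to 'quot' and 'rem':
--
-- > (-7) `div` 2 == -4 && (-7) `mod` 2 == 1
-- > (-7) `quot` 2 == -3 && (-7) `rem` 2 == -1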
{- *************************************************************
* *
* Constraint tuples *
* *
************************************************************* -}
class ()
class (c1, c2) => (c1, c2)
class (c1, c2, c3) => (c1, c2, c3)
class (c1, c2, c3, c4) => (c1, c2, c3, c4)
class (c1, c2, c3, c4, c5) => (c1, c2, c3, c4, c5)
class (c1, c2, c3, c4, c5, c6) => (c1, c2, c3, c4, c5, c6)
class (c1, c2, c3, c4, c5, c6, c7) => (c1, c2, c3, c4, c5, c6, c7)
class (c1, c2, c3, c4, c5, c6, c7, c8) => (c1, c2, c3, c4, c5, c6, c7, c8)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17,c18)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60, c61)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60, c61)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60, c61, c62)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60, c61, c62)
| snoyberg/ghc | libraries/ghc-prim/GHC/Classes.hs | bsd-3-clause | 37,767 | 2 | 12 | 10,676 | 17,866 | 11,177 | 6,689 | -1 | -1 |
{-# LANGUAGE Arrows #-}
module CmdFail006 where
f = proc x -> ~(_ -< _)
| sdiehl/ghc | testsuite/tests/parser/should_fail/cmdFail006.hs | bsd-3-clause | 73 | 2 | 7 | 16 | 25 | 15 | 10 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pl-PL">
<title>Technology detection | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Zawartość</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Szukaj</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Ulubione</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/wappalyzer/src/main/javahelp/org/zaproxy/zap/extension/wappalyzer/resources/help_pl_PL/helpset_pl_PL.hs | apache-2.0 | 984 | 78 | 66 | 159 | 418 | 211 | 207 | -1 | -1 |
module E.Annotate where
import Control.Monad.Reader
import Data.Monoid
import qualified Data.Traversable as T
import E.E
import E.Program
import E.Subst
import GenUtil
import Info.Info(Info)
import Name.Id
import Util.HasSize
import Util.SetLike
annotateCombs :: forall m . Monad m =>
(IdMap (Maybe E))
-> (Id -> Info -> m Info) -- ^ annotate based on Id map
-> (E -> Info -> m Info) -- ^ annotate letbound bindings
-> (E -> Info -> m Info) -- ^ annotate lambdabound bindings
-> [Comb] -- ^ terms to annotate
-> m [Comb]
annotateCombs imap idann letann lamann cs = do
cs <- forM cs $ \comb -> do
nfo <- letann (combBody comb) (tvrInfo $ combHead comb)
nt <- annotate imap idann letann lamann (tvrType $ combHead comb)
return $ combHead_u (tvrInfo_s nfo . tvrType_s nt) comb
let nimap = fromList [ (combIdent c, Just . EVar $ combHead c) | c <- cs ]
`mappend` imap
f :: (IdMap (Maybe E)) -> E -> m E
f ni e = annotate ni idann letann lamann e
let mrule :: Rule -> m Rule
mrule r = do
let g tvr = do
nfo <- idann (tvrIdent tvr) (tvrInfo tvr)
let ntvr = tvr { tvrInfo = nfo }
return (ntvr,minsert (tvrIdent tvr) (Just $ EVar ntvr))
bs <- mapM g $ ruleBinds r
let nnimap = (foldr (.) id $ snds bs) nimap :: IdMap (Maybe E)
args <- mapM (f nnimap) (ruleArgs r)
body <- (f nnimap) (ruleBody r)
return r { ruleBinds = fsts bs, ruleBody = body, ruleArgs = args }
forM cs $ \comb -> do
rs <- mapM mrule (combRules comb)
nb <- f nimap (combBody comb)
return . combRules_s rs . combBody_s nb $ comb
annotateDs :: Monad m =>
(IdMap (Maybe E))
-> (Id -> Info -> m Info) -- ^ annotate based on Id map
-> (E -> Info -> m Info) -- ^ annotate letbound bindings
-> (E -> Info -> m Info) -- ^ annotate lambdabound bindings
-> [(TVr,E)] -- ^ terms to annotate
-> m [(TVr,E)]
annotateDs imap idann letann lamann ds = do
ELetRec { eDefs = ds', eBody = Unknown } <- annotate imap idann letann lamann (ELetRec ds Unknown)
return ds'
annotateProgram :: Monad m =>
(IdMap (Maybe E))
-> (Id -> Info -> m Info) -- ^ annotate based on Id map
-> (E -> Info -> m Info) -- ^ annotate letbound bindings
-> (E -> Info -> m Info) -- ^ annotate lambdabound bindings
-> Program -- ^ terms to annotate
-> m Program
annotateProgram imap idann letann lamann prog = do
ds <- annotateCombs imap idann letann lamann (progCombinators prog)
return $ programUpdate $ prog { progCombinators = ds }
type AM m = ReaderT (IdMap (Maybe E)) m
annotate :: Monad m =>
(IdMap (Maybe E))
-> (Id -> Info -> m Info) -- ^ annotate based on Id map
-> (E -> Info -> m Info) -- ^ annotate letbound bindings
-> (E -> Info -> m Info) -- ^ annotate lambdabound bindings
-> E -- ^ term to annotate
-> m E
annotate imap idann letann lamann e = runReaderT (f e) imap where
f eo@(EVar tvr@(TVr { tvrIdent = i, tvrType = t })) = do
mp <- ask
case mlookup i mp of
Just (Just v) -> return v
_ -> return eo
f (ELam tvr e) = lp ELam tvr e
f (EPi tvr e) = lp EPi tvr e
f (EAp a b) = liftM2 EAp (f a) (f b)
f (EError x e) = liftM (EError x) (f e)
f (EPrim x es e) = liftM2 (EPrim x) (mapM f es) (f e)
f ELetRec { eDefs = dl, eBody = e } = do
dl' <- flip mapM dl $ \ (t,e) -> do
nfo <- lift $ letann e (tvrInfo t)
return t { tvrInfo = nfo }
(as,rs) <- liftM unzip $ mapMntvr dl'
local (foldr (.) id rs) $ do
ds <- mapM f (snds dl)
e' <- f e
return $ ELetRec (zip as ds) e'
f (ELit l) = liftM ELit $ litSMapM f l
f Unknown = return Unknown
f e@(ESort {}) = return e
f ec@(ECase {}) = do
e' <- f $ eCaseScrutinee ec
let caseBind = eCaseBind ec
(b',r) <- ntvr [] caseBind
d <- local r $ T.mapM f $ eCaseDefault ec
let da (Alt lc@LitCons { litName = s, litArgs = vs, litType = t } e) = do
t' <- f t
(as,rs) <- liftM unzip $ mapMntvr vs
e' <- local (foldr (.) id rs) $ f e
return $ Alt lc { litArgs = as, litType = t' } e'
da (Alt l e) = do
l' <- T.mapM f l
e' <- f e
return $ Alt l' e'
alts <- local r (mapM da $ eCaseAlts ec)
t' <- f (eCaseType ec)
return $ caseUpdate ECase { eCaseAllFV = error "no eCaseAllFV needed", eCaseScrutinee = e', eCaseType = t', eCaseDefault = d, eCaseBind = b', eCaseAlts = alts }
lp lam tvr@(TVr { tvrIdent = n, tvrType = t}) e | n == emptyId = do
t' <- f t
nfo <- lift $ lamann e (tvrInfo tvr)
nfo <- lift $ idann n nfo
e' <- local (minsert n Nothing) $ f e
return $ lam (tvr { tvrIdent = emptyId, tvrType = t', tvrInfo = nfo}) e'
lp lam tvr e = do
nfo <- lift $ lamann e (tvrInfo tvr)
(tv,r) <- ntvr [] tvr { tvrInfo = nfo }
e' <- local r $ f e
return $ lam tv e'
mapMntvr ts = f ts [] where
f [] xs = return $ reverse xs
f (t:ts) rs = do
(t',r) <- ntvr vs t
local r $ f ts ((t',r):rs)
vs = [ tvrIdent x | x <- ts ]
ntvr xs tvr@(TVr { tvrIdent = n, tvrType = t}) | n == emptyId = do
t' <- f t
nfo <- lift $ idann emptyId (tvrInfo tvr)
let nvr = (tvr { tvrType = t', tvrInfo = nfo})
return (nvr,id)
ntvr xs tvr@(TVr {tvrIdent = i, tvrType = t}) = do
t' <- f t
ss <- ask
nfo' <- lift $ idann i (tvrInfo tvr)
let i' = mnv xs i ss
let nvr = (tvr { tvrIdent = i', tvrType = t', tvrInfo = nfo'})
case i == i' of
True -> return (nvr,minsert i (Just $ EVar nvr))
False -> return (nvr,minsert i (Just $ EVar nvr) . minsert i' Nothing)
mnv xs i ss
| isInvalidId i || i `member` ss = newId (size ss) isOkay
| otherwise = i
where isOkay i = (i `notMember` ss) && (i `notElem` xs)
| m-alvarez/jhc | src/E/Annotate.hs | mit | 6,286 | 0 | 22 | 2,197 | 2,771 | 1,383 | 1,388 | -1 | -1 |
{-# LANGUAGE CPP, MagicHash #-}
-- | Dynamically lookup up values from modules and loading them.
module DynamicLoading (
#ifdef GHCI
-- * Loading plugins
loadPlugins,
-- * Force loading information
forceLoadModuleInterfaces,
forceLoadNameModuleInterface,
forceLoadTyCon,
-- * Finding names
lookupRdrNameInModuleForPlugins,
-- * Loading values
getValueSafely,
getHValueSafely,
lessUnsafeCoerce
#endif
) where
#ifdef GHCI
import Linker ( linkModule, getHValue )
import SrcLoc ( noSrcSpan )
import Finder ( findImportedModule, cannotFindModule )
import TcRnMonad ( initTcInteractive, initIfaceTcRn )
import LoadIface ( loadPluginInterface )
import RdrName ( RdrName, ImportSpec(..), ImpDeclSpec(..)
, ImpItemSpec(..), mkGlobalRdrEnv, lookupGRE_RdrName
, gre_name, mkRdrQual )
import OccName ( mkVarOcc )
import RnNames ( gresFromAvails )
import DynFlags
import Plugins ( Plugin, CommandLineOption )
import PrelNames ( pluginTyConName )
import HscTypes
import BasicTypes ( HValue )
import TypeRep ( mkTyConTy, pprTyThingCategory )
import Type ( Type, eqType )
import TyCon ( TyCon )
import Name ( Name, nameModule_maybe )
import Id ( idType )
import Module ( Module, ModuleName )
import Panic
import FastString
import ErrUtils
import Outputable
import Exception
import Hooks
import Data.Maybe ( mapMaybe )
import GHC.Exts ( unsafeCoerce# )
loadPlugins :: HscEnv -> IO [(ModuleName, Plugin, [CommandLineOption])]
loadPlugins hsc_env
= do { plugins <- mapM (loadPlugin hsc_env) to_load
; return $ map attachOptions $ to_load `zip` plugins }
where
dflags = hsc_dflags hsc_env
to_load = pluginModNames dflags
attachOptions (mod_nm, plug) = (mod_nm, plug, options)
where
options = [ option | (opt_mod_nm, option) <- pluginModNameOpts dflags
, opt_mod_nm == mod_nm ]
loadPlugin :: HscEnv -> ModuleName -> IO Plugin
loadPlugin hsc_env mod_name
= do { let plugin_rdr_name = mkRdrQual mod_name (mkVarOcc "plugin")
dflags = hsc_dflags hsc_env
; mb_name <- lookupRdrNameInModuleForPlugins hsc_env mod_name
plugin_rdr_name
; case mb_name of {
Nothing ->
throwGhcExceptionIO (CmdLineError $ showSDoc dflags $ hsep
[ ptext (sLit "The module"), ppr mod_name
, ptext (sLit "did not export the plugin name")
, ppr plugin_rdr_name ]) ;
Just name ->
do { plugin_tycon <- forceLoadTyCon hsc_env pluginTyConName
; mb_plugin <- getValueSafely hsc_env name (mkTyConTy plugin_tycon)
; case mb_plugin of
Nothing ->
throwGhcExceptionIO (CmdLineError $ showSDoc dflags $ hsep
[ ptext (sLit "The value"), ppr name
, ptext (sLit "did not have the type")
, ppr pluginTyConName, ptext (sLit "as required")])
Just plugin -> return plugin } } }
-- | Force the interfaces for the given modules to be loaded. The 'SDoc' parameter is used
-- for debugging (@-ddump-if-trace@) only: it is shown as the reason why the module is being loaded.
forceLoadModuleInterfaces :: HscEnv -> SDoc -> [Module] -> IO ()
forceLoadModuleInterfaces hsc_env doc modules
= (initTcInteractive hsc_env $
initIfaceTcRn $
mapM_ (loadPluginInterface doc) modules)
>> return ()
-- | Force the interface for the module containing the name to be loaded. The 'SDoc' parameter is used
-- for debugging (@-ddump-if-trace@) only: it is shown as the reason why the module is being loaded.
forceLoadNameModuleInterface :: HscEnv -> SDoc -> Name -> IO ()
forceLoadNameModuleInterface hsc_env reason name = do
let name_modules = mapMaybe nameModule_maybe [name]
forceLoadModuleInterfaces hsc_env reason name_modules
-- | Load the 'TyCon' associated with the given name, come hell or high water. Fails if:
--
-- * The interface could not be loaded
-- * The name is not that of a 'TyCon'
-- * The name did not exist in the loaded module
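--
-- A usage sketch, mirroring 'loadPlugin' above (which fetches the wired-in
-- 'Plugin' type constructor):
--
-- > plugin_tycon <- forceLoadTyCon hsc_env pluginTyConName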
forceLoadTyCon :: HscEnv -> Name -> IO TyCon
forceLoadTyCon hsc_env con_name = do
forceLoadNameModuleInterface hsc_env (ptext (sLit "contains a name used in an invocation of loadTyConTy")) con_name
mb_con_thing <- lookupTypeHscEnv hsc_env con_name
case mb_con_thing of
Nothing -> throwCmdLineErrorS dflags $ missingTyThingError con_name
Just (ATyCon tycon) -> return tycon
Just con_thing -> throwCmdLineErrorS dflags $ wrongTyThingError con_name con_thing
where dflags = hsc_dflags hsc_env
-- | Loads the value corresponding to a 'Name' if that value has the given 'Type'. This only provides limited safety
-- in that it is up to the caller to ensure that the given type really corresponds to the type at which the return value is used!
--
-- If the value found was not of the correct type, returns @Nothing@. Any other condition results in an exception:
--
-- * If we could not load the name's module
-- * If the thing being loaded is not a value
-- * If the Name does not exist in the module
-- * If the link failed
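--
-- A usage sketch, as in 'loadPlugin' above (@name@ and @plugin_tycon@ are
-- assumed to be already resolved):
--
-- > mb_plugin <- getValueSafely hsc_env name (mkTyConTy plugin_tycon)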
getValueSafely :: HscEnv -> Name -> Type -> IO (Maybe a)
getValueSafely hsc_env val_name expected_type = do
mb_hval <- lookupHook getValueSafelyHook getHValueSafely dflags hsc_env val_name expected_type
case mb_hval of
Nothing -> return Nothing
Just hval -> do
value <- lessUnsafeCoerce dflags "getValueSafely" hval
return (Just value)
where
dflags = hsc_dflags hsc_env
getHValueSafely :: HscEnv -> Name -> Type -> IO (Maybe HValue)
getHValueSafely hsc_env val_name expected_type = do
forceLoadNameModuleInterface hsc_env (ptext (sLit "contains a name used in an invocation of getHValueSafely")) val_name
-- Now look up the names for the value and type constructor in the type environment
mb_val_thing <- lookupTypeHscEnv hsc_env val_name
case mb_val_thing of
Nothing -> throwCmdLineErrorS dflags $ missingTyThingError val_name
Just (AnId id) -> do
-- Check the value type in the interface against the type recovered from the type constructor
-- before finally casting the value to the type we assume corresponds to that constructor
if expected_type `eqType` idType id
then do
-- Link in the module that contains the value, if it has such a module
case nameModule_maybe val_name of
Just mod -> do linkModule hsc_env mod
return ()
Nothing -> return ()
             -- Find the value that we just linked in and cast it, given that we have proved its type
hval <- getHValue hsc_env val_name
return (Just hval)
else return Nothing
Just val_thing -> throwCmdLineErrorS dflags $ wrongTyThingError val_name val_thing
where dflags = hsc_dflags hsc_env
-- | Coerce a value as usual, but:
--
-- 1) Evaluate it immediately to get a segfault early if the coercion was wrong
--
-- 2) Wrap it in some debug messages at verbosity 3 or higher so we can see what happened
-- if it /does/ segfault
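--
-- Illustrative use, as in 'getValueSafely' above (@hval@ is assumed to be a
-- linked 'HValue' whose type has already been checked):
--
-- > value <- lessUnsafeCoerce dflags "getValueSafely" hval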
lessUnsafeCoerce :: DynFlags -> String -> a -> IO b
lessUnsafeCoerce dflags context what = do
debugTraceMsg dflags 3 $ (ptext $ sLit "Coercing a value in") <+> (text context) <> (ptext $ sLit "...")
output <- evaluate (unsafeCoerce# what)
debugTraceMsg dflags 3 $ ptext $ sLit "Successfully evaluated coercion"
return output
-- | Finds the 'Name' corresponding to the given 'RdrName' in the
-- context of the 'ModuleName'. Returns @Nothing@ if no such 'Name'
-- could be found. Any other condition results in an exception:
--
-- * If the module could not be found
-- * If we could not determine the imports of the module
--
-- Can only be used for looking up names while loading plugins (and is
-- *not* suitable for use within plugins). The interface file is
-- loaded very partially: just enough that it can be used, without its
-- rules and instances affecting (and being linked from!) the module
-- being compiled. This was introduced by 57d6798.
--
-- See Note [Care with plugin imports] in LoadIface.
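--
-- A usage sketch, as in 'loadPlugin' above (looking up the exported @plugin@
-- value of a plugin module):
--
-- > mb_name <- lookupRdrNameInModuleForPlugins hsc_env mod_name
-- >                (mkRdrQual mod_name (mkVarOcc "plugin"))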
lookupRdrNameInModuleForPlugins :: HscEnv -> ModuleName -> RdrName -> IO (Maybe Name)
lookupRdrNameInModuleForPlugins hsc_env mod_name rdr_name = do
-- First find the package the module resides in by searching exposed packages and home modules
found_module <- findImportedModule hsc_env mod_name Nothing
case found_module of
Found _ mod -> do
-- Find the exports of the module
(_, mb_iface) <- initTcInteractive hsc_env $
initIfaceTcRn $
loadPluginInterface doc mod
case mb_iface of
Just iface -> do
-- Try and find the required name in the exports
let decl_spec = ImpDeclSpec { is_mod = mod_name, is_as = mod_name
, is_qual = False, is_dloc = noSrcSpan }
imp_spec = ImpSpec decl_spec ImpAll
env = mkGlobalRdrEnv (gresFromAvails (Just imp_spec) (mi_exports iface))
case lookupGRE_RdrName rdr_name env of
[gre] -> return (Just (gre_name gre))
[] -> return Nothing
_ -> panic "lookupRdrNameInModule"
Nothing -> throwCmdLineErrorS dflags $ hsep [ptext (sLit "Could not determine the exports of the module"), ppr mod_name]
err -> throwCmdLineErrorS dflags $ cannotFindModule dflags mod_name err
where
dflags = hsc_dflags hsc_env
doc = ptext (sLit "contains a name used in an invocation of lookupRdrNameInModule")
wrongTyThingError :: Name -> TyThing -> SDoc
wrongTyThingError name got_thing = hsep [ptext (sLit "The name"), ppr name, ptext (sLit "is not that of a value but rather a"), pprTyThingCategory got_thing]
missingTyThingError :: Name -> SDoc
missingTyThingError name = hsep [ptext (sLit "The name"), ppr name, ptext (sLit "is not in the type environment: are you sure it exists?")]
throwCmdLineErrorS :: DynFlags -> SDoc -> IO a
throwCmdLineErrorS dflags = throwCmdLineError . showSDoc dflags
throwCmdLineError :: String -> IO a
throwCmdLineError = throwGhcExceptionIO . CmdLineError
#endif
| urbanslug/ghc | compiler/main/DynamicLoading.hs | bsd-3-clause | 10,751 | 1 | 24 | 2,945 | 1,980 | 1,028 | 952 | 2 | 0 |
{-# LANGUAGE PolyKinds , GADTs, ScopedTypeVariables, PatternSynonyms,
ViewPatterns #-}
module T12968 where
data TypeRep (a :: k)
data TRAppG (fun :: k2) where
TRAppG :: forall k1 k2 (a :: k1 -> k2) (b :: k1) .
TypeRep a -> TypeRep b -> TRAppG (a b)
pattern TRApp :: forall k2 (fun :: k2). ()
=> forall k1 (a :: k1 -> k2) (b :: k1). (fun ~ a b)
=> TypeRep a -> TypeRep b -> TypeRep fun
pattern TRApp a b <- ((undefined :: TypeRep fun -> TRAppG fun) -> TRAppG a b)
| sdiehl/ghc | testsuite/tests/patsyn/should_compile/T12968.hs | bsd-3-clause | 515 | 0 | 12 | 147 | 196 | 112 | 84 | -1 | -1 |
module Distribution.Simple.Test.LibV09
( runTest
-- Test stub
, simpleTestStub
, stubFilePath, stubMain, stubName, stubWriteLog
, writeSimpleTestStub
) where
import Distribution.Compat.CreatePipe ( createPipe )
import Distribution.Compat.Environment ( getEnvironment )
import Distribution.Compat.TempFile ( openTempFile )
import Distribution.ModuleName ( ModuleName )
import qualified Distribution.PackageDescription as PD
import Distribution.Simple.Build.PathsModule ( pkgPathEnvVar )
import Distribution.Simple.BuildPaths ( exeExtension )
import Distribution.Simple.Compiler ( compilerInfo )
import Distribution.Simple.Hpc ( guessWay, markupTest, tixDir, tixFilePath )
import Distribution.Simple.InstallDirs
( fromPathTemplate, initialPathTemplateEnv, PathTemplateVariable(..)
, substPathTemplate , toPathTemplate, PathTemplate )
import qualified Distribution.Simple.LocalBuildInfo as LBI
import Distribution.Simple.Setup
( TestFlags(..), TestShowDetails(..), fromFlag, configCoverage )
import Distribution.Simple.Test.Log
import Distribution.Simple.Utils
( die, notice, rawSystemIOWithEnv, addLibraryPath )
import Distribution.System ( Platform (..) )
import Distribution.TestSuite
import Distribution.Text
import Distribution.Verbosity ( normal )
import Control.Exception ( bracket )
import Control.Monad ( when, unless )
import Data.Maybe ( mapMaybe )
import System.Directory
( createDirectoryIfMissing, doesDirectoryExist, doesFileExist
, getCurrentDirectory, removeDirectoryRecursive, removeFile
, setCurrentDirectory )
import System.Exit ( ExitCode(..), exitWith )
import System.FilePath ( (</>), (<.>) )
import System.IO ( hClose, hGetContents, hPutStr )
runTest :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> TestFlags
-> PD.TestSuite
-> IO TestSuiteLog
runTest pkg_descr lbi flags suite = do
let isCoverageEnabled = fromFlag $ configCoverage $ LBI.configFlags lbi
way = guessWay lbi
pwd <- getCurrentDirectory
existingEnv <- getEnvironment
let cmd = LBI.buildDir lbi </> stubName suite
</> stubName suite <.> exeExtension
-- Check that the test executable exists.
exists <- doesFileExist cmd
unless exists $ die $ "Error: Could not find test program \"" ++ cmd
++ "\". Did you build the package first?"
-- Remove old .tix files if appropriate.
unless (fromFlag $ testKeepTix flags) $ do
let tDir = tixDir distPref way $ PD.testName suite
exists' <- doesDirectoryExist tDir
when exists' $ removeDirectoryRecursive tDir
-- Create directory for HPC files.
createDirectoryIfMissing True $ tixDir distPref way $ PD.testName suite
-- Write summary notices indicating start of test suite
notice verbosity $ summarizeSuiteStart $ PD.testName suite
suiteLog <- bracket openCabalTemp deleteIfExists $ \tempLog -> do
(rIn, wIn) <- createPipe
(rOut, wOut) <- createPipe
-- Prepare standard input for test executable
--appendFile tempInput $ show (tempInput, PD.testName suite)
hPutStr wIn $ show (tempLog, PD.testName suite)
hClose wIn
-- Run test executable
_ <- do let opts = map (testOption pkg_descr lbi suite) $ testOptions flags
dataDirPath = pwd </> PD.dataDir pkg_descr
tixFile = pwd </> tixFilePath distPref way (PD.testName suite)
pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath)
: existingEnv
shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled]
++ pkgPathEnv
-- Add (DY)LD_LIBRARY_PATH if needed
shellEnv' <- if LBI.withDynExe lbi
then do
let (Platform _ os) = LBI.hostPlatform lbi
clbi = LBI.getComponentLocalBuildInfo
lbi
(LBI.CTestName
(PD.testName suite))
paths <- LBI.depLibraryPaths
True False lbi clbi
return (addLibraryPath os paths shellEnv)
else return shellEnv
rawSystemIOWithEnv verbosity cmd opts Nothing (Just shellEnv')
-- these handles are closed automatically
(Just rIn) (Just wOut) (Just wOut)
-- Generate final log file name
let finalLogName l = testLogDir
</> testSuiteLogPath
(fromFlag $ testHumanLog flags) pkg_descr lbi
(testSuiteName l) (testLogs l)
-- Generate TestSuiteLog from executable exit code and a machine-
-- readable test log
suiteLog <- fmap ((\l -> l { logFile = finalLogName l }) . read)
$ readFile tempLog
-- Write summary notice to log file indicating start of test suite
appendFile (logFile suiteLog) $ summarizeSuiteStart $ PD.testName suite
-- Append contents of temporary log file to the final human-
-- readable log file
logText <- hGetContents rOut
appendFile (logFile suiteLog) logText
-- Write end-of-suite summary notice to log file
appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog
-- Show the contents of the human-readable log file on the terminal
-- if there is a failure and/or detailed output is requested
let details = fromFlag $ testShowDetails flags
whenPrinting = when $ (details > Never)
&& (not (suitePassed $ testLogs suiteLog) || details == Always)
&& verbosity >= normal
whenPrinting $ putStr $ unlines $ lines logText
return suiteLog
-- Write summary notice to terminal indicating end of test suite
notice verbosity $ summarizeSuiteFinish suiteLog
when isCoverageEnabled $
markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite
return suiteLog
where
deleteIfExists file = do
exists <- doesFileExist file
when exists $ removeFile file
testLogDir = distPref </> "test"
openCabalTemp = do
(f, h) <- openTempFile testLogDir $ "cabal-test-" <.> "log"
hClose h >> return f
distPref = fromFlag $ testDistPref flags
verbosity = fromFlag $ testVerbosity flags
-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't
-- necessarily a path.
testOption :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> PD.TestSuite
-> PathTemplate
-> String
testOption pkg_descr lbi suite template =
fromPathTemplate $ substPathTemplate env template
where
env = initialPathTemplateEnv
(PD.package pkg_descr) (LBI.pkgKey lbi)
(compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++
[(TestSuiteNameVar, toPathTemplate $ PD.testName suite)]
-- Test stub ----------
-- | The name of the stub executable associated with a library 'TestSuite'.
stubName :: PD.TestSuite -> FilePath
stubName t = PD.testName t ++ "Stub"
-- | The filename of the source file for the stub executable associated with a
-- library 'TestSuite'.
stubFilePath :: PD.TestSuite -> FilePath
stubFilePath t = stubName t <.> "hs"
-- | Write the source file for a library 'TestSuite' stub executable.
writeSimpleTestStub :: PD.TestSuite -- ^ library 'TestSuite' for which a stub
-- is being created
-> FilePath -- ^ path to directory where stub source
-- should be located
-> IO ()
writeSimpleTestStub t dir = do
createDirectoryIfMissing True dir
let filename = dir </> stubFilePath t
PD.TestSuiteLibV09 _ m = PD.testInterface t
writeFile filename $ simpleTestStub m
-- | Source code for library test suite stub executable
simpleTestStub :: ModuleName -> String
simpleTestStub m = unlines
[ "module Main ( main ) where"
, "import Distribution.Simple.Test.LibV09 ( stubMain )"
, "import " ++ show (disp m) ++ " ( tests )"
, "main :: IO ()"
, "main = stubMain tests"
]
-- | Main function for test stubs. It was once written directly into the stub,
-- but minimizing the amount of code actually in the stub maximizes the number
-- of errors that can be detected when Cabal itself is compiled.
stubMain :: IO [Test] -> IO ()
stubMain tests = do
(f, n) <- fmap read getContents
dir <- getCurrentDirectory
results <- tests >>= stubRunTests
setCurrentDirectory dir
stubWriteLog f n results
-- | The test runner used in library "TestSuite" stub executables. Runs a list
-- of 'Test's. An executable calling this function is meant to be invoked as
-- the child of a Cabal process during @.\/setup test@. A 'TestSuiteLog',
-- provided by Cabal, is read from the standard input; it supplies the name of
-- the test suite and the location of the machine-readable test suite log file.
-- Human-readable log information is written to the standard output for capture
-- by the calling Cabal process.
stubRunTests :: [Test] -> IO TestLogs
stubRunTests tests = do
logs <- mapM stubRunTests' tests
return $ GroupLogs "Default" logs
where
stubRunTests' (Test t) = do
l <- run t >>= finish
summarizeTest normal Always l
return l
where
finish (Finished result) =
return TestLog
{ testName = name t
, testOptionsReturned = defaultOptions t
, testResult = result
}
finish (Progress _ next) = next >>= finish
stubRunTests' g@(Group {}) = do
logs <- mapM stubRunTests' $ groupTests g
return $ GroupLogs (groupName g) logs
stubRunTests' (ExtraOptions _ t) = stubRunTests' t
maybeDefaultOption opt =
maybe Nothing (\d -> Just (optionName opt, d)) $ optionDefault opt
defaultOptions testInst = mapMaybe maybeDefaultOption $ options testInst
-- | From a test stub, write the 'TestSuiteLog' to temporary file for the calling
-- Cabal process to read.
stubWriteLog :: FilePath -> String -> TestLogs -> IO ()
stubWriteLog f n logs = do
let testLog = TestSuiteLog { testSuiteName = n, testLogs = logs, logFile = f }
writeFile (logFile testLog) $ show testLog
when (suiteError logs) $ exitWith $ ExitFailure 2
when (suiteFailed logs) $ exitWith $ ExitFailure 1
exitWith ExitSuccess
| DavidAlphaFox/ghc | libraries/Cabal/Cabal/Distribution/Simple/Test/LibV09.hs | bsd-3-clause | 10,854 | 0 | 27 | 3,138 | 2,254 | 1,158 | 1,096 | 177 | 4 |
module B4 (myFringe) where
import D4 hiding (sumSquares)
import qualified D4
instance SameOrNot Float
where
isSameOrNot a b = a == b
isNotSame a b = a /= b
myFringe :: (Tree a) -> [a]
myFringe (Leaf x) = [x]
myFringe (Branch left right) = myFringe right
sumSquares ((x : xs)) = (x ^ 2) + (sumSquares xs)
sumSquares [] = 0
| kmate/HaRe | old/testing/renaming/B4_AstOut.hs | bsd-3-clause | 343 | 0 | 8 | 83 | 157 | 85 | 72 | 11 | 1 |
{-# OPTIONS -fglasgow-exts -O -dshow-passes #-}
module Foo where
import GHC.Base
foo :: Int -> Int
foo (I# n#) = bar i i
where i# = n# +# 1#
i = I# i#
bar :: Int -> Int -> Int
{-# INLINE [0] bar #-}
bar _ n = n
{- The trouble here was
*** Simplify:
Result size = 25
Result size = 25
Result size = 25
Result size = 25
Result size = 25
*** Simplify:
Result size = 25
Result size = 25
Result size = 25
Result size = 25
Result size = 25
etc.
The reason was this:
x = n# +# 1#
i = I# x
Being an unboxed value, we were treating the argument context of x
as interesting, and hence inlining x in the arg of I#. But then we just
float it out again, giving an infinite loop.
-}
| ezyang/ghc | testsuite/tests/eyeball/inline2.hs | bsd-3-clause | 810 | 0 | 7 | 291 | 82 | 45 | 37 | -1 | -1 |
module P004Spec where
import qualified P004 as P
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "isPalindrome" $
it "回文数判定" $ do
let t = True
let f = False
let input = [0, 1, 9, 10, 11, 12, 21, 22, 100, 101, 111, 112, 121, 1001, 1010, 2022, 3303, 4444, 4554]
let expected = [t, t, t, f, t, f, f, t, f, t, t, f, t, t, f, f, f, t, t]
map P.isPalindrome input `shouldBe` expected
describe "solveBasic" $
it "N桁の数を掛け合わせてできる最大の回文数" $
map P.solveBasic [1, 2] `shouldBe` [9, 9009]
describe "solve" $
it "N桁の数を掛け合わせてできる最大の回文数" $
map P.solve [1, 2] `shouldBe` [9, 9009]
| yyotti/euler_haskell | test/P004Spec.hs | mit | 747 | 0 | 13 | 179 | 310 | 177 | 133 | 20 | 1 |
#!/usr/bin/env runhaskell
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- import Control.Monad
-- import Data.Functor
-- import Data.Maybe
-- import Data.Monoid
import Debug.Trace
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import BasicPrelude hiding (empty)
import Prelude.Unicode
import Turtle
import Network.URI
import qualified Filesystem.Path.CurrentOS as P
f & g = g $ f
parseVCSLine ∷ Text → Either Text (URI,Text)
parseVCSLine l = case T.split (≡'@') l of
[uriStr,branch] → case parseURI (removeTrailingSlash $ T.unpack uriStr) of
Nothing → Left l
Just uri → Right (uri,branch)
_ → Left l
getVCSInfo ∷ Text → Shell [Text]
getVCSInfo pkg = do
vcs ← empty
& inproc "cabal-db" ["vcs", pkg]
& inproc "grep" ["://"]
& inshell "sed -r 's:\\x1B\\[[0-9;]*[mK]::g; s:^ *::'"
return $ lines vcs
stripDotGit x = fromMaybe x $ T.stripSuffix ".git" x
pathFromGitURI ∷ Text → Maybe Text
pathFromGitURI p = r $ reverse $ T.split (≡'/') p
where r [] = Nothing
r [""] = Nothing
r ("":xs) = r xs
r (x:_) = Just $ stripDotGit x
run ∷ Text → IO ExitCode
run x = do
wd ← pwd
echo $ T.pack $ concat["(", P.encodeString wd, ")$", T.unpack x]
shell x empty
removeTrailingSlash x = fromMaybe x $ T.unpack <$> T.stripSuffix "/" (T.pack x)
printVCS ∷ (URI,Text) → IO ()
printVCS (uri,br) = do
(pathFromGitURI $ T.pack $ uriPath uri) & \case
Nothing → return()
Just "zlib" → return()
Just d → do
h ← home
echo $ "cd " <> T.pack(P.encodeString(h <> "warpdeps"))
cd $ h <> "warpdeps"
run $ "git clone " <> stripDotGit(show uri) <> ".git"
ok ← testdir $ fromText d
if not ok then return() else do
cd $ fromText d
run $ "git checkout " <> br
run $ "src do-all -m program"
run $ "src push"
return()
main ∷ IO ()
main = sh $ do
x ← map parseVCSLine <$> getVCSInfo "warp"
forM (lefts x) $ traceM . T.unpack . ("Failed to parse VCS URI line: " <>)
forM (rights x) $ liftIO . printVCS
| sourcegraph/srclib-haskell | process-all-dependencies.hs | mit | 2,199 | 5 | 18 | 530 | 825 | 414 | 411 | 61 | 4 |
import Control.Monad (unless)
import Test.Hspec (Spec, describe, expectationFailure, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import House (rhyme)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = describe "rhyme" $ do
-- First we test the input, line by line, to give more
-- useful error messages.
it "matches lines" $ sequence_ lineAssertions
          -- Finally, because testing line by line cannot
          -- detect a missing newline at the end of the
          -- lyrics, we also test the full song.
it "matches full song" $ rhyme `shouldBe` lyrics
where
lineAssertions = zipWith checkLine [1 :: Int ..] $ zipMaybe (lines rhyme) (lines lyrics)
checkLine lineno (got, want) =
unless (got == want) $
expectationFailure $ "mismatch at line " ++ show lineno ++ "\nexpected: " ++ show want ++ "\n but got: " ++ show got
zipMaybe [] [] = []
zipMaybe (x:xs) [] = (Just x , Nothing) : zipMaybe xs []
zipMaybe [] (y:ys) = (Nothing, Just y ) : zipMaybe [] ys
zipMaybe (x:xs) (y:ys) = (Just x , Just y ) : zipMaybe xs ys
-- Lyrics extracted from `exercism/problem-specifications` on 2016-09-23.
lyrics :: String
lyrics = "This is the house that Jack built.\n\
\\n\
\This is the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the man all tattered and torn\n\
\that kissed the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the priest all shaven and shorn\n\
\that married the man all tattered and torn\n\
\that kissed the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the rooster that crowed in the morn\n\
\that woke the priest all shaven and shorn\n\
\that married the man all tattered and torn\n\
\that kissed the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the farmer sowing his corn\n\
\that kept the rooster that crowed in the morn\n\
\that woke the priest all shaven and shorn\n\
\that married the man all tattered and torn\n\
\that kissed the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the horse and the hound and the horn\n\
\that belonged to the farmer sowing his corn\n\
\that kept the rooster that crowed in the morn\n\
\that woke the priest all shaven and shorn\n\
\that married the man all tattered and torn\n\
\that kissed the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n"
-- 473a8c3f65f5e8aba509bad8d3632a10ee4927fe
| exercism/xhaskell | exercises/practice/house/test/Tests.hs | mit | 4,864 | 0 | 14 | 1,616 | 392 | 209 | 183 | 20 | 4 |
-- CamelCase Method
-- https://www.codewars.com/kata/587731fda577b3d1b0001196
module CamelCase.JorgeVS.Kata where
import Data.Char (toUpper)
camelCase :: String -> String
camelCase = concatMap (\(x:xs) -> toUpper x:xs) . words
| gafiatulin/codewars | src/6 kyu/CamelCase.hs | mit | 230 | 0 | 10 | 28 | 62 | 36 | 26 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module InTheKnow.Routes.Common.Templates (
base
) where
import Prelude hiding (head, div)
import Text.Blaze.Html5 hiding (base)
import qualified Text.Blaze.Html5.Attributes as A
import Data.Text (Text)
import Data.Monoid ((<>))
base :: Text -> Html -> Html
base t content =
docTypeHtml $ do
head $ do
title (toHtml $ t <> " | InTheKnow")
body $ do
div ! A.class_ "content" $ content
div ! A.class_ "content2" $ content
| jb55/intheknow | InTheKnow/Routes/Common/Templates.hs | mit | 488 | 0 | 14 | 104 | 160 | 90 | 70 | 16 | 1 |
{-# LANGUAGE RecordWildCards #-}
{- |
Generate and solve friction constraints for colliding objects.
-}
module Physics.Constraints.Contact.Friction where
import Control.Lens
import Physics.Constraint
import Physics.Constraints.SolutionProcessors
import Physics.Constraints.Types
import Physics.Contact.Types
import Physics.Linear
import Utils.Utils
constraintGen :: Flipping Contact
-> (PhysicalObj, PhysicalObj)
-> Constraint
constraintGen fContact ab =
flipExtract $ flipMap toConstraint fContact ab
{-# INLINE constraintGen #-}
toConstraint :: Contact
-> (PhysicalObj, PhysicalObj)
-> Constraint
toConstraint c ab = Constraint (jacobian c ab) 0
{-# INLINE toConstraint #-}
jacobian :: Contact
-> (PhysicalObj, PhysicalObj)
-> V6
jacobian Contact {..} (a, b) = ja `join3v3` jb
where
ja = ta `append2` ((p' `minusV2` xa) `crossV2` ta)
jb = tb `append2` ((p' `minusV2` xb) `crossV2` tb)
xa = _physObjPos a
xb = _physObjPos b
(P2 p') = _contactCenter
ta = negateV2 tb
tb = clockwiseV2 n
n = _contactNormal
{-# INLINE jacobian #-}
pairMu :: (Double, Double) -> Double
pairMu (ua, ub) = (ua + ub) / 2
{-# INLINE pairMu #-}
solutionProcessor :: (Double, Double)
-> Lagrangian
-> Lagrangian
-> Lagrangian
-> Processed Lagrangian
solutionProcessor ab nonpen = clampAbs (nonpen & lagrangianVal *~ pairMu ab)
{-# INLINE solutionProcessor #-}
| ublubu/shapes | shapes/src/Physics/Constraints/Contact/Friction.hs | mit | 1,593 | 0 | 11 | 445 | 391 | 224 | 167 | 43 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
module Database.Persist.Sql.Raw where
import Database.Persist
import Database.Persist.Sql.Types
import Database.Persist.Sql.Class
import qualified Data.Map as Map
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (ReaderT, ask, MonadReader)
import Control.Monad.Trans.Resource (release)
import Data.Acquire (allocateAcquire, Acquire, mkAcquire, with)
import Data.IORef (writeIORef, readIORef, newIORef)
import Control.Exception (throwIO)
import Control.Monad (when, liftM)
import Data.Text (Text, pack)
import Control.Monad.Logger (logDebugS, runLoggingT)
import Data.Int (Int64)
import qualified Data.Text as T
import Data.Conduit
import Control.Monad.Trans.Resource (MonadResource)
rawQuery :: (MonadResource m, MonadReader env m, HasPersistBackend env SqlBackend)
=> Text
-> [PersistValue]
-> Source m [PersistValue]
rawQuery sql vals = do
srcRes <- liftPersist $ rawQueryRes sql vals
(releaseKey, src) <- allocateAcquire srcRes
src
release releaseKey
rawQueryRes
:: (MonadIO m1, MonadIO m2)
=> Text
-> [PersistValue]
-> ReaderT SqlBackend m1 (Acquire (Source m2 [PersistValue]))
rawQueryRes sql vals = do
conn <- ask
let make = do
runLoggingT ($logDebugS (pack "SQL") $ pack $ show sql ++ " " ++ show vals)
(connLogFunc conn)
getStmtConn conn sql
return $ do
stmt <- mkAcquire make stmtReset
stmtQuery stmt vals
rawExecute :: MonadIO m => Text -> [PersistValue] -> ReaderT SqlBackend m ()
rawExecute x y = liftM (const ()) $ rawExecuteCount x y
rawExecuteCount :: MonadIO m => Text -> [PersistValue] -> ReaderT SqlBackend m Int64
rawExecuteCount sql vals = do
conn <- ask
runLoggingT ($logDebugS (pack "SQL") $ pack $ show sql ++ " " ++ show vals)
(connLogFunc conn)
stmt <- getStmt sql
res <- liftIO $ stmtExecute stmt vals
liftIO $ stmtReset stmt
return res
getStmt :: MonadIO m => Text -> ReaderT SqlBackend m Statement
getStmt sql = do
conn <- ask
liftIO $ getStmtConn conn sql
getStmtConn :: SqlBackend -> Text -> IO Statement
getStmtConn conn sql = do
smap <- liftIO $ readIORef $ connStmtMap conn
case Map.lookup sql smap of
Just stmt -> return stmt
Nothing -> do
stmt' <- liftIO $ connPrepare conn sql
iactive <- liftIO $ newIORef True
let stmt = Statement
{ stmtFinalize = do
active <- readIORef iactive
if active
then do
stmtFinalize stmt'
writeIORef iactive False
else return ()
, stmtReset = do
active <- readIORef iactive
when active $ stmtReset stmt'
, stmtExecute = \x -> do
active <- readIORef iactive
if active
then stmtExecute stmt' x
else throwIO $ StatementAlreadyFinalized sql
, stmtQuery = \x -> do
active <- liftIO $ readIORef iactive
if active
then stmtQuery stmt' x
else liftIO $ throwIO $ StatementAlreadyFinalized sql
}
liftIO $ writeIORef (connStmtMap conn) $ Map.insert sql stmt smap
return stmt
-- | Execute a raw SQL statement and return its results as a
-- list.
--
-- If you're using 'Entity'@s@ (which is quite likely), then you
-- /must/ use entity selection placeholders (double question
-- mark, @??@). These @??@ placeholders are then replaced for
-- the names of the columns that we need for your entities.
-- You'll receive an error if you don't use the placeholders.
-- Please see the 'Entity'@s@ documentation for more details.
--
-- You may put value placeholders (question marks, @?@) in your
-- SQL query. These placeholders are then replaced by the values
-- you pass on the second parameter, already correctly escaped.
-- You may want to use 'toPersistValue' to help you constructing
-- the placeholder values.
--
-- Since you're giving a raw SQL statement, you don't get any
-- guarantees regarding safety. If 'rawSql' is not able to parse
-- the results of your query back, then an exception is raised.
-- However, most common problems are mitigated by using the
-- entity selection placeholder @??@, and you shouldn't see any
-- error at all if you're not using 'Single'.
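--
-- A usage sketch (@Person@ is a hypothetical entity, not defined here):
--
-- > people <- rawSql "SELECT ?? FROM person WHERE name = ?" [PersistText "John"]
-- >             :: ReaderT SqlBackend m [Entity Person]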
rawSql :: (RawSql a, MonadIO m)
=> Text -- ^ SQL statement, possibly with placeholders.
-> [PersistValue] -- ^ Values to fill the placeholders.
-> ReaderT SqlBackend m [a]
rawSql stmt = run
where
getType :: (x -> m [a]) -> a
getType = error "rawSql.getType"
x = getType run
process = rawSqlProcessRow
withStmt' colSubsts params sink = do
srcRes <- rawQueryRes sql params
liftIO $ with srcRes ($$ sink)
where
sql = T.concat $ makeSubsts colSubsts $ T.splitOn placeholder stmt
placeholder = "??"
makeSubsts (s:ss) (t:ts) = t : s : makeSubsts ss ts
makeSubsts [] [] = []
makeSubsts [] ts = [T.intercalate placeholder ts]
makeSubsts ss [] = error (concat err)
where
err = [ "rawsql: there are still ", show (length ss)
, "'??' placeholder substitutions to be made "
, "but all '??' placeholders have already been "
, "consumed. Please read 'rawSql's documentation "
, "on how '??' placeholders work."
]
run params = do
conn <- ask
let (colCount, colSubsts) = rawSqlCols (connEscapeName conn) x
withStmt' colSubsts params $ firstRow colCount
firstRow colCount = do
mrow <- await
case mrow of
Nothing -> return []
Just row
| colCount == length row -> getter mrow
| otherwise -> fail $ concat
[ "rawSql: wrong number of columns, got "
, show (length row), " but expected ", show colCount
, " (", rawSqlColCountReason x, ")." ]
getter = go id
where
go acc Nothing = return (acc [])
go acc (Just row) =
case process row of
Left err -> fail (T.unpack err)
Right r -> await >>= go (acc . (r:))
| junjihashimoto/persistent | persistent/Database/Persist/Sql/Raw.hs | mit | 6,836 | 0 | 22 | 2,279 | 1,628 | 828 | 800 | 133 | 7 |
{-
**************************************************************
* Filename : RegTypes.hs *
* Author : Markus Forsberg *
* [email protected] *
* Last Modified : 5 July, 2001 *
* Lines : 219 *
**************************************************************
-}
module FST.RegTypes ( Reg(..), -- data type for the regular expression
Combinators, -- Type class for Combinators.
(<|>), -- Union combinator
(|>), -- Concatenation combinator
(<&>), -- Intersection combinator
(<->), -- Minus combinator
s, -- Symbol
eps, -- Epsilon
empty, -- Empty
complement, -- Complement
star, -- Star
plus, -- Plus
allS, -- All Symbol
allToSymbols, -- transform the 'all' symbol to union over
-- alphabet.
allFree, -- free a regular expression from 'all'
-- symbols.
reversal, -- reverse a regular expression.
acceptEps, -- Does the regular expression accept epsilon?
Symbols, -- Type class for Symbols.
symbols -- Collect the symbols in a
-- regular expression.
) where
import Data.List (nub)
{- **********************************************************
* Data type for a regular expression. *
**********************************************************
-}
data Reg a = Empty | -- []
Epsilon | -- 0
All | -- ?
Symbol a | -- a
Reg a :|: Reg a | -- [ r1 | r2 ]
Reg a :.: Reg a | -- [ r1 r2 ]
Reg a :&: Reg a | -- [ r1 & r2 ]
Complement (Reg a) | -- ~[ r1 ]
Star (Reg a) -- [ r2 ]*
deriving (Eq)
{- **********************************************************
* Combinators. *
* The regular expressions are simplified while combined. *
**********************************************************
-}
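{- Illustration (not in the original source): the language  [a b*] | 0  can be
   written with these combinators as

       s 'a' |> star (s 'b') <|> eps

   and the smart constructors simplify while combining, e.g.
   star (star (s 'a')) == star (s 'a')  and  empty <|> s 'a' == s 'a'.
-}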
infixl 5 |> -- Concatenation
infixl 4 <|> -- Union
infixl 3 <&> -- Intersection
infixl 3 <-> -- Set minus
class Combinators a where
(<|>) :: a -> a -> a -- Union
(|>) :: a -> a -> a -- Concatenation
star :: a -> a -- Kleene's star
plus :: a -> a -- Kleene's plus
empty :: a
instance Eq a => Combinators (Reg a) where
Empty <|> b = b -- [ [] | r1 ] = r1
a <|> Empty = a -- [ r1 | [] ] = r1
_ <|> (Star All) = Star All
(Star All) <|> _ = Star All
a1@(a :.: b) <|> a2@(c :.: d)
| a1 == a2 = a1
| a == c = a |> (b <|> d)
| b == d = (a <|> c) |> b
| otherwise = a1 :|: a2
a <|> b
| a == b = a -- [ r1 | r1 ] = r1
| otherwise = a :|: b
Empty |> _ = empty -- [ [] r1 ] = []
_ |> Empty = empty -- [ r1 [] ] = []
Epsilon |> b = b -- [ 0 r1 ] = r1
a |> Epsilon = a -- [ r1 0 ] = r1
a |> b = a :.: b
star (Star a) = star a -- [r1]** = [r1]*
star (Epsilon) = eps -- [0]* = 0
star (Empty) = eps -- [ [] ]* = 0
star a = Star a
plus a = a |> star a
empty = Empty
{- Intersection -}
(<&>) :: Eq a => Reg a -> Reg a -> Reg a
_ <&> Empty = Empty -- [ r1 & [] ] = []
Empty <&> _ = Empty -- [ [] & r1 ] = []
(Star All) <&> a = a
a <&> (Star All) = a
a <&> b
| a == b = a -- [ r1 & r1 ] = r1
| otherwise = a :&: b
{- Minus. Definition A - B = A & ~B -}
(<->) :: Eq a => Reg a -> Reg a -> Reg a
Empty <-> _ = empty -- [ [] - r1 ] = []
a <-> Empty = a -- [ r1 - [] ] = r1
a <-> b
| a == b = empty -- [ r1 - r1 ] = []
| otherwise = a <&> (complement b)
s :: a -> Reg a
s a = Symbol a
eps :: Reg a
eps = Epsilon
allS :: Reg a
allS = All
complement :: Eq a => Reg a -> Reg a
complement Empty = star allS -- ~[ [] ] = ?*
complement Epsilon = plus allS -- ~[ 0 ] = [? ?*]
complement (Star All) = empty
complement (Complement a) = a
complement a = Complement a
{- *******************************************************************
* allToSymbols: ? -> [a|..] with respect to an alphabet [a] *
   * allFree: Construct a ?-free regular expression with respect      *
* to an alphabet [a] *
*******************************************************************
-}
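{- For example, with the alphabet "ab":
       allToSymbols "ab"            == s 'a' :|: s 'b'
       allFree (All :.: s 'a') "ab" == (s 'a' :|: s 'b') :.: s 'a'
-}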
allToSymbols :: Eq a => [a] -> Reg a
allToSymbols sigma = case sigma of
[] -> empty
ys -> foldr1 (:|:) [s a| a <- ys]
allFree :: Eq a => Reg a -> [a] -> Reg a
allFree (a :|: b) sigma = (allFree a sigma) :|: (allFree b sigma)
allFree (a :.: b) sigma = (allFree a sigma) :.: (allFree b sigma)
allFree (a :&: b) sigma = (allFree a sigma) :&: (allFree b sigma)
allFree (Complement a) sigma = Complement (allFree a sigma)
allFree (Star a) sigma = Star (allFree a sigma)
allFree (All) sigma = allToSymbols sigma
allFree r _ = r
{- **********************************************************
* reversal: reverse the language denoted by the regular *
* expression. *
**********************************************************
-}
reversal :: Eq a => Reg a -> Reg a
reversal (a :|: b) = (reversal a) :|: (reversal b)
reversal (a :.: b) = (reversal b) :.: (reversal a)
reversal (a :&: b) = (reversal a) :&: (reversal b)
reversal (Complement a) = Complement (reversal a)
reversal (Star a) = Star (reversal a)
reversal r = r
{- ***********************************************************
* acceptEps: Examines if a regular expression accepts *
* the empty string. *
***********************************************************
-}
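{- For instance: acceptEps (star (s 'a')) == True, while
   acceptEps (s 'a' |> star (s 'b')) == False.
-}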
acceptEps :: Eq a => Reg a -> Bool
acceptEps (Epsilon) = True
acceptEps (Star _) = True
acceptEps (a :|: b) = acceptEps a || acceptEps b
acceptEps (a :.: b) = acceptEps a && acceptEps b
acceptEps (a :&: b) = acceptEps a && acceptEps b
acceptEps (Complement a) = not (acceptEps a)
acceptEps _ = False
{- **********************************************************
* Symbols: type class for the collection of symbols in a *
* expression. *
**********************************************************
-}
class Symbols f where
symbols :: Eq a => f a -> [a]
instance Symbols Reg where
symbols (Symbol a) = [a]
symbols (a :.: b) = nub $ (symbols a) ++ (symbols b)
symbols (a :|: b) = nub $ (symbols a) ++ (symbols b)
symbols (a :&: b) = nub $ (symbols a) ++ (symbols b)
symbols (Complement a) = symbols a
symbols (Star a) = symbols a
symbols _ = []
{- **********************************************************
* Instance of Show (Reg a) *
**********************************************************
-}
instance Show a => Show (Reg a) where
show (Empty) = "[0 - 0]"
show (Epsilon) = "0"
show (Symbol a) = show a
show (All) = "?"
show (Complement a) = "~" ++ "[" ++ show a ++ "]"
show (Star a) = "[" ++ show a ++ "]* "
show (a :|: b) = "[" ++ show a ++ " | " ++ show b ++ "]"
show (a :.: b) = "[" ++ show a ++ " " ++ show b ++ "]"
show (a :&: b) = "[" ++ show a ++ " & " ++ show b ++ "]"
| SAdams601/ParRegexSearch | test/fst-0.9.0.1/FST/RegTypes.hs | mit | 7,853 | 54 | 11 | 2,888 | 2,120 | 1,097 | 1,023 | 137 | 2 |
-- Tube strike options calculator
-- http://www.codewars.com/kata/568ade64cfd7a55d9300003e/
module Codewars.Kata.Tube where
import Codewars.Kata.Tube.Types
calculator :: Double -> Double -> Double -> Decision
calculator distance busDrive busWalk | 60 * (distance / 5) < 10 = Walk
| 60 * (distance / 5) > 120 = Bus
| (distance / 5) <= (busWalk / 5) + (busDrive / 8) = Walk
| otherwise = Bus
| gafiatulin/codewars | src/7 kyu/Tube.hs | mit | 504 | 0 | 11 | 177 | 137 | 73 | 64 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module RLPTest where
import Control.Monad (sequence)
import Data.Aeson
import Data.Aeson.Types (typeMismatch)
import Data.ByteString
import qualified Data.ByteString.Lazy as BL
import qualified Data.Map.Strict as M
import Data.Maybe (fromJust)
import Data.Semigroup ((<>))
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Vector as V
import Development.IncludeFile
import qualified Data.RLP as RLP
data RLPTestInput = StringInput ByteString
| NumberInput Integer
| ListInput [RLPTestInput]
deriving (Read, Show)
-- The test JSON takes advantage of the fact that you can mix and match types in JSON arrays
-- so naturally, that doesn't play well with Haskell, and we can't *REALLY* make FromJSON and ToJSON
-- maintain identity. So we cheat :P. Our biggest issue is that RLP doesn't have an "Integer" type
-- it's effectively stored as a big-endian String. So we need to really handle the case of
-- StringInput == NumberInput and vice versa
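--
-- For instance (a sketch, assuming 'RLP.unpackBE' reads the bytes as a
-- big-endian integer): StringInput "\SOH" == NumberInput 1, because the single
-- byte 0x01 unpacks to the integer 1.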
instance Eq RLPTestInput where
(StringInput s1) == (StringInput s2) = s1 == s2
(NumberInput n1) == (NumberInput n2) = n1 == n2
(ListInput l1) == (ListInput l2) = l1 == l2
StringInput{} == ListInput{} = False -- impossible
(StringInput s) == (NumberInput n) = RLP.unpackBE (unpack s) == n -- todo this case
NumberInput{} == ListInput{} = False -- also impossible
n@NumberInput{} == s@StringInput{} = s == n -- take advantage of the commutative case
o1 == o2 = False
instance RLP.RLPEncodable RLPTestInput where
rlpEncode (StringInput s) = RLP.String s
rlpEncode (NumberInput n) = RLP.rlpEncode n
rlpEncode (ListInput xs) = RLP.Array $ RLP.rlpEncode <$> xs
    rlpDecode (RLP.String s) = Right (StringInput s) -- TODO: this won't work for NumberInput values
rlpDecode (RLP.Array xs) = ListInput <$> sequence (RLP.rlpDecode <$> xs)
data RLPTest = RLPTest { input :: RLPTestInput, output :: T.Text }
deriving (Eq, Read, Show)
instance FromJSON RLPTestInput where
parseJSON (String s) | T.null s = return (StringInput "")
| otherwise = case T.head s of
'#' -> return . NumberInput . read . T.unpack $ T.tail s
_ -> return . StringInput $ TE.encodeUtf8 s
parseJSON (Number n) = return . NumberInput $ round n
parseJSON (Array a) = ListInput . V.toList <$> V.forM a parseJSON
parseJSON x = typeMismatch "RLPTestInput" x
instance ToJSON RLPTestInput where
toJSON (StringInput s) = String $ TE.decodeUtf8 s
toJSON (NumberInput n) = Number $ fromIntegral n
toJSON (ListInput xs) = toJSON xs
instance FromJSON RLPTest where
parseJSON (Object o) = RLPTest <$> (o .: "in") <*> (o .: "out")
parseJSON x = typeMismatch "RLPTest" x
instance ToJSON RLPTest where
toJSON RLPTest{..} = object [ "in" .= input, "out" .= output ]
toEncoding RLPTest{..} = pairs ( "in" .= input <> "out" .= output )
$(includeFileInSource "test/resources/rlptest.json" "officialRLPTests'")
officialRLPTests :: Either String [(T.Text, RLPTest)]
officialRLPTests = M.toList <$> eitherDecode (BL.fromStrict officialRLPTests')
| iostat/relapse | test/RLPTest.hs | mit | 3,532 | 0 | 13 | 932 | 962 | 504 | 458 | 59 | 1 |
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances,
MultiParamTypeClasses, DeriveDataTypeable, OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Pane.Trace
-- Copyright : 2007-2011 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module IDE.Pane.Trace (
IDETrace
, TraceState
, showTrace
, fillTraceList
) where
import Data.Typeable (Typeable(..))
import IDE.Core.State
import IDE.Package (tryDebug)
import IDE.Debug
(debugForward, debugBack, debugCommand')
import IDE.Utils.Tool (ToolOutput(..))
import IDE.LogRef (srcSpanParser)
import Text.ParserCombinators.Parsec
(anyChar,
skipMany,
(<|>),
optional,
eof,
try,
parse,
(<?>),
noneOf,
many,
CharParser)
import qualified Text.ParserCombinators.Parsec.Token as P
(integer, whiteSpace, colon, symbol, makeTokenParser)
import Text.ParserCombinators.Parsec.Language (emptyDef)
import System.Log.Logger (debugM)
import IDE.Workspaces (packageTry)
import qualified Data.Conduit.List as CL (consume)
import Control.Applicative ((<$>))
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.IO.Class (MonadIO(..))
import IDE.Utils.GUIUtils (treeViewContextMenu, __)
import Data.Text (Text)
import Data.Monoid ((<>))
import qualified Data.Text as T (pack, unpack)
import qualified Text.Printf as S (printf)
import Text.Printf (PrintfType)
import GI.Gtk.Objects.ScrolledWindow
(scrolledWindowSetPolicy, scrolledWindowSetShadowType,
scrolledWindowNew, ScrolledWindow(..))
import GI.Gtk.Objects.TreeView
(treeViewGetSelection, treeViewSetHeadersVisible,
treeViewAppendColumn, treeViewSetModel, treeViewNew, TreeView(..))
import Data.GI.Gtk.ModelView.ForestStore
(forestStoreGetValue, ForestStore(..),
forestStoreInsert, forestStoreClear, forestStoreNew)
import GI.Gtk.Objects.Widget (afterWidgetFocusInEvent, toWidget)
import GI.Gtk.Objects.Notebook (Notebook(..))
import GI.Gtk.Objects.Window (Window(..))
import GI.Gtk.Objects.CellRendererToggle
(setCellRendererToggleActive, cellRendererToggleNew)
import GI.Gtk.Objects.TreeViewColumn
(treeViewColumnSetReorderable, treeViewColumnSetResizable,
treeViewColumnSetSizing, treeViewColumnSetTitle, treeViewColumnNew)
import GI.Gtk.Enums
(PolicyType(..), ShadowType(..), SelectionMode(..),
TreeViewColumnSizing(..))
import GI.Gtk.Interfaces.CellLayout (cellLayoutPackStart)
import Data.GI.Gtk.ModelView.CellLayout
(cellLayoutSetDataFunction)
import GI.Gtk.Objects.CellRendererText
(setCellRendererTextText, cellRendererTextNew)
import GI.Gtk.Objects.TreeSelection
(onTreeSelectionChanged, treeSelectionSetMode)
import GI.Gtk.Objects.Adjustment (noAdjustment)
import GI.Gtk.Objects.Container (containerAdd)
import GI.Gtk.Objects.Menu (Menu(..))
import GI.Gtk.Objects.MenuItem
(toMenuItem, onMenuItemActivate, menuItemNewWithLabel)
import GI.Gtk.Objects.SeparatorMenuItem (separatorMenuItemNew)
import GI.Gtk.Objects.MenuShell (menuShellAppend)
import Control.Monad.Reader (MonadReader(..))
import Data.GI.Gtk.ModelView.Types
(treeSelectionGetSelectedRows', treePathNewFromIndices')
printf :: PrintfType r => Text -> r
printf = S.printf . T.unpack
-- | A debugger pane description
--
data IDETrace = IDETrace {
scrolledView :: ScrolledWindow
, treeView :: TreeView
, tracepoints :: ForestStore TraceHist
} deriving Typeable
data TraceState = TraceState {
} deriving(Eq,Ord,Read,Show,Typeable)
data TraceHist = TraceHist {
thSelected :: Bool,
thIndex :: Int,
thFunction :: Text,
thPosition :: SrcSpan
}
instance Pane IDETrace IDEM
where
primPaneName _ = __ "Trace"
getAddedIndex _ = 0
getTopWidget = liftIO . toWidget . scrolledView
paneId b = "*Trace"
instance RecoverablePane IDETrace TraceState IDEM where
saveState p = return (Just TraceState)
recoverState pp TraceState = do
nb <- getNotebook pp
buildPane pp nb builder
builder = builder'
getTrace :: IDEM IDETrace
getTrace = forceGetPane (Right "*Trace")
showTrace :: IDEAction
showTrace = do
pane <- getTrace
displayPane pane False
builder' :: PanePath ->
Notebook ->
Window ->
IDEM (Maybe IDETrace,Connections)
builder' pp nb windows = do
ideR <- ask
tracepoints <- forestStoreNew []
treeView <- treeViewNew
treeViewSetModel treeView (Just tracepoints)
renderer0 <- cellRendererToggleNew
col0 <- treeViewColumnNew
treeViewColumnSetTitle col0 ""
treeViewColumnSetSizing col0 TreeViewColumnSizingAutosize
treeViewColumnSetResizable col0 False
treeViewColumnSetReorderable col0 True
treeViewAppendColumn treeView col0
cellLayoutPackStart col0 renderer0 False
cellLayoutSetDataFunction col0 renderer0 tracepoints
$ setCellRendererToggleActive renderer0 . thSelected
renderer1 <- cellRendererTextNew
col1 <- treeViewColumnNew
treeViewColumnSetTitle col1 (__ "Index")
treeViewColumnSetSizing col1 TreeViewColumnSizingAutosize
treeViewColumnSetResizable col1 True
treeViewColumnSetReorderable col1 True
treeViewAppendColumn treeView col1
cellLayoutPackStart col1 renderer1 False
cellLayoutSetDataFunction col1 renderer1 tracepoints
$ setCellRendererTextText renderer1 . T.pack . show . thIndex
renderer2 <- cellRendererTextNew
col2 <- treeViewColumnNew
treeViewColumnSetTitle col2 (__ "Function")
treeViewColumnSetSizing col2 TreeViewColumnSizingAutosize
treeViewColumnSetResizable col2 True
treeViewColumnSetReorderable col2 True
treeViewAppendColumn treeView col2
cellLayoutPackStart col2 renderer2 False
cellLayoutSetDataFunction col2 renderer2 tracepoints
$ setCellRendererTextText renderer2 . thFunction
renderer3 <- cellRendererTextNew
col3 <- treeViewColumnNew
treeViewColumnSetTitle col3 (__ "Position")
treeViewColumnSetSizing col3 TreeViewColumnSizingAutosize
treeViewColumnSetResizable col3 True
treeViewColumnSetReorderable col3 True
treeViewAppendColumn treeView col3
cellLayoutPackStart col3 renderer3 False
cellLayoutSetDataFunction col3 renderer3 tracepoints
$ setCellRendererTextText renderer3 . T.pack . displaySrcSpan . thPosition
treeViewSetHeadersVisible treeView True
sel <- treeViewGetSelection treeView
treeSelectionSetMode sel SelectionModeSingle
scrolledView <- scrolledWindowNew noAdjustment noAdjustment
scrolledWindowSetShadowType scrolledView ShadowTypeIn
containerAdd scrolledView treeView
scrolledWindowSetPolicy scrolledView PolicyTypeAutomatic PolicyTypeAutomatic
let pane = IDETrace scrolledView treeView tracepoints
cid1 <- onIDE afterWidgetFocusInEvent treeView (do
liftIDE $ makeActive pane
return True)
cids2 <- treeViewContextMenu treeView $ traceContextMenu ideR tracepoints treeView
onTreeSelectionChanged sel $ do
sel <- getSelectedTracepoint treeView tracepoints
case sel of
Just ref -> return () -- TODO reflectIDE (selectRef (Just ref)) ideR
Nothing -> return ()
return (Just pane, cid1 : cids2)
fillTraceList :: IDEAction
fillTraceList = packageTry $ do
currentHist' <- readIDE currentHist
mbTraces <- liftIDE getPane
case mbTraces of
Nothing -> return ()
Just tracePane -> tryDebug $ debugCommand' ":history" $ do
to <- CL.consume
lift $ postAsyncIDE $ do
let parseRes = parse tracesParser "" . T.unpack $ selectString to
r <- case parseRes of
Left err -> do
liftIO $ debugM "leksah" (printf (__ "trace parse error %s\ninput: %s") (show err)
(T.unpack $ selectString to))
return []
Right traces -> return traces
forestStoreClear (tracepoints tracePane)
let r' = map (\h@(TraceHist _ i _ _) -> if i == currentHist'
then h{thSelected = True}
else h) r
mapM_ (insertTrace (tracepoints tracePane))
(zip r' [0..length r'])
where
insertTrace forestStore (tr,index) = do
emptyPath <- treePathNewFromIndices' []
forestStoreInsert forestStore emptyPath index tr
selectString :: [ToolOutput] -> Text
selectString (ToolOutput str:r) = "\n" <> str <> selectString r
selectString (_:r) = selectString r
selectString [] = ""
getSelectedTracepoint :: TreeView
-> ForestStore TraceHist
-> IO (Maybe TraceHist)
getSelectedTracepoint treeView forestStore = do
treeSelection <- treeViewGetSelection treeView
paths <- treeSelectionGetSelectedRows' treeSelection
case paths of
a:r -> do
val <- forestStoreGetValue forestStore a
return (Just val)
_ -> return Nothing
selectStrings :: [ToolOutput] -> [Text]
selectStrings (ToolOutput str:r) = str : selectStrings r
selectStrings (_:r) = selectStrings r
selectStrings [] = []
traceContextMenu :: IDERef
-> ForestStore TraceHist
-> TreeView
-> Menu
-> IO ()
traceContextMenu ideR store treeView theMenu = do
item1 <- menuItemNewWithLabel (__ "Back")
onMenuItemActivate item1 $ reflectIDE debugBack ideR
sep1 <- separatorMenuItemNew >>= liftIO . toMenuItem
item2 <- menuItemNewWithLabel (__ "Forward")
onMenuItemActivate item2 $ reflectIDE debugForward ideR
item3 <- menuItemNewWithLabel (__ "Update")
onMenuItemActivate item3 $ reflectIDE fillTraceList ideR
mapM_ (menuShellAppend theMenu) [item1, sep1, item2, item3]
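-- Parses the output of GHCi's ":history" command: either "Empty history.",
-- "Not stopped at a breakpoint", or a sequence of entries roughly of the form
-- "-1  : someFunction (SrcFile.hs:12:5-20)" followed by "<end of history>"
-- (the exact span format is whatever 'srcSpanParser' accepts).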
tracesParser :: CharParser () [TraceHist]
tracesParser = try (do
whiteSpace
symbol (T.unpack $ __ "Empty history.")
skipMany anyChar
eof
return [])
<|> do
traces <- many (try traceParser)
whiteSpace
symbol (T.unpack $ __ "<end of history>")
eof
return traces
<|> do
whiteSpace
symbol (T.unpack $ __ "Not stopped at a breakpoint")
skipMany anyChar
eof
return []
<?>
T.unpack (__ "traces parser")
traceParser :: CharParser () TraceHist
traceParser = do
whiteSpace
index <- int
colon
optional (symbol "\ESC[1m")
function <- T.pack <$> many (noneOf "(\ESC")
optional (symbol "\ESC[0m")
symbol "("
span <- srcSpanParser
symbol ")"
return (TraceHist False index function span)
<?> T.unpack (__ "trace parser")
lexer = P.makeTokenParser emptyDef
colon = P.colon lexer
symbol = P.symbol lexer
whiteSpace = P.whiteSpace lexer
int = fromInteger <$> P.integer lexer
| JPMoresmau/leksah | src/IDE/Pane/Trace.hs | gpl-2.0 | 11,455 | 1 | 29 | 2,754 | 2,803 | 1,444 | 1,359 | 273 | 4 |
module Game.Handlers where
import qualified Graphics.UI.GLFW as GLFW
import Reactive.Banana.Frameworks
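-- Each handler below registers the corresponding GLFW callback, forwarding
-- every event to the supplied sink, and returns the action that unregisters
-- the callback again (by setting it back to Nothing).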
charHandler :: GLFW.Window -> AddHandler (GLFW.Window, Char)
charHandler win callback = do
GLFW.setCharCallback win . Just $ \w c -> callback (w, c)
return (GLFW.setCharCallback win Nothing)
keyHandler :: GLFW.Window -> AddHandler ( GLFW.Window
, GLFW.Key
, Int
, GLFW.KeyState
, GLFW.ModifierKeys)
keyHandler win callback = do
GLFW.setKeyCallback win . Just $ \w k num ks mk -> callback (w, k, num, ks, mk)
return (GLFW.setKeyCallback win Nothing)
mouseButtonHandler :: GLFW.Window -> AddHandler ( GLFW.Window
, GLFW.MouseButton
, GLFW.MouseButtonState
, GLFW.ModifierKeys)
mouseButtonHandler win callback = do
GLFW.setMouseButtonCallback win . Just $ \w b bs mk -> callback (w, b, bs, mk)
return (GLFW.setMouseButtonCallback win Nothing)
| theguruofreason/glfw-b-test | Game/Handlers.hs | gpl-2.0 | 1,182 | 0 | 10 | 454 | 313 | 166 | 147 | 22 | 1 |
module Moonbase.Util.Gtk.Widget.Graph
( GraphStyle(..)
, GraphDirection(..)
, GraphConfig(..)
, GraphHistory
, Graph
, graphNew
, graphConfig
, graphHistory
, pollingGraphNew
, defaultGraphConfig
) where
import Control.Monad
import Control.Concurrent
import Control.Exception
import qualified Moonbase.Theme as Moon
import Moonbase.Util.Gtk
import qualified Data.Map as M
import qualified Data.Sequence as S
import Data.Maybe
import System.IO.Unsafe
import Graphics.UI.Gtk
import Graphics.Rendering.Cairo
data GraphStyle = LineGraph
| AreaGraph Bool
data GraphDirection = GraphRightToLeft
| GraphLeftToRight
data GraphConfig = GraphConfig
{ graphPadding :: Int
, graphDirection :: GraphDirection
, graphStyle :: GraphStyle
, graphWidth :: Int
, graphColor :: Moon.Color
, graphBorder :: Maybe (Int, Moon.Color)
, graphBackground :: Moon.Color
}
defaultGraphConfig :: GraphConfig
defaultGraphConfig = GraphConfig
{ graphPadding = 4
, graphDirection = GraphLeftToRight
, graphStyle = LineGraph
, graphWidth = 128
, graphColor = "#ff0000"
, graphBorder = Just (1, "#0000ff")
, graphBackground = "#24ff25"
}
type GraphHistory = S.Seq Double
emptyHistory :: GraphHistory
emptyHistory = S.empty
type Graph = DrawingArea
maybeHistory :: Attr Graph (Maybe GraphHistory)
maybeHistory = unsafePerformIO $ objectCreateAttribute
{-# NOINLINE maybeHistory #-}
maybeConfig :: Attr Graph (Maybe GraphConfig)
maybeConfig = unsafePerformIO $ objectCreateAttribute
{-# NOINLINE maybeConfig #-}
graphHistory :: Attr Graph GraphHistory
graphHistory = newAttr getHistory setHistory
where
getHistory object = do
mHistory <- get object maybeHistory
when (isNothing mHistory) $ putStrLn "Could not get history..."
return $ fromMaybe emptyHistory mHistory
setHistory object history = do
set object [maybeHistory := Just history]
graphConfig :: Attr Graph GraphConfig
graphConfig = newAttr getConfig setConfig
where
getConfig object = do
mConfig <- get object maybeConfig
case mConfig of
Nothing -> error "Could not load graph config"
Just c -> return c
setConfig object config = do
set object [maybeConfig := Just config]
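-- | Create a graph widget with the given configuration and an empty history.
-- On each draw request it renders the newest samples that fit the visible
-- width and trims the stored history to that window.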
graphNew :: GraphConfig -> IO Graph
graphNew config = do
graph <- drawingAreaNew
set graph [maybeConfig := (Just config)]
set graph [maybeHistory := (Just emptyHistory)]
widgetSetSizeRequest graph (graphWidth config) (-1)
_ <- on graph draw $ do
history <- liftIO $ get graph graphHistory
(w,h) <- liftIO $ getSize graph
let (_, needed) = S.splitAt (S.length history - nSize w) history
drawGraph w h config needed
liftIO $ set graph [graphHistory := needed]
return graph
where
nSize w = w - (graphPadding config * 2)
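-- | Like 'graphNew', but when the widget is realized a thread is forked that
-- runs the given update action every @ms@ milliseconds and queues a redraw.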
pollingGraphNew :: GraphConfig -> Int -> (Graph -> IO ()) -> IO Graph
pollingGraphNew conf ms f = do
graph <- graphNew conf
_ <- on graph realize $
void $ forkIO $ forever $ do
f graph
h <- get graph graphHistory
postGUIAsync $ widgetQueueDraw graph
threadDelay $ ms * 1000
return graph
getSize :: Graph -> IO (Int, Int)
getSize graph = do
area <- widgetGetWindow graph
case area of
Nothing -> return (0,0)
Just win -> do
w <- drawWindowGetWidth win
h <- drawWindowGetHeight win
return (w, h)
drawGraph :: Int -> Int -> GraphConfig -> GraphHistory -> Render ()
drawGraph w h conf hist = do
renderBackground w h (graphBackground conf)
when hasBorder $ renderBorder w h (graphPadding conf) (fromJust $ graphBorder conf)
case graphStyle conf of
LineGraph -> renderLineGraph w h (graphPadding conf) (graphColor conf) hist
AreaGraph tran -> renderAreaGraph tran w h (graphPadding conf) (graphColor conf) hist
where
hasBorder = isJust $ graphBorder conf
renderBackground :: Int -> Int -> Moon.Color -> Render ()
renderBackground w h c = setSourceRGB r g b >> rectangle 0 0 (fromIntegral w) (fromIntegral h) >> fill
where
(r, g, b) = parseColor' c
renderBorder :: Int -> Int -> Int -> (Int, Moon.Color) -> Render ()
renderBorder w h padding (wid, c) = setSourceRGB r g b >> setLineWidth wid' >> moveTo j j >> rectangle j j w' h' >> stroke
where
(r, g, b) = parseColor' c
w' = fromIntegral w - 2*j
h' = fromIntegral h - 2*j
pad = fromIntegral padding
wid' = fromIntegral wid
j = pad - wid' / 2
renderLineGraph :: Int -> Int -> Int -> Moon.Color -> GraphHistory -> Render ()
renderLineGraph w h padding color hist = do
setLineWidth 1
setSourceRGB r g b
void $ loopR hist $ \index value -> do
let x = xm - fI index
let y = y0 - (ye * value)
moveTo x y0
lineTo x y
stroke
where
pad = fI padding
xm = fI w - pad
y0 = fI h - pad
ye = fI h - (2 * pad)
(r,g,b) = parseColor' color
fI = fromIntegral
renderAreaGraph :: Bool -> Int -> Int -> Int -> Moon.Color -> GraphHistory -> Render ()
renderAreaGraph trans w h pad' color hist = do
setLineWidth 1
setTrans trans
moveTo pad y0
lineTo xm y0
void $ loopR hist $ \index value -> do
let x = xm - fI index
let y = y0 - (ye * value)
lineTo x y
paint
when trans $ do
setLineWidth 2
setSourceRGB r g b
let (first, hist') = S.splitAt 1 hist
moveTo xm (y0 - (ye * (check first)))
void $ loopR hist' $ \index value -> do
let x = xm - fI index
let y = y0 - (ye * value)
lineTo x y
stroke
where
pad = fI pad'
y0 = fI h - pad
ye = fI h - (2 * pad)
xm = fI w - pad
setTrans True = setSourceRGBA r g b 125
setTrans False = setSourceRGB r g b
(r, g, b) = parseColor' color
fI = fromIntegral
check x
| S.null x = 0.0
      | otherwise = S.index x 0
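-- Fold over the sequence from the right, passing each element together with
-- its distance from the right end (0 for the last element).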
loopR :: (Monad m) => S.Seq Double -> (Int -> Double -> m a) -> m [a]
loopR s f = sequence $ loop' 0 $ S.viewr s
where
loop' i (xs S.:> x) = (f i x) : (loop' (i+1) $ S.viewr xs)
loop' _ S.EmptyR = []
| felixsch/moonbase-gtk | src/Moonbase/Util/Gtk/Widget/Graph.hs | gpl-2.0 | 6,590 | 0 | 19 | 2,064 | 2,260 | 1,128 | 1,132 | 174 | 2 |
{-# Language DoAndIfThenElse #-}
-- | Configuration of the symbol list.
module GUI.SymbolList where
import Equ.Theories
import Equ.Syntax
import Graphics.UI.Gtk hiding (eventButton, eventSent,get)
import qualified Graphics.UI.Gtk as Gtk
import Data.Text(unpack)
import Control.Lens hiding (set)
import Control.Monad.Trans.RWS
import Control.Applicative ((<$>))
import qualified Data.Foldable as F
import GUI.GState
import GUI.EditBook
import GUI.Utils
type SymItem = String
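-- | The symbols offered to the user: quantifiers (wrapped in enclosing
-- brackets) followed by the operators and constants of the theories.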
listSymbols :: IO (ListStore SymItem)
listSymbols = listStoreNew $ map addEncloseItem quantifiersList
++ map addItem operatorsList
++ map addItem constantsList
where addItem :: Syntactic s => s -> SymItem
addItem syn = unpack $ tRepr syn
addEncloseItem :: Syntactic s => s -> SymItem
addEncloseItem q = "〈"++ addItem q ++ ":" ++ ":" ++ "〉"
configSymFrameButton :: GuiMonad ()
configSymFrameButton = do
content <- ask
let sf = content ^. (gFunSymbolList . gSymFrame)
let (sfButton,sfItem) = content ^. (gFunToolbar . symFrameCtrl)
visible <- io $ Gtk.get sf widgetVisible
io $ toggleToolButtonSetActive sfButton (not visible)
io $ Gtk.set sfItem [ checkMenuItemActive := not visible ]
if visible
then io $ widgetHide sf
else io $ widgetShowAll sf
configSymbolList :: GuiMonad ()
configSymbolList = do
content <- ask
let sf = content ^. (gFunSymbolList . gSymFrame)
let iv = content ^. (gFunSymbolList . gSymIconView)
list <- io listSymbols
_ <- io $ setupSymbolList iv list
eventsSymbolList iv list
io $ widgetHide sf
return ()
-- | The actual configuration of the symbol list.
setupSymbolList :: IconView -> ListStore SymItem -> IO (ListStore SymItem)
setupSymbolList iv list =
return (makeColumnIdString 1) >>= \scol ->
return (makeColumnIdPixbuf (-1)) >>= \pcol ->
iconViewSetTextColumn iv scol >>
iconViewSetPixbufColumn iv pcol >>
customStoreSetColumn list scol id >>
set iv [ iconViewModel := Just list
, iconViewPixbufColumn := pcol
, iconViewTextColumn := scol
, iconViewRowSpacing := 0
, iconViewMargin := 0
, iconViewSelectionMode := SelectionSingle
] >>
widgetShowAll iv >>
return list
eventsSymbolList :: IconView -> ListStore SymItem -> GuiMonad ()
eventsSymbolList iv list = do
content <- ask
s <- get
_ <- io $ iv `on` focusInEvent $ (io $ iconViewSelectPath iv [0] >> return True)
_ <- io $ iv `on` itemActivated $ \path ->
evalRWST (oneSelection list path) content s >> return ()
return ()
oneSelection :: ListStore SymItem -> TreePath -> GuiMonad ()
oneSelection list path = do
s <- getGState
configSelection $ s ^. gFunEditBook
where
configSelection :: FunEditBook -> GuiMonad ()
configSelection editBook =
getTextEditFromFunEditBook editBook >>=
maybe (return ()) (\(_,_,tv) ->
io (getElem list path) >>=
F.mapM_ (addToCursorBuffer tv))
addToCursorBuffer :: TextView -> String -> GuiMonad ()
addToCursorBuffer tv repr = io $ do
buf <- textViewGetBuffer tv
textBufferInsertAtCursor buf repr
widgetGrabFocus tv
getElem :: ListStore a -> TreePath -> IO (Maybe a)
getElem l p = treeModelGetIter l p >>= \i ->
flip (maybe (return Nothing)) i $ \it ->
(\idx -> listStoreGetSize l >>= \len ->
if idx < len
then Just <$> listStoreGetValue l idx
else return Nothing) (listStoreIterToIndex it)
| alexgadea/fun-gui | GUI/SymbolList.hs | gpl-3.0 | 4,198 | 0 | 16 | 1,471 | 1,140 | 573 | 567 | 90 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Lamdu.GUI.ExpressionGui.Types
( ExpressionGui, egWidget, egAlignment
, SugarExpr
, Payload(..)
, plStoredEntityIds, plInjected, plNearestHoles, plShowAnnotation
, emptyPayload
, EvalModeShow(..)
, FuncApplyLimit(..)
, ShowAnnotation(..), showExpanded, showInTypeMode, showInEvalMode
, funcApplyLimit
, showAnnotationWhenVerbose
, neverShowAnnotations, alwaysShowAnnotations
, nextHolesBefore
, plOfHoleResult
) where
import Control.Lens (Lens, Lens')
import qualified Control.Lens as Lens
import Control.Lens.Operators
import Data.Store.Transaction (Transaction)
import Graphics.UI.Bottle.Widget (Widget)
import qualified Graphics.UI.Bottle.Widget as Widget
import Graphics.UI.Bottle.Widgets.Layout (Layout)
import qualified Graphics.UI.Bottle.Widgets.Layout as Layout
import qualified Lamdu.Sugar.Lens as SugarLens
import Lamdu.Sugar.Names.Types (ExpressionN)
import Lamdu.Sugar.NearestHoles (NearestHoles)
import qualified Lamdu.Sugar.NearestHoles as NearestHoles
import qualified Lamdu.Sugar.Types as Sugar
type T = Transaction
type ExpressionGui m = Layout (Transaction m Widget.EventResult)
{-# INLINE egWidget #-}
egWidget ::
Lens
(ExpressionGui m)
(ExpressionGui n)
(Widget (T m Widget.EventResult))
(Widget (T n Widget.EventResult))
egWidget = Layout.widget
{-# INLINE egAlignment #-}
egAlignment :: Lens' (ExpressionGui m) Layout.Alignment
egAlignment = Layout.alignment
data EvalModeShow = EvalModeShowNothing | EvalModeShowType | EvalModeShowEval
deriving (Eq, Ord, Show)
-- This is only relevant for function subexprs, and means their
-- parameter can only ever get one scope per parent scope id, meaning
-- we may avoid showing their scope nav altogether.
data FuncApplyLimit = UnlimitedFuncApply | AtMostOneFuncApply
deriving (Eq, Ord, Show)
data ShowAnnotation = ShowAnnotation
{ -- showExpanded means we:
-- A) Show even in concise-mode & eval-mode without val
-- B) Do not shrink the annotation to fit
_showExpanded :: Bool
, _showInTypeMode :: Bool
, _showInEvalMode :: EvalModeShow
, _funcApplyLimit :: FuncApplyLimit
} deriving (Eq, Ord, Show)
Lens.makeLenses ''ShowAnnotation
showAnnotationWhenVerbose :: ShowAnnotation
showAnnotationWhenVerbose = ShowAnnotation
{ _showExpanded = False
, _showInTypeMode = True
, _showInEvalMode = EvalModeShowEval
, _funcApplyLimit = UnlimitedFuncApply
}
neverShowAnnotations :: ShowAnnotation
neverShowAnnotations = ShowAnnotation False False EvalModeShowNothing UnlimitedFuncApply
alwaysShowAnnotations :: ShowAnnotation
alwaysShowAnnotations = ShowAnnotation True True EvalModeShowEval UnlimitedFuncApply
-- GUI input payload on sugar exprs
data Payload = Payload
{ _plStoredEntityIds :: [Sugar.EntityId]
, _plInjected :: [Bool]
, _plNearestHoles :: NearestHoles
, _plShowAnnotation :: ShowAnnotation
}
Lens.makeLenses ''Payload
plOfHoleResult :: Sugar.Payload m Payload -> Bool
plOfHoleResult =
Lens.nullOf (Sugar.plData . plStoredEntityIds . Lens.traversed)
type SugarExpr m = ExpressionN m Payload
emptyPayload :: NearestHoles -> Payload
emptyPayload nearestHoles = Payload
{ _plStoredEntityIds = []
, _plInjected = []
, _plNearestHoles = nearestHoles
, _plShowAnnotation = showAnnotationWhenVerbose
}
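-- | The nearest holes recorded at the left-most leaf of the given expression;
-- if that leaf is itself a hole, it becomes the "next" hole.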
nextHolesBefore :: Sugar.Expression name m Payload -> NearestHoles
nextHolesBefore val =
node ^. Sugar.rPayload . Sugar.plData . plNearestHoles
& if Lens.has (Sugar.rBody . Sugar._BodyHole) node
then NearestHoles.next .~ Just (node ^. Sugar.rPayload . Sugar.plEntityId)
else id
where
node = SugarLens.leftMostLeaf val
| da-x/lamdu | Lamdu/GUI/ExpressionGui/Types.hs | gpl-3.0 | 3,839 | 0 | 12 | 725 | 796 | 476 | 320 | -1 | -1 |
module Config
where
import System.Console.GetOpt
import Global
import Common
configOptions :: [(OptDescr (ConfigOptions -> ConfigOptions))]
configOptions = [
Option ['n'] ["name"] (ReqArg setConfigName "name") "your name"
]
data ConfigOptions = ConfigOptions { configName :: String }
setConfigName :: String -> ConfigOptions -> ConfigOptions
setConfigName s c = c{configName = s}
defaultConfigOptions :: ConfigOptions
defaultConfigOptions = ConfigOptions ""
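-- | Handle the @config@ sub-command: require a non-empty name option, echo it
-- and store it as the global name.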
handleConfig args = do
(opts, _) <- doArgs configOptions defaultConfigOptions [(\o -> not (null (configName o)))] "config" args False
putStrLn $ "Your name: " ++ (configName opts)
setGlobalName (configName opts)
| anttisalonen/nix | src/Config.hs | gpl-3.0 | 653 | 0 | 16 | 105 | 211 | 114 | 97 | 15 | 1 |
{- ============================================================================
| Copyright 2011 Matthew D. Steele <[email protected]> |
| |
| This file is part of Fallback. |
| |
| Fallback is free software: you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation, either version 3 of the License, or (at your option) |
| any later version. |
| |
| Fallback is distributed in the hope that it will be useful, but WITHOUT |
| ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for |
| more details. |
| |
| You should have received a copy of the GNU General Public License along |
| with Fallback. If not, see <http://www.gnu.org/licenses/>. |
============================================================================ -}
module Fallback.State.Status
(-- * Status effects
StatusEffects, initStatusEffects, decayStatusEffects,
HarmOrBenefit(..), isBeneficial, isHarmful,
-- ** Getters
seBlessing, seAttackAgilityModifier, seAttackDamageMultiplier,
seDefense, seArmorMultiplier,
seHaste, seSpeedMultiplier,
seRegenPoison,
seInvisibility,
seMentalEffect,
seIsEntangled,
seIsShielded, seMagicShieldMultiplier,
-- ** Setters and modifiers
seApplyBlessing, seReduceBlessing, seReduceCurse,
seApplyDefense, seReduceDefense, seReduceWeakness,
seApplyHaste, seReduceHaste, seReduceSlow,
seAlterRegenPoison,
seSetInvisibility,
seApplyMentalEffect, sePurgeMentalEffects, seWakeFromDaze,
seApplyEntanglement, sePurgeEntanglement,
seApplyMagicShield, seReduceMagicShield,
sePurgeAllBadEffects,
-- * Status deltas
StatusDelta, zeroStatusDelta, makeStatusDelta, applyStatusDelta,
addStatusDeltas, sumStatusDeltas, divStatusDelta,
-- * Utilities
townifyStatus)
where
import Control.Exception (assert)
import Data.Maybe (fromMaybe, isJust)
import Data.List (foldl')
import Fallback.State.Simple (Invisibility(..), MentalEffect(..))
import Fallback.Utility (isFinite)
-------------------------------------------------------------------------------
-- | Represents the state of a status effect that can be negative (harmful) or
-- positive (beneficial).  The 'Double' represents the time remaining, in
-- combat rounds, of the effect, and must always be finite and positive.
data HarmOrBenefit = Harmful Double
| Unaffected
| Beneficial Double
deriving (Eq, Read, Show)
isBeneficial :: HarmOrBenefit -> Bool
isBeneficial (Beneficial _) = True
isBeneficial _ = False
isHarmful :: HarmOrBenefit -> Bool
isHarmful (Harmful _) = True
isHarmful _ = False
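-- | Merge two effects: opposite effects cancel out round-for-round (for
-- example, merging @Harmful 3@ with @Beneficial 5@ leaves @Beneficial 2@),
-- while two effects of the same kind are combined with 'stack'.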
mergeHarmOrBenefit :: HarmOrBenefit -> HarmOrBenefit -> HarmOrBenefit
mergeHarmOrBenefit hb1 hb2 = merge (validate hb1) (validate hb2) where
merge hb Unaffected = hb
merge Unaffected hb = hb
merge (Harmful h) (Beneficial b) = counter h b
merge (Beneficial b) (Harmful h) = counter h b
merge (Harmful x) (Harmful y) = Harmful (stack x y)
merge (Beneficial x) (Beneficial y) = Beneficial (stack x y)
counter harm bene =
case compare harm bene of
GT -> Harmful (harm - bene)
EQ -> Unaffected
LT -> Beneficial (bene - harm)
validate (Harmful a) = assert (isFinitePositive a) (Harmful a)
validate Unaffected = Unaffected
validate (Beneficial a) = assert (isFinitePositive a) (Beneficial a)
mergeMaybe :: Double -> Maybe Double -> Maybe Double
mergeMaybe 0 mb = mb
mergeMaybe a mb = assert (isFinitePositive a) $
case mb of
Nothing -> Just a
Just b -> assert (isFinitePositive b) $ Just $ stack a b
hobToDouble :: HarmOrBenefit -> Double
hobToDouble (Harmful x) = assert (isFinitePositive x) $ negate x
hobToDouble Unaffected = 0
hobToDouble (Beneficial x) = assert (isFinitePositive x) x
hobFromDouble :: Double -> HarmOrBenefit
hobFromDouble x = assert (isFinite x) $ if x == 0 then Unaffected else
if x < 0 then Harmful (negate x) else Beneficial x
reduceBenefit :: Double -> HarmOrBenefit -> HarmOrBenefit
reduceBenefit x (Beneficial y) =
if x < y then Beneficial (y - x) else Unaffected
reduceBenefit _ hob = hob
reduceHarm :: Double -> HarmOrBenefit -> HarmOrBenefit
reduceHarm x (Harmful y) = if x < y then Harmful (y - x) else Unaffected
reduceHarm _ hob = hob
reduceMaybe :: Double -> Maybe Double -> Maybe Double
reduceMaybe x (Just y) = if x < y then Just (y - x) else Nothing
reduceMaybe _ Nothing = Nothing
-------------------------------------------------------------------------------
data StatusEffects = StatusEffects
{ seBlessing :: HarmOrBenefit,
seDefense :: HarmOrBenefit,
seEntanglement :: Maybe Double, -- rounds remaining
seHaste :: HarmOrBenefit,
seInvisibility :: Invisibility,
seMagicShield :: Maybe Double, -- rounds remaining
seMental :: Maybe (MentalEffect, Double {-rounds remaining-}),
seRegenPoison :: Int } -- health delta remaining
deriving (Read, Show)
initStatusEffects :: StatusEffects
initStatusEffects = StatusEffects
{ seBlessing = Unaffected,
seDefense = Unaffected,
seEntanglement = Nothing,
seHaste = Unaffected,
seInvisibility = NoInvisibility,
seMagicShield = Nothing,
seMental = Nothing,
seRegenPoison = 0 }
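-- | Decay all timed status effects by the given number of rounds, dropping
-- any whose remaining duration runs out.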
decayStatusEffects :: Double -> StatusEffects -> StatusEffects
decayStatusEffects rounds se =
se { seBlessing = decayHarmOrBenefit (seBlessing se),
seDefense = decayHarmOrBenefit (seDefense se),
seEntanglement = decayMaybe (seEntanglement se),
seHaste = decayHarmOrBenefit (seHaste se),
seMagicShield = decayMaybe (seMagicShield se),
seMental =
case seMental se of
Nothing -> Nothing
Just (eff, t) -> decay (Just . (,) eff) Nothing t }
where
decayMaybe = (>>= decay Just Nothing)
decayHarmOrBenefit (Harmful x) = decay Harmful Unaffected x
decayHarmOrBenefit Unaffected = Unaffected
decayHarmOrBenefit (Beneficial x) = decay Beneficial Unaffected x
decay just none x = if x > rounds then just (x - rounds) else none
-------------------------------------------------------------------------------
-- Getters:
seAttackAgilityModifier :: StatusEffects -> Int
seAttackAgilityModifier se =
case seBlessing se of
Harmful _ -> negate 20
Unaffected -> 0
Beneficial _ -> 20
seAttackDamageMultiplier :: StatusEffects -> Double
seAttackDamageMultiplier se =
case seBlessing se of
Harmful _ -> 0.9
Unaffected -> 1
Beneficial _ -> 1.15
seArmorMultiplier :: StatusEffects -> Double
seArmorMultiplier se =
case seDefense se of
Harmful _ -> 1.25
Unaffected -> 1
Beneficial _ -> 0.75
seSpeedMultiplier :: StatusEffects -> Double
seSpeedMultiplier se =
if fmap fst (seMental se) == Just Dazed then 0 else
case seHaste se of
Harmful _ -> 2/3
Unaffected -> 1
Beneficial _ -> 1.5
seMentalEffect :: StatusEffects -> Maybe MentalEffect
seMentalEffect = fmap fst . seMental
seIsEntangled :: StatusEffects -> Bool
seIsEntangled = isJust . seEntanglement
seIsShielded :: StatusEffects -> Bool
seIsShielded = isJust . seMagicShield
seMagicShieldMultiplier :: StatusEffects -> Double
seMagicShieldMultiplier = maybe 1 (const 0.5) . seMagicShield
-------------------------------------------------------------------------------
-- Setters:
seAlterRegenPoison :: (Int -> Int) -> StatusEffects -> StatusEffects
seAlterRegenPoison fn status =
status { seRegenPoison = fn (seRegenPoison status) }
seApplyBlessing :: HarmOrBenefit -> StatusEffects -> StatusEffects
seApplyBlessing hb se =
se { seBlessing = mergeHarmOrBenefit hb (seBlessing se) }
seReduceBlessing :: Double -> StatusEffects -> StatusEffects
seReduceBlessing x se = se { seBlessing = reduceBenefit x (seBlessing se) }
seReduceCurse :: Double -> StatusEffects -> StatusEffects
seReduceCurse x se = se { seBlessing = reduceHarm x (seBlessing se) }
seApplyDefense :: HarmOrBenefit -> StatusEffects -> StatusEffects
seApplyDefense hb se = se { seDefense = mergeHarmOrBenefit hb (seDefense se) }
seReduceDefense :: Double -> StatusEffects -> StatusEffects
seReduceDefense x se = se { seDefense = reduceBenefit x (seDefense se) }
seReduceWeakness :: Double -> StatusEffects -> StatusEffects
seReduceWeakness x se = se { seDefense = reduceHarm x (seDefense se) }
-- | Apply the given number of rounds of entanglement. The argument must be
-- finite and non-negative.
seApplyEntanglement :: Double -> StatusEffects -> StatusEffects
seApplyEntanglement x se =
se { seEntanglement = mergeMaybe x (seEntanglement se) }
-- | Completely remove all entanglement.
sePurgeEntanglement :: StatusEffects -> StatusEffects
sePurgeEntanglement se = se { seEntanglement = Nothing }
seApplyHaste :: HarmOrBenefit -> StatusEffects -> StatusEffects
seApplyHaste hb se = se { seHaste = mergeHarmOrBenefit hb (seHaste se) }
seReduceHaste :: Double -> StatusEffects -> StatusEffects
seReduceHaste x se = se { seHaste = reduceBenefit x (seHaste se) }
seReduceSlow :: Double -> StatusEffects -> StatusEffects
seReduceSlow x se = se { seHaste = reduceHarm x (seHaste se) }
-- | Apply the given number of rounds of magical shielding. The argument must
-- be finite and non-negative.
seApplyMagicShield :: Double -> StatusEffects -> StatusEffects
seApplyMagicShield x se =
se { seMagicShield = mergeMaybe x (seMagicShield se) }
seReduceMagicShield :: Double -> StatusEffects -> StatusEffects
seReduceMagicShield x se =
se { seMagicShield = reduceMaybe x (seMagicShield se) }
seApplyMentalEffect :: MentalEffect -> Double -> StatusEffects -> StatusEffects
seApplyMentalEffect eff dur se = se { seMental = Just me } where
me = case seMental se of
Just (eff', dur') | eff' == eff -> (eff, stack dur dur')
_ -> (eff, dur)
-- | Completely remove all mental effects.
sePurgeMentalEffects :: StatusEffects -> StatusEffects
sePurgeMentalEffects se = se { seMental = Nothing }
seSetInvisibility :: Invisibility -> StatusEffects -> StatusEffects
seSetInvisibility invis se = se { seInvisibility = invis }
seWakeFromDaze :: StatusEffects -> StatusEffects
seWakeFromDaze se =
case seMental se of
Just (Dazed, _) -> se { seMental = Nothing }
_ -> se
sePurgeAllBadEffects :: StatusEffects -> StatusEffects
sePurgeAllBadEffects se =
se { seBlessing = purgeHarmful (seBlessing se),
seDefense = purgeHarmful (seDefense se),
seEntanglement = Nothing, seHaste = purgeHarmful (seHaste se),
seMental = Nothing, seRegenPoison = max 0 (seRegenPoison se) }
where purgeHarmful (Harmful _) = Unaffected
purgeHarmful hob = hob
-------------------------------------------------------------------------------
-- Status deltas:
data StatusDelta = StatusDelta
{ sdBlessing :: !Double,
sdDefense :: !Double,
sdEntanglement :: !Double,
sdHaste :: !Double,
sdMagicShield :: !Double,
sdRegenPoison :: !Int }
deriving Show
zeroStatusDelta :: StatusDelta
zeroStatusDelta = StatusDelta
{ sdBlessing = 0, sdDefense = 0, sdEntanglement = 0, sdHaste = 0,
sdMagicShield = 0, sdRegenPoison = 0 }
addStatusDeltas :: StatusDelta -> StatusDelta -> StatusDelta
addStatusDeltas sd1 sd2 = StatusDelta
{ sdBlessing = sdBlessing sd1 + sdBlessing sd2,
sdDefense = sdDefense sd1 + sdDefense sd2,
sdEntanglement = sdEntanglement sd1 + sdEntanglement sd2,
sdHaste = sdHaste sd1 + sdHaste sd2,
sdMagicShield = sdMagicShield sd1 + sdMagicShield sd2,
sdRegenPoison = sdRegenPoison sd1 + sdRegenPoison sd2 }
sumStatusDeltas :: [StatusDelta] -> StatusDelta
sumStatusDeltas = foldl' addStatusDeltas zeroStatusDelta
-- | Divide the status delta by the given positive integer.
divStatusDelta :: StatusDelta -> Int -> StatusDelta
divStatusDelta sd di = assert (di > 0) $ StatusDelta
{ sdBlessing = sdBlessing sd / dd, sdDefense = sdDefense sd / dd,
sdEntanglement = sdEntanglement sd / dd, sdHaste = sdHaste sd / dd,
sdMagicShield = sdMagicShield sd / dd,
sdRegenPoison = sdRegenPoison sd `quot` di }
where dd = fromIntegral di
-- | Make a status delta by subtracting the second set of status effects from
-- the first.
makeStatusDelta :: StatusEffects -> StatusEffects -> StatusDelta
makeStatusDelta se1 se2 = StatusDelta
{ sdBlessing = hobToDouble (seBlessing se1) - hobToDouble (seBlessing se2),
sdDefense = hobToDouble (seDefense se1) - hobToDouble (seDefense se2),
sdEntanglement = fromMaybe 0 (seEntanglement se1) -
fromMaybe 0 (seEntanglement se2),
sdHaste = hobToDouble (seHaste se1) - hobToDouble (seHaste se2),
sdMagicShield = fromMaybe 0 (seMagicShield se1) -
fromMaybe 0 (seMagicShield se2),
sdRegenPoison = seRegenPoison se1 - seRegenPoison se2 }
applyStatusDelta :: StatusDelta -> StatusEffects -> StatusEffects
applyStatusDelta sd =
(seApplyBlessing $ hobFromDouble $ sdBlessing sd) .
(seApplyDefense $ hobFromDouble $ sdDefense sd) .
(seApplyEntanglement $ sdEntanglement sd) .
(seApplyHaste $ hobFromDouble $ sdHaste sd) .
(seApplyMagicShield $ sdMagicShield sd) .
(seAlterRegenPoison (+ sdRegenPoison sd))
-------------------------------------------------------------------------------
-- Utilities:
-- | When stacking two harms (or two benefits), the resulting duration is the
-- geometric mean of the max and the sum. Thus, if one of the effects is zero
-- (or rather, nearly zero), you get the full effect of the other, but
-- otherwise you get a reduced effect.
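-- For example, @stack 0 x == x@ and @stack x x == sqrt 2 * x@, so two equal
-- durations extend the total by roughly 41% rather than doubling it.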
stack :: Double -> Double -> Double
stack a b = sqrt (max a b * (a + b))
townifyStatus :: StatusEffects -> StatusEffects
townifyStatus status = status
{ seInvisibility = NoInvisibility,
seMental = Nothing }
isFinitePositive :: Double -> Bool
isFinitePositive x = isFinite x && x > 0
-------------------------------------------------------------------------------
| mdsteele/fallback | src/Fallback/State/Status.hs | gpl-3.0 | 14,461 | 51 | 17 | 3,063 | 3,471 | 1,859 | 1,612 | 269 | 10 |
{-# OPTIONS -O2 -Wall #-}
{-# LANGUAGE TemplateHaskell, Rank2Types #-}
module Editor.Data (
Definition(..), atDefBody,
Builtin(..),
Parameter(..),
VariableRef(..), onVariableIRef,
TypedParam(..),
Lambda(..), atLambdaParam, atLambdaBody,
Apply(..), atApplyFunc, atApplyArg,
HoleState(..),
emptyHoleState, atHoleSearchTerm, --atHoleCachedSearchResults,
Expression(..))
where
import Data.Binary (Binary(..))
import Data.Binary.Get (getWord8)
import Data.Binary.Put (putWord8)
import Data.Derive.Binary(makeBinary)
import Data.DeriveTH(derive)
import Data.Store.IRef (IRef)
import qualified Data.AtFieldTH as AtFieldTH
data Parameter = Parameter
deriving (Eq, Ord, Read, Show)
data TypedParam = TypedParam {
tpParam :: IRef Parameter,
tpType :: IRef Expression
}
deriving (Eq, Ord, Read, Show)
data Lambda = Lambda {
lambdaParam :: TypedParam,
lambdaBody :: IRef Expression
}
deriving (Eq, Ord, Read, Show)
data Apply = Apply {
applyFunc :: IRef Expression,
applyArg :: IRef Expression
}
deriving (Eq, Ord, Read, Show)
data HoleState = HoleState
{ holeSearchTerm :: String
--, holeCachedSearchResults :: [VariableRef]
}
deriving (Eq, Ord, Read, Show)
emptyHoleState :: HoleState
emptyHoleState = HoleState ""
data Expression =
ExpressionLambda Lambda |
ExpressionApply Apply |
ExpressionGetVariable VariableRef |
ExpressionHole HoleState |
ExpressionLiteralInteger Integer
deriving (Eq, Ord, Read, Show)
newtype Definition = Definition {
defBody :: IRef Expression
}
deriving (Eq, Ord, Read, Show)
data Builtin = Builtin {
biModule :: [String],
biName :: String
}
deriving (Eq, Ord, Read, Show)
data VariableRef =
ParameterRef (IRef Parameter) |
DefinitionRef (IRef Definition) |
BuiltinRef (IRef Builtin)
deriving (Eq, Ord, Read, Show)
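-- | Apply a function uniformly to the IRef wrapped by any 'VariableRef'.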
onVariableIRef :: (forall a. IRef a -> b) -> VariableRef -> b
onVariableIRef f (ParameterRef i) = f i
onVariableIRef f (DefinitionRef i) = f i
onVariableIRef f (BuiltinRef i) = f i
derive makeBinary ''TypedParam
derive makeBinary ''Apply
derive makeBinary ''Lambda
derive makeBinary ''Builtin
derive makeBinary ''HoleState
derive makeBinary ''Expression
derive makeBinary ''Parameter
derive makeBinary ''Definition
derive makeBinary ''VariableRef
AtFieldTH.make ''Definition
AtFieldTH.make ''Lambda
AtFieldTH.make ''Apply
AtFieldTH.make ''HoleState
| nimia/bottle | codeedit/Editor/Data.hs | gpl-3.0 | 2,385 | 0 | 9 | 387 | 780 | 429 | 351 | 75 | 1 |
{- Grldd is a dependency tracking tool.
Copyright (C) 2009-2013 Laszlo Nagy
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module Ldd
( parse'
, getDependencies
) where
import System.IO (hGetContents)
import System.Exit (ExitCode(..))
import System.Process
import Text.ParserCombinators.Parsec
---------- dependency lookup with exec ldd
getDependencies :: FilePath -> IO [FilePath]
getDependencies fn = do
(_, Just outh, Just errh, pid) <-
createProcess (proc "ldd" [fn]) { std_out = CreatePipe
, std_err = CreatePipe }
out <- hGetContents outh
err <- hGetContents errh
exit <- waitForProcess pid
case exit of
ExitSuccess -> case parse' fn out of
Right result -> return $ filter (not . null) result
Left e -> fail ("Internal error: " ++ show e)
_ -> fail err
---------- parse ldd output
--
-- possible output of successful running
--
-- statically linked
-- path (address)
-- name => path (address)
-- name => (address)
hexadecimal :: Parser ()
hexadecimal = do { _ <- char '0'
; _ <- char 'x'
; _ <- many1 hexDigit
; return ()
}
address :: Parser ()
address = do { _ <- char '('
; _ <- hexadecimal
; _ <- char ')'
; return ()
}
filechar :: Parser Char
filechar = alphaNum <|> oneOf ".,_-+"
filename :: Parser String
filename = many1 filechar
separator :: Parser Char
separator = char '/'
path :: Parser String
path = do { _ <- separator
; f <- sepBy filename separator
; return $ foldr (\w res -> '/' : w ++ res) "" f
}
arrow :: Parser ()
arrow = do { spaces
; _ <- string "=>"
; spaces
; return ()
}
entry :: Parser FilePath
entry = do { _ <- string "statically linked"
; return ""
}
<|>
do { _ <- try path
; spaces
; address
; return ""
}
<|>
do { _ <- filename
; arrow
; do { p <- try path
; spaces
; address
; return p
}
<|> do { address
; return ""
}
}
line :: Parser FilePath
line = do { spaces
; entry
}
<|> return ""
eol :: Parser Char
eol = char '\n'
ldd :: Parser [FilePath]
ldd = sepBy line eol
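-- An illustrative example (made-up input, not from a real ldd run):
--
--   parse' "app" "\tlibm.so.6 => /lib/libm.so.6 (0x00007f0000000000)"
--     == Right ["/lib/libm.so.6"]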
parse' :: String -> String -> Either ParseError [FilePath]
parse' file = parse ldd ("(ldd output on " ++ file ++ ")")
| rizsotto/Grldd | Src/Ldd.hs | gpl-3.0 | 3,353 | 0 | 16 | 1,250 | 756 | 387 | 369 | 72 | 3 |
-- project euler Problem 2
{--
Each new term in the Fibonacci sequence is generated by adding the previous two terms. By starting with 1 and 2, the first 10 terms will be:
1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
Find the sum of all the even-valued terms in the sequence which do not exceed four million.
--}
-- Fibonacci sequence, built in reverse order; note that the head of the
-- result is the first term that exceeds the limit.
genFib :: Integer -> [Integer] -> [Integer]
genFib end list
| (head list) > end = list
| otherwise = genFib end $ (head list) + (head $ tail list) : list
main::IO()
main = do
print $ sum $ filter (not.odd) (genFib 4000000 [2,1])
| goalieca/haskelling | 002.hs | gpl-3.0 | 549 | 2 | 11 | 122 | 122 | 60 | 62 | 6 | 1 |
import Development.Hake
import Development.Hake.FunSetRaw
import HakeDefaults
allT = "all"
mods = [ "fill.o", "rotat.o" ]
main = hake $ defaultRules ++ [
dflt [ allT ]
,
file [ allT ] mods $ const []
,
task "clean" $ [ [ "rm", "-f" ] ++ mods ]
]
| YoshikuniJujo/hake_haskell | examples/nutshell/chap5/2/disp/fig/hakeMain.hs | gpl-3.0 | 256 | 0 | 10 | 58 | 96 | 54 | 42 | 9 | 1 |
{-|
Module : Parser.Functional.Library
Description : Functional parser library.
Copyright : 2014, Jonas Cleve
2015, Tay Phuong Ho
License : GPL-3
-}
module Parser.Functional.Library (
Parser (..),
next, eof, chainl1, mzero, return, mplus,
(<|>), (>>=)
) where
import Prelude (
Monad (..),
Maybe (..)
)
import Control.Applicative (
Alternative(..), Applicative(..)
)
import Data.Functor(Functor(..))
import Control.Monad (
MonadPlus (..), liftM, ap
)
-- * Functional parser library
-- ** Basic definition
-- | Create new parser type with @s@ being the input stream and @r@ being the
-- result the parser generates.
newtype Parser s r = Parser { runParser :: s -> Maybe (r, s) }
{-# ANN module "HLint: ignore Use const" #-}
-- | Let the parser be an instance of the monad to ease creating and joining
-- together parsers.
instance Monad (Parser s) where
-- | This is a parser which does not use its input and returns what is given
return r = Parser (\s -> Just (r, s))
-- | Chaining two parsers together is running the first one and then the
-- second on the output of the first if it was successful.
p >>= f = Parser (\s -> case runParser p s of
Just (r, s') -> runParser (f r) s'
Nothing -> Nothing)
instance Functor (Parser s) where -- § added
fmap = liftM
instance Applicative (Parser s) where -- § added
pure = return
(<*>) = ap
-- | Being an instance of the plus monad gives support for choice and failure.
instance MonadPlus (Parser s) where
-- | Failure: a parser that always fails.
mzero = Parser (\_ -> Nothing)
-- | Choice: Return the result of the first parser if successful, else the
-- result of the second parser.
p1 `mplus` p2 = Parser (\s -> case runParser p1 s of
Nothing -> runParser p2 s
r -> r)
instance Alternative (Parser s) where -- § added
(<|>) = mplus
empty = mzero
-- ** Combinators
-- | Chains a parser @p@ in a left associative way separated by parser @op@
-- which returns the combining function.
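-- For example (illustrative), with @num@ parsing an integer and @plus@
-- parsing a '+' sign and returning (+), @num `chainl1` plus@ parses
-- "1+2+3" as ((1+2)+3).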
chainl1 :: Parser s r -> Parser s (r -> r -> r) -> Parser s r
p `chainl1` op = p >>= rest
where
rest x = (do
f <- op
y <- p
rest (f x y)
) <|> return x
-- -- | Matches the given parser @p@ any number of times (including 0) making it
-- -- @p*@.
-- many :: Parser s r -> Parser s [r]
-- many p = many1 p <|> return []
-- -- | Matches the given parser @p@ any number of times (at least once) making it
-- -- @p+@. This effectively runs @p@ and then @'many' p@.
-- many1 :: Parser s r -> Parser s [r]
-- many1 p = do
-- x <- p
-- xs <- many p
-- return (x:xs)
-- ** Basic parsers
-- | Matches the first element from the input stream.
next :: Parser [r] r
next = Parser (
\s -> case s of
[] -> Nothing
t:rest -> Just (t, rest)
)
-- | Matches only the end of file. Fails otherwise.
eof :: Parser [se] ()
eof = Parser (\s -> case s of
[] -> Just ((), s)
_ -> Nothing)
| Potregon/while | src/Parser/Functional/Library.hs | gpl-3.0 | 3,162 | 0 | 14 | 928 | 661 | 381 | 280 | 48 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Coordinate.Team.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of teams for a user.
--
-- /See:/ <https://developers.google.com/coordinate/ Google Maps Coordinate API Reference> for @coordinate.team.list@.
module Network.Google.Resource.Coordinate.Team.List
(
-- * REST Resource
TeamListResource
-- * Creating a Request
, teamList
, TeamList
-- * Request Lenses
, tlDispatcher
, tlAdmin
, tlWorker
) where
import Network.Google.MapsCoordinate.Types
import Network.Google.Prelude
-- | A resource alias for @coordinate.team.list@ method which the
-- 'TeamList' request conforms to.
type TeamListResource =
"coordinate" :>
"v1" :>
"teams" :>
QueryParam "dispatcher" Bool :>
QueryParam "admin" Bool :>
QueryParam "worker" Bool :>
QueryParam "alt" AltJSON :>
Get '[JSON] TeamListResponse
-- | Retrieves a list of teams for a user.
--
-- /See:/ 'teamList' smart constructor.
data TeamList = TeamList'
{ _tlDispatcher :: !(Maybe Bool)
, _tlAdmin :: !(Maybe Bool)
, _tlWorker :: !(Maybe Bool)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TeamList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tlDispatcher'
--
-- * 'tlAdmin'
--
-- * 'tlWorker'
teamList
:: TeamList
teamList =
TeamList'
{ _tlDispatcher = Nothing
, _tlAdmin = Nothing
, _tlWorker = Nothing
}
-- | Whether to include teams for which the user has the Dispatcher role.
tlDispatcher :: Lens' TeamList (Maybe Bool)
tlDispatcher
= lens _tlDispatcher (\ s a -> s{_tlDispatcher = a})
-- | Whether to include teams for which the user has the Admin role.
tlAdmin :: Lens' TeamList (Maybe Bool)
tlAdmin = lens _tlAdmin (\ s a -> s{_tlAdmin = a})
-- | Whether to include teams for which the user has the Worker role.
tlWorker :: Lens' TeamList (Maybe Bool)
tlWorker = lens _tlWorker (\ s a -> s{_tlWorker = a})
instance GoogleRequest TeamList where
type Rs TeamList = TeamListResponse
type Scopes TeamList =
'["https://www.googleapis.com/auth/coordinate",
"https://www.googleapis.com/auth/coordinate.readonly"]
requestClient TeamList'{..}
= go _tlDispatcher _tlAdmin _tlWorker (Just AltJSON)
mapsCoordinateService
where go
= buildClient (Proxy :: Proxy TeamListResource)
mempty
| rueshyna/gogol | gogol-maps-coordinate/gen/Network/Google/Resource/Coordinate/Team/List.hs | mpl-2.0 | 3,307 | 0 | 14 | 812 | 469 | 278 | 191 | 67 | 1 |
{-
Copyright (C) 2010, 2011, 2012 Jeroen Ketema and Jakob Grue Simonsen
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
-- This module implements a Church-Rosser property.
module ChurchRosser (
churchRosser
) where
import SignatureAndVariables
import PositionAndSubterm
import RuleAndSystem
import Reduction
import ParallelReduction
import Omega
import Compression
import Confluence
import Prelude
import Data.List
-- A conversion is defined as a finite sequence of valleys, i.e., as a sequence
-- of the form: s (->>.<<-)^+ t. The sequence may not be empty; this would make
-- it impossible to output a pair of reductions, as we can no longer compute
-- any initial terms of the reductions.
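--
-- For example, [(d1, e1), (d2, e2)] represents the conversion
--
--     t0 ->>(d1) v1 <<-(e1) t1 ->>(d2) v2 <<-(e2) t2
--
-- between the end terms t0 and t2.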
type Conversion s v r = [(CReduction s v r, CReduction s v r)]
-- The function interleaveList computes the interleaving of a pair of
-- reductions that can be concatenated. The steps of the reduction are returned
-- as a list of lists of steps, where it is ensured for the ith item in the
-- list that all its steps occur at depth at least i.
--
-- The function gather has three arguments: depth, previous steps, and previous
-- parallel steps.
interleaveList :: RewriteSystem s v r
=> CReduction s v r -> CReduction s v r -> [[Step s v]]
interleaveList reduction_0 reduction_1 = gather 0 [] []
where final = finalTerm reduction_1
gather d prev psteps = steps_d : gather (d + 1) total psteps'
where (steps_d, psteps') = filterNeededSteps prev psteps total ps
total = total_0 ++ total_1
total_0 = neededSteps reduction_0 ps'
total_1 = neededSteps reduction_1 ps
ps' = origins reduction_1 ps
ps = posToDepth final d
-- Concatenate the lists produced by interleaveList to obtain all steps.
interleaveSteps :: RewriteSystem s v r
=> CReduction s v r -> CReduction s v r -> [Step s v]
interleaveSteps reduction_0 reduction_1 = concat steps_list
where steps_list = interleaveList reduction_0 reduction_1
-- Compute the modulus using that the ith element of the list produced by
-- interleaveList contains only steps at depth at least i.
interleaveModulus :: RewriteSystem s v r
=> CReduction s v r -> CReduction s v r -> Modulus Omega
interleaveModulus reduction_0 reduction_1 = constructModulus phi
where phi n = genericLength $ concat $ genericTake (n + 1) steps_list
steps_list = interleaveList reduction_0 reduction_1
-- Yield the interleaving of a pair of reductions that can be concatenated,
-- i.e. given s ->>.->> t a reduction s ->> t is returned.
interleave :: RewriteSystem s v r
=> r -> CReduction s v r -> CReduction s v r -> CReduction s v r
interleave _ reduction_0 reduction_1 = CRCons (RCons ts ss) phi
where ts = constructSequence terms
ss = constructSequence steps
terms = rewriteSteps (initialTerm reduction_0) steps
steps = interleaveSteps reduction_0 reduction_1
phi = interleaveModulus reduction_0 reduction_1
-- Church-Rosser of orthogonal, non-collapsing rewrite systems. The function
-- implements the classic proof except for the concatenation of reductions
-- which is replaced by an interleaving scheme.
churchRosser :: (Signature s, Variables v, RewriteSystem s v r)
=> r -> Conversion s v r -> (CReduction s v r, CReduction s v r)
churchRosser system conversion = churchRosser' (map compress conversion)
where compress (s, t) = (compression system s, compression system t)
churchRosser' []
= error "Conversion without reductions"
churchRosser' ((s, t):[])
= (s, t)
churchRosser' ((s_1, t_1):(s_2, t_2):cs)
= churchRosser' ((s_new, t_new) : cs)
where s_new = interleave system s_1 (fst confl)
t_new = interleave system t_2 (snd confl)
confl = confluence system (t_1, s_2)
| jeroenk/iTRSsImplemented | ChurchRosser.hs | agpl-3.0 | 4,556 | 0 | 11 | 1,060 | 830 | 437 | 393 | 54 | 3 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -Wall #-}
module Reactive.Impulse.Internal.Types
where
import Reactive.Impulse.Core
import Reactive.Impulse.Internal.RWST hiding ((<>))
import Reactive.Impulse.STM.Fence
import Control.Applicative
import Control.Concurrent.STM
import Control.Lens
import Data.IntMap (IntMap)
import qualified Data.IntMap as IM
import qualified Data.IntSet as IntSet
import qualified Data.Monoid as Monoid
import Data.Tree
import Data.Semigroup
import Unsafe.Coerce
import System.Mem.Weak
simpleEdgeMap :: Label -> ChainSet -> ChainEdgeMap
simpleEdgeMap fromLbl toSet = ChainEdgeMap $ IM.singleton fromLbl toSet
newtype MkWeak = MkWeak {unMkWeak :: forall a. a -> Maybe (IO ()) -> IO (Weak a)}
data PrevSwchRef = PrevSwchRef
{ _psrEdgeMap :: ChainEdgeMap
, _psrMkWeaks :: IM.IntMap MkWeak
}
emptyPrevSwchRef :: PrevSwchRef
emptyPrevSwchRef = PrevSwchRef mempty mempty
data ChainNode t = ChainNode
{ _cnChildren :: [ t ] -- direct children of this node
, _cnPushSet :: ChainSet -- all transitive children of this node
} deriving Functor
data Chain r a where
CEvent :: Label -> ChainNode (Chain r a) -> Chain r a
CMap :: Label -> (a -> b) -> ChainNode (Chain r b) -> Chain r a
CFilt :: Label -> (a -> Maybe b) -> ChainNode (Chain r b) -> Chain r a
COut :: Label -> Chain (IO ()) (IO ())
CAcc :: Label -> CBehavior a -> Chain r (a->a)
CApply :: Label -> CBehavior (a -> b) -> ChainNode (Chain r b) -> Chain r a
CSwch :: Label -> CBSwitch (CBehavior a) -> Chain r (Behavior a)
CSwchE :: Label -> TVar PrevSwchRef -> CBehavior (Event a)
-> ChainNode (Chain r a) -> Chain r ()
CJoin :: (a ~ Event b) => Label -> TVar PrevSwchRef
-> ChainNode (Chain r b) -> Chain r a
CDyn :: (a ~ SGen b) => Label -> ChainNode (Chain r b) -> Chain r a
-- A CBehavior is the representation of a Behavior within a Chain.
data CBehavior a =
ReadCB (STM a)
| PushCB (TVar a) -- (IO a) ((a -> a) -> IO ())
| SwchCB { _swchcb :: {-# UNPACK #-} !(CBSwitch (CBehavior a)) }
-- only used for dynamic network switching
data CBSwitch a = CBSwitch (TVar a) -- (IO a) (a -> IO ())
instance Labelled (Chain r a) where
label f (CEvent lbl a) = fmap (\l' -> CEvent l' a ) (f lbl)
label f (CMap lbl a b) = fmap (\l' -> CMap l' a b ) (f lbl)
label f (CFilt lbl a b) = fmap (\l' -> CFilt l' a b ) (f lbl)
label f (COut lbl) = fmap (\l' -> COut l' ) (f lbl)
label f (CAcc lbl a) = fmap (\l' -> CAcc l' a ) (f lbl)
label f (CApply lbl a b) = fmap (\l' -> CApply l' a b) (f lbl)
label f (CDyn lbl a) = fmap (\l' -> CDyn l' a ) (f lbl)
label f (CJoin lbl a b) = fmap (\l' -> CJoin l' a b ) (f lbl)
label f (CSwch lbl a) = fmap (\l' -> CSwch l' a ) (f lbl)
label f (CSwchE lbl tv a b) = fmap (\l' -> CSwchE l' tv a b) (f lbl)
type PermHead = Bool
-- wrap chains to put them in a map
data EChain where
EChain :: PermHead -> Chain (IO ()) a -> EChain
permHead :: Lens' EChain PermHead
permHead = lens (\(EChain p _) -> p) (\(EChain _ c) p -> EChain p c)
instance Labelled EChain where
label = from echain . label
-- alt def. would work if the unsafeCoerce ever causes issues,
-- but I think it's ok here...
-- label = lens (\(EChain c) -> c^.label) (\(EChain c) l' -> EChain $ set label l' c)
data EBehavior where
EBehavior :: Label -> CBehavior a -> EBehavior
instance Labelled EBehavior where
label f (EBehavior lbl b) = fmap (\l' -> EBehavior l' b) (f lbl)
-- haha this is super-sketchy.
echain :: Iso' (Chain (IO ()) a) EChain
echain = iso (EChain False) (\(EChain _ c) -> unsafeCoerce c)
-----------------------------------------------------------
-- A map from heads into a boundary region. Used when constructing sub-graphs
newtype BoundaryMap = BoundaryMap (IntMap ChainSet)
-- a DynGraph is a collection of chains that can be compiled
-- and/or executed. It is basically a map of chain heads.
data DynGraph f w = DynGraph
{ _dgHeads :: f (IntMap (w EChain))
, _dgBehaviors :: f (IntMap (w EBehavior))
, _dgBoundMap :: !BoundaryMap
, _dgMkWeaks :: !(IntMap MkWeak)
-- the following may only be available in a BuildingDynGraph
-- a map from each label to its most recently known parent.
, _dgChainHeads :: IntMap Label
}
-- a pure, immutable structure that can be modified with pure code.  Useful for creating
-- the initial graph and sub-graphs.
type BuildingDynGraph = DynGraph Identity Identity
startBuildingGraph :: Applicative t => DynGraph t a
startBuildingGraph =
DynGraph (pure IM.empty) (pure IM.empty) mempty mempty mempty
-- a running graph, using weak references and mutable refs.
type RunningDynGraph = DynGraph TVar Weak
{- for a RunningDynGraph, dgBehaviors need to be weak refs keyed off the
- underlying tvar
- dgHeads should be alive so long as the underlying Event is live.
-}
-- A running graph, frozen so that multiple functions can modify it.
data FrozenDynGraph = FrozenDynGraph
{ _frozenSource :: DynGraph Identity Weak
, _frozenMutGraph :: DynGraph Identity Identity
}
emptyFrozenGraph :: FrozenDynGraph
emptyFrozenGraph = FrozenDynGraph startBuildingGraph startBuildingGraph
-- a Network is a set of input nodes and a RunningDynGraph. These structures
-- are designed to use Weak references so they don't retain the internal graph
-- structure or input nodes, and to be adjustable at runtime.
data Network = Network
{ _nInputs :: NetHeadMap
, _nDynGraph :: RunningDynGraph
, _nActions :: TVar (IO ()) -- actions to be run on init.
, _nTManager :: TransactionManager
}
data EInput where
EInput :: Weak (TVar (a -> IO ())) -> EInput
data PInput where
PInput :: (TVar (a -> IO ())) -> (a -> IO ()) -> PInput
type NetHeadMap = TVar (IntMap EInput)
newtype BoundarySet = BoundarySet ChainSet deriving (Eq, Ord, Monoid, Semigroup)
-- The ModGraphM monad keeps track of a BuildingDynGraph during construction,
-- with access to a FrozenDynGraph.
type ModGraphM = RWST FrozenDynGraph DirtyLog BuildingDynGraph STM
-- The ChainM monad keeps track of a BuildingDynGraph during construction.
-- Only need the W param for CSwchE chains, to mark chains where the output
-- was removed, and to fire the initial event switch.
type ChainM = RWST BoundarySet DirtyLog BuildingDynGraph STM
-- Takes two inputs, the final sink and a value. Performs all real terminal
-- actions and returns an action to be performed afterwards.
type CompiledChain r a = (r -> IO ()) -> a -> STM UpdateBuilder
data UpdateBuilder = UpdateBuilder
{ _readSteps :: [STM UpdateBuilder]
, _modSteps :: [UpdateStep]
, _ubOutputs :: [IO ()]
}
instance Semigroup UpdateBuilder where
l <> r = UpdateBuilder
(_readSteps l <> _readSteps r)
(_modSteps l <> _modSteps r)
(_ubOutputs l <> _ubOutputs r)
instance Monoid UpdateBuilder where
mempty = UpdateBuilder [] [] []
mappend = (<>)
-- There are 2 phases to updates:
-- 1. Write to behaviors/update network
-- 2. Read from behaviors
--
-- We attempt to find a fixpoint of traversing the graph.
--
data UpdateStep =
Mod (ChainM ())
| DynMod (IO (ChainM (), STM UpdateBuilder))
-- for DynMod, we first need to run the ChainM action to update the graph.
-- then we continue with running the update steps. In this case, we need to
-- grab the extra IO layer to do the initial behavior readings after the
-- graph has been updated (since updating the graph may cause events to fire
-- and behaviors to update).
useUpdateStep :: (ChainM () -> b) -> (IO (ChainM (), STM UpdateBuilder) -> b) -> UpdateStep -> b
useUpdateStep f g u = case u of
Mod x -> f x
DynMod akt -> g akt
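-- Example (sketch): classify a step as dynamic or not via the eliminator
-- above.  'isDynStep' is illustrative and not part of the original module.
isDynStep :: UpdateStep -> Bool
isDynStep = useUpdateStep (const False) (const True)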
emptyCompiledChain :: CompiledChain r a
emptyCompiledChain _ _ = return mempty
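-- Example (sketch): a compiled chain that only forwards its input to the
-- final sink as an output action.  'sinkOnlyChain' is illustrative and not
-- part of the original module.
sinkOnlyChain :: CompiledChain a a
sinkOnlyChain sink a = return $ mempty { _ubOutputs = [sink a] }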
$(makePrisms ''ChainEdgeMap)
$(makePrisms ''DirtyChains)
$(makePrisms ''CBehavior)
$(makePrisms ''UpdateStep)
$(makeLenses ''PrevSwchRef)
$(makeLenses ''Network)
$(makeLenses ''FrozenDynGraph)
$(makeLenses ''DynGraph)
$(makeLenses ''DirtyLog)
$(makeLenses ''UpdateBuilder)
dirtyChains :: Iso' ChainSet DirtyChains
dirtyChains = from _DirtyChains
chainEdgeMap :: Iso' (IntMap ChainSet) ChainEdgeMap
chainEdgeMap = from _ChainEdgeMap
markDirty :: Label -> ModGraphM ()
markDirty l = scribe dlChains $ DirtyChains $ IntSet.singleton l
markDirties :: ChainSet -> ModGraphM ()
markDirties = scribe dlChains . DirtyChains
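-- Example (sketch): mark several labels dirty at once; equivalent to folding
-- 'markDirty' over the list.  'markDirtyAll' is illustrative only.
markDirtyAll :: [Label] -> ModGraphM ()
markDirtyAll = markDirties . IntSet.fromList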
$(makeLenses ''ChainNode)
-- update the node children, and also update the pushSet to match.
alterChildren :: ChainNode t -> ([t] -> [Chain r a]) -> ChainNode (Chain r a)
alterChildren cn f =
let newChildren = cn^.cnChildren.to f
in cn & cnChildren .~ newChildren
& cnPushSet .~ (newChildren^.folded.cPushSet')
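-- Example (sketch): prune all children from a node; 'alterChildren' keeps the
-- push-set consistent, so here it becomes empty.  'pruneChildren' is
-- illustrative only.
pruneChildren :: ChainNode (Chain r a) -> ChainNode (Chain r a)
pruneChildren n = alterChildren n (const [])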
instance Semigroup (ChainNode t) where
l <> r = l & cnChildren <>~ r^.cnChildren
& cnPushSet %~ ( flip IntSet.union $ r^.cnPushSet)
instance Monoid.Monoid (ChainNode t) where
mempty = ChainNode [] IntSet.empty
mappend = (<>)
cPushSet' :: IndexPreservingGetter (Chain r a) ChainSet
cPushSet' = to f
where
f (CEvent l n) = IntSet.singleton l <> n^.cnPushSet
f (CMap l _ n) = IntSet.singleton l <> n^.cnPushSet
f (CFilt l _ n) = IntSet.singleton l <> n^.cnPushSet
f (CApply l _ n) = IntSet.singleton l <> n^.cnPushSet
f (CSwchE l _ _ n) = IntSet.singleton l <> n^.cnPushSet
f (CJoin l _ n) = IntSet.singleton l <> n^.cnPushSet
f c = IntSet.singleton (c^.label)
cPushSet :: IndexPreservingGetter (Chain r a) (Maybe ChainSet)
cPushSet = to f
where
f (CEvent _ n) = Just $ n^.cnPushSet
f (CMap _ _ n) = Just $ n^.cnPushSet
f (CFilt _ _ n) = Just $ n^.cnPushSet
f (CApply _ _ n) = Just $ n^.cnPushSet
f (CSwchE _ _ _ n) = Just $ n^.cnPushSet
f (CJoin _ _ n) = Just $ n^.cnPushSet
f _ = Nothing
$(makePrisms ''BoundaryMap)
boundaryMap :: Iso' (IntMap ChainSet) BoundaryMap
boundaryMap = from _BoundaryMap
instance Semigroup BoundaryMap where
l <> r = under boundaryMap (IM.unionWith (<>) (r^.from boundaryMap)) l
instance Monoid BoundaryMap where
mappend = (<>)
mempty = BoundaryMap mempty
$(makePrisms ''BoundarySet)
boundarySet :: Iso' ChainSet BoundarySet
boundarySet = from _BoundarySet
cBoundarySet :: IndexPreservingGetter EChain BoundarySet
cBoundarySet = to f.boundarySet
where
f (EChain _ (CEvent l n)) = IntSet.insert l $ n^.cnPushSet
f (EChain _ (CMap l _ n)) = IntSet.insert l $ n^.cnPushSet
f (EChain _ (CFilt l _ n)) = IntSet.insert l $ n^.cnPushSet
f (EChain _ (CApply l _ n)) = IntSet.insert l $ n^.cnPushSet
f (EChain _ (CJoin l _ n)) = IntSet.insert l $ n^.cnPushSet
f e = IntSet.singleton (e^.label)
chainLabelTree :: Chain r a -> Tree String
chainLabelTree c =
Node {rootLabel = thisLbl ++ (c^.label.to show), subForest = mkForest }
where
(thisLbl,mkForest) = case c of
(CEvent _ n) -> ("CEvent ", map chainLabelTree $ n^.cnChildren)
(CMap _ _ n) -> ("CMap " , map chainLabelTree $ n^.cnChildren)
(CFilt _ _ n) -> ("CFilt " , map chainLabelTree $ n^.cnChildren)
(CApply _ _ n) -> ("CApply ", map chainLabelTree $ n^.cnChildren)
(CSwchE _ _ _ n) -> ("CSwchE ", map chainLabelTree $ n^.cnChildren)
(CJoin _ _ n) -> ("CJoin " , map chainLabelTree $ n^.cnChildren)
CDyn _ n -> ("CDyn ", map chainLabelTree $ n^.cnChildren)
COut{} -> ("COut " , [])
CSwch{} -> ("CSwch ", [])
CAcc{} -> ("CAcc " , [])
showChainTree :: Chain r a -> String
showChainTree = drawTree . chainLabelTree
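-- Example (sketch): dump a chain's label tree to stdout while debugging.
-- 'printChainTree' is illustrative and not part of the original module.
printChainTree :: Chain r a -> IO ()
printChainTree = putStrLn . showChainTree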
takeModStep :: UpdateBuilder -> (UpdateBuilder, Maybe UpdateStep)
takeModStep ub = case ub^.modSteps of
[] -> (ub,Nothing)
(x:xs) -> (ub & modSteps .~ xs, Just x)
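-- Example (sketch): drain every pending 'UpdateStep' from an 'UpdateBuilder',
-- returning them in order.  'drainModSteps' is illustrative and not part of
-- the original module.
drainModSteps :: UpdateBuilder -> (UpdateBuilder, [UpdateStep])
drainModSteps = go []
  where
    go acc ub = case takeModStep ub of
        (ub', Nothing)   -> (ub', reverse acc)
        (ub', Just step) -> go (step:acc) ub'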
| JohnLato/impulse | src/Reactive/Impulse/Internal/Types.hs | lgpl-3.0 | 12,111 | 0 | 13 | 2,739 | 3,909 | 2,030 | 1,879 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Spark.Core.ColumnSpec where
import Test.Hspec
import Data.List.NonEmpty(NonEmpty( (:|) ))
import Spark.Core.Context
import Spark.Core.Dataset
import Spark.Core.Column
import Spark.Core.Row
import Spark.Core.Functions
import Spark.Core.ColumnFunctions
import Spark.Core.SimpleAddSpec(run)
import Spark.Core.Internal.LocalDataFunctions(iPackTupleObs)
import Spark.Core.Internal.DatasetFunctions(untypedLocalData)
-- A simple centre-and-scale transform over a column.  Note that 'stdDev' is
-- really the population variance (no square root is taken); for the [-1, 1]
-- input exercised in the "mean" spec below, variance and standard deviation
-- are both 1, so the expected results are unaffected.
myScaler :: Column ref Double -> Column ref Double
myScaler col =
  let cnt = asDouble (countCol col)
      m = sumCol col / cnt
      centered = col .- m
      stdDev = sumCol (centered * centered) / cnt
  in centered ./ stdDev
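-- Usage sketch (mirrors the "mean" spec below):
--   res <- exec1Def (collect (myScaler (asCol ds)))   -- ds = dataset [-1, 1]
--   res `shouldBe` [-1, 1]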
spec :: Spec
spec = do
describe "local data operations" $ do
run "broadcastPair_struct" $ do
let ds = dataset [1] :: Dataset Int
let cnt = countCol (asCol ds)
let c = collect (asCol ds .+ cnt)
res <- exec1Def c
res `shouldBe` [2]
run "LocalPack (doubles)" $ do
let x = untypedLocalData (1 :: LocalData Double)
let x2 = iPackTupleObs (x :| [x])
res <- exec1Def x2
res `shouldBe` rowArray [DoubleElement 1, DoubleElement 1]
run "LocalPack" $ do
let x = untypedLocalData (1 :: LocalData Int)
let x2 = iPackTupleObs (x :| [x])
res <- exec1Def x2
res `shouldBe` rowArray [IntElement 1, IntElement 1]
run "BroadcastPair" $ do
let x = 1 :: LocalData Int
let ds = dataset [2, 3] :: Dataset Int
let ds2 = broadcastPair ds x
res <- exec1Def (collect (asCol ds2))
res `shouldBe` [(2, 1), (3, 1)]
-- TODO: this combines a lot of elements together.
describe "columns - integration" $ do
run "mean" $ do
let ds = dataset [-1, 1] :: Dataset Double
let c = myScaler (asCol ds)
res <- exec1Def (collect c)
res `shouldBe` [-1, 1]
| krapsh/kraps-haskell | test-integration/Spark/Core/ColumnSpec.hs | apache-2.0 | 1,901 | 0 | 19 | 453 | 690 | 352 | 338 | 52 | 1 |
import Prelude ((+),(-),(==),(/=),(*),($),(.),(++),(&&),(||),(!!),div,mod,map,take,splitAt,replicate,length,fromIntegral,drop,head,Eq,Show)
import Data.ByteString (ByteString(..),append,cons,pack)
import Data.Word (Word8(..))
import Crypto.Hash.SHA256
-- Bits are represented as 'Word8' values expected to be 0 or 1.
type Bool = Word8
-- A trie node: 'h' = node hash, 's' = edge bit string, 'v' = value, 'l'/'r' = children.
data Node = Terminal {h::ByteString, s::[Bool], v::ByteString}
          | Branch {h::ByteString, s::[Bool], l::Node, r::Node} deriving (Eq,Show)
-- Pack exactly eight bits (most significant first) into one byte.
byte [a,b,c,d, e,f,g,h] = 0x80*a + 0x40*b + 0x20*c + 0x10*d + 8*e + 4*f + 2*g + h
-- Pack a bit list into bytes, zero-padding the final byte.
packBits bs = if bs == [] then pack [] else (byte l) `cons` (packBits rr)
    where (l, rr) = splitAt 8 $ bs ++ replicate (7 - (length bs - 1) `mod` 8) 0
-- Length-prefixed encoding of a bit string: byte count, bit count `mod` 8, packed bits.
bitArr bs = ((l+7)`div`8) `cons` (l`mod`8) `cons` (packBits bs) where l = fromIntegral (length bs)
-- Smart constructors that compute node hashes; a terminal hashes 64 zero bytes
-- where a branch hashes its two 32-byte child hashes.
terminal bs v = Terminal (hash $ bitArr bs `append` (pack $ replicate 64 0) `append` v) bs v
branch bs l r = Branch (hash $ bitArr bs `append` (h l) `append` (h r)) bs l r
-- Replace a node's edge bits, recomputing its hash.
withS s (Terminal _ _ v) = terminal s v
withS s (Branch _ _ l r) = branch s l r
-- Longest common prefix of two bit strings.
commonPrefix (x:xs) (y:ys) = if x == y then x : commonPrefix xs ys else []
commonPrefix _ _ = []
-- The empty trie: an all-zero hash, no key bits, no value.
empty = Terminal (pack $ replicate 32 0) [] (pack [])
set k v n = if s n == k || n == empty then terminal k v else
    if s n == common then case k!!(length common) of -- (k,v) goes into one of n's children; n keeps its prefix
{0 -> branch common (set new v (l n)) (r n); 1 -> branch common (l n) (set new v (r n))}
    else case k!!(length common) of -- k branches off somewhere along (s n)
{0 -> branch common (terminal new v) (withS old n); 1 -> branch common (withS old n) (terminal new v)}
where new = drop (length common+1) k; old = drop (length common+1) (s n); common = commonPrefix k (s n)
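-- Example (sketch, not part of the original utility): insert two keys sharing
-- the prefix [0,1] and read back the root hash.  Keys and values are arbitrary.
exampleTree :: Node
exampleTree = set [0,1,1] (pack [2]) (set [0,1,0] (pack [1]) empty)
exampleRoot :: ByteString
exampleRoot = h exampleTree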
| andres-erbsen/dename | utils/hsverify/cbht.hs | apache-2.0 | 1,686 | 2 | 18 | 333 | 1,014 | 566 | 448 | 22 | 5 |