Dataset schema (column: type, observed range):
  code: string, lengths 5 to 1.03M
  repo_name: string, lengths 5 to 90
  path: string, lengths 4 to 158
  license: string, 15 classes
  size: int64, 5 to 1.03M
  n_ast_errors: int64, 0 to 53.9k
  ast_max_depth: int64, 2 to 4.17k
  n_whitespaces: int64, 0 to 365k
  n_ast_nodes: int64, 3 to 317k
  n_ast_terminals: int64, 1 to 171k
  n_ast_nonterminals: int64, 1 to 146k
  loc: int64, -1 to 37.3k
  cycloplexity: int64, -1 to 1.31k
-- mathsprimitives.hs

module Math.MathsPrimitives where

-- primitive operations on sequences (lists) of numbers
-- used in implementation of vectors, matrices, polynomials, cyclotomic fields, etc

import List (transpose)

infixr 8 */, *//
infixl 7 $*, $., $$*
infixl 6 $+, $-, $$+, $$-

-- addition of sequences
(a:as) $+ (b:bs) = (a+b) : (as $+ bs)
as $+ [] = as
[] $+ bs = bs

as $- bs = as $+ (map negate bs)

-- scalar multiplication
a */ bs = map (a*) bs

-- polynomial multiplication
[] $* _ = []
_ $* [] = []
(a:as) $* (b:bs) =
    [a*b] $+ shift (map (a*) bs $+ map (*b) as) $+ shift (shift (as $* bs))

shift [] = []
shift as = 0 : as

-- dot product of vectors (also called inner or scalar product)
u $. v = sum (zipWith (*) u v)

-- tensor product of vectors (also called outer or matrix product)
(a:as) $** v = map (a*) v : (as $** v)
[] $** _ = []

-- matrix operations
a $$+ b = zipWith (zipWith (+)) a b
a $$- b = zipWith (zipWith (-)) a b
a $$* b = doMultMx a (transpose b)
    where
        doMultMx [] _ = []
        -- doMultMx (u:us) bT = map (u $.) bT : doMultMx us bT
        doMultMx (u:us) bT = ((:) $! (map (u $.) bT)) (doMultMx us bT)

-- scalar multiplication
k *// m = map (map (k*)) m

fMatrix f n = [[f i j | j <- [1..n]] | i <- [1..n]]

partialSums xs = scanl1 (+) xs

partialProducts xs = scanl1 (*) xs

factorials :: [Integer]
factorials = scanl (*) 1 [1..]

-- A class for types which represent mathematical functions
class FunctionRep f where
    compose :: f -> f -> f
    deriv :: f -> f
    integ :: f -> f
    nthderiv :: Int -> f -> f
    nthderiv n f = iterate deriv f !! n

{-
-- action on the left
[] <. _ = []
(row:rows) <. xs = sum (zipWith (*) row xs) : (rows <. xs)

-- action on the right
v .> m = doApplyRightMx [] v m
    where
        doApplyRightMx ys [] [] = foldl1 (zipWith (+)) ys
        doApplyRightMx ys (x:xs) (row:rows) =
            doApplyRightMx (map (x *) row : ys) xs rows
-}
repo_name: nfjinjing/bench-euler
path: src/Math/MathsPrimitives.hs
license: bsd-3-clause
size: 2,012 | n_ast_errors: 0 | ast_max_depth: 12 | n_whitespaces: 563 | n_ast_nodes: 728 | n_ast_terminals: 386 | n_ast_nonterminals: 342 | loc: 35 | cycloplexity: 2
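A minimal usage sketch for the operators in the Math.MathsPrimitives cell above. The binding names below are mine, and the results in comments were worked out by hand from the definitions, assuming the module loads as written:

-- Illustrative only: exercises $+, $*, $. and $$* from the module above.
module OperatorDemo where

import Math.MathsPrimitives

exampleSum :: [Integer]
exampleSum = [1,2,3] $+ [10,20]                 -- [11,22,3]; the longer tail is kept as-is

examplePoly :: [Integer]
examplePoly = [1,2] $* [1,1]                    -- (1 + 2x)(1 + x) = 1 + 3x + 2x^2 = [1,3,2]

exampleDot :: Integer
exampleDot = [1,2,3] $. [4,5,6]                 -- 4 + 10 + 18 = 32

exampleMatrix :: [[Integer]]
exampleMatrix = [[1,0],[0,1]] $$* [[1,2],[3,4]] -- the identity matrix leaves the right operand unchanged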
{- |
Module      :  ./GUI/UDGUtils.hs
Description :  wrapper for uDrawGraph utilities from the uniform workbench
Copyright   :  (c) Christian Maeder DFKI Bremen 2008
License     :  GPLv2 or higher, see LICENSE.txt

Maintainer  :  [email protected]
Stability   :  provisional
Portability :  non-portable

uDrawGraph display
-}

module GUI.UDGUtils
  ( module X
  ) where

import UDrawGraph.Graph as X
import UDrawGraph.Basic as X
import Graphs.GraphDisp as X
import Graphs.GraphConfigure as X
import Util.Broadcaster as X
import Util.Sources as X
repo_name: spechub/Hets
path: GUI/UDGUtils.hs
license: gpl-2.0
size: 551 | n_ast_errors: 0 | ast_max_depth: 4 | n_whitespaces: 100 | n_ast_nodes: 55 | n_ast_terminals: 39 | n_ast_nonterminals: 16 | loc: 8 | cycloplexity: 0
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module      :  $Header$
Description :  Abstract syntax of CspCASL
Copyright   :  (c) Markus Roggenbach and Till Mossakowski and Uni Bremen 2004
License     :  GPLv2 or higher, see LICENSE.txt

Maintainer  :  [email protected]
Stability   :  provisional
Portability :  portable

Abstract syntax of CSP-CASL processes.
-}

module CspCASL.AS_CspCASL where

import Common.Id
import Common.AS_Annotation (Annoted)

import CASL.AS_Basic_CASL (SORT, VAR, VAR_DECL)

import CspCASL.AS_CspCASL_Process

import Data.Data

-- DrIFT command
{-! global: GetRange !-}

data CspBasicExt
  = Channels [Annoted CHANNEL_DECL] Range
  | ProcItems [Annoted PROC_ITEM] Range
  deriving (Show, Typeable, Data)

data CHANNEL_DECL = ChannelDecl [CHANNEL_NAME] SORT
  deriving (Show, Typeable, Data)

data PROC_ITEM
  = Proc_Decl PROCESS_NAME PROC_ARGS PROC_ALPHABET
  | Proc_Defn PROCESS_NAME [VAR_DECL] PROC_ALPHABET PROCESS
  | Proc_Eq PARM_PROCNAME PROCESS
  deriving (Show, Typeable, Data)

data PARM_PROCNAME = ParmProcname FQ_PROCESS_NAME [VAR]
  deriving (Show, Typeable, Data)
repo_name: mariefarrell/Hets
path: CspCASL/AS_CspCASL.der.hs
license: gpl-2.0
size: 1,114 | n_ast_errors: 0 | ast_max_depth: 8 | n_whitespaces: 187 | n_ast_nodes: 203 | n_ast_terminals: 116 | n_ast_nonterminals: 87 | loc: 21 | cycloplexity: 0
{-# LANGUAGE DeriveDataTypeable #-}

module Repos where

import Control.Arrow (left)
import Control.Applicative ((<*>))
import Data.Binary
import Data.Binary.Generic
import Data.Data
import Data.Functor ((<$>))
import qualified Github.Repos as GH
import qualified Github.Users as GH
import qualified Github.Users.Followers as GH
import System.Environment (getArgs)

data GHProfile = GHProfile [GH.Repo] [GH.GithubOwner] [GH.GithubOwner] GH.DetailedOwner
  deriving (Show, Data, Typeable)

instance (Binary GHProfile) where
  get = getGeneric
  put = putGeneric

fetchGHProfile :: String -> IO (Either String GHProfile)
fetchGHProfile username = do
  possibleRepos <- GH.userRepos username GH.Owner
  possibleFollowers <- GH.usersFollowing username
  possibleFollowing <- GH.usersFollowedBy username
  possibleUserInfo <- GH.userInfoFor username
  return $ left show $
    GHProfile <$> possibleRepos
              <*> possibleFollowers
              <*> possibleFollowing
              <*> possibleUserInfo

main :: IO ()
main = do
  [username] <- getArgs
  result <- fetchGHProfile username
  print result
repo_name: hackclub/orthanc
path: Repos.hs
license: gpl-3.0
size: 1,162 | n_ast_errors: 1 | ast_max_depth: 13 | n_whitespaces: 250 | n_ast_nodes: 309 | n_ast_terminals: 166 | n_ast_nonterminals: 143 | loc: 33 | cycloplexity: 1
module TupleReorder1 where

f :: (Int,Bool) -> Int
f (i,b) = if b then i else 0

main :: Int
main = f (True,5)
repo_name: roberth/uu-helium
path: test/typeerrors/Heuristics/TupleReorder1.hs
license: gpl-3.0
size: 111 | n_ast_errors: 0 | ast_max_depth: 6 | n_whitespaces: 26 | n_ast_nodes: 60 | n_ast_terminals: 36 | n_ast_nonterminals: 24 | loc: 5 | cycloplexity: 2
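Judging by its path (test/typeerrors/Heuristics/), the file above is presumably an intentional type-error test for the Helium compiler: f expects (Int,Bool) but is applied to (True,5). For reference only, a hedged sketch of the well-typed variant simply reorders the tuple components; the module name here is hypothetical:

-- Hypothetical corrected version, for illustration; the original is deliberately ill-typed.
module TupleReorder1Fixed where

f :: (Int,Bool) -> Int
f (i,b) = if b then i else 0

main :: Int
main = f (5,True)   -- components now match the order the signature expects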
{-# LANGUAGE TypeSynonymInstances #-}

module Rewriting.DPO.TypedGraph.GraphProcess.OccurrenceRelation
  ( RelationItem(..)
  , Relation
  , AbstractRelation
  , AbstractType (..)
  , isRuleAndElement
  , filterRulesOccurrenceRelation
  , filterElementsOccurrenceRelation
  , filterCreationRelation
  , filterDeletionRelation
  , isCreation
  , isDeletion
  , isNode
  , happensAfterAction
  , happensBeforeAction
  , relatedItens
  , neverDeleted
  , present
  , findOrder
  , buildTransitivity
  , relationToString
  , restrictionToString)

where

import Data.Graphs (EdgeId, NodeId)
import Data.Maybe (isJust, isNothing)
import Data.Partition
import Data.Set as S
import Util.Closures as C

data RelationItem = Node NodeId
                  | Edge EdgeId
                  | Rule String
                  deriving (Eq, Ord, Show)

type Relation = S.Set(RelationItem, RelationItem)

data AbstractType = AbstractProduceForbid | AbstractDeleteForbid deriving (Eq, Ord, Show)

type AbstractRelation = S.Set (AbstractType, (RelationItem, RelationItem), (RelationItem, RelationItem))

relationToString :: Relation -> String
relationToString rel = "[" ++ concatSet (toList rel) ++"]"
  where
    concatSet []     = ""
    concatSet [x]    = format x
    concatSet (x:xs) = format x ++ "," ++ concatSet xs
    format (a,b) = "(" ++ show a ++ " < " ++ show b ++")"

restrictionToString :: AbstractRelation -> String
restrictionToString res = "[" ++ concatSet (toList res) ++"]"
  where
    concatSet []     = ""
    concatSet [x]    = format x
    concatSet (x:xs) = format x ++ ",\n" ++ concatSet xs
    format (t,(a,b),(_,d)) = "(" ++ show t ++ ": " ++ show b ++ " not in between "++ "[" ++ show a ++ " < " ++ show d ++"])"

isRuleAndElement :: (RelationItem, RelationItem) -> Bool
isRuleAndElement (a,b) = case (a,b) of
                           (Rule _, Rule _) -> False
                           (Rule _, _)      -> True
                           (_, Rule _)      -> True
                           _                -> False

filterRulesOccurrenceRelation :: Relation -> Relation
filterRulesOccurrenceRelation = S.filter bothRules
  where
    bothRules (x,y) = case (x,y) of
                        (Rule _, Rule _) -> True
                        _                -> False

filterElementsOccurrenceRelation :: Relation -> Relation
filterElementsOccurrenceRelation = S.filter bothElements
  where
    bothElements (x,y) = case (x,y) of
                           (Rule _, _) -> False
                           (_, Rule _) -> False
                           _           -> True

filterCreationRelation :: Relation -> Relation
filterCreationRelation = S.filter bothElements
  where
    bothElements (x,y) = case (x,y) of
                           (Rule _, Node _) -> True
                           (Rule _, Edge _) -> True
                           _                -> False

filterDeletionRelation :: Relation -> Relation
filterDeletionRelation = S.filter bothElements
  where
    bothElements (x,y) = case (x,y) of
                           (Node _, Rule _) -> True
                           (Edge _, Rule _) -> True
                           _                -> False

isCreation :: (RelationItem, RelationItem) -> Bool
isCreation (a,b) = case (a,b) of
                     (Rule _, Node _) -> True
                     (Rule _, Edge _) -> True
                     _                -> False

isDeletion :: (RelationItem, RelationItem) -> Bool
isDeletion (a,b) = case (a,b) of
                     (Node _, Rule _) -> True
                     (Edge _, Rule _) -> True
                     _                -> False

isNode :: RelationItem -> Bool
isNode x = case x of
             Node _ -> True
             _      -> False

-- | Tests whether an item appears before a rule in a given occurrence relation
happensBeforeAction :: Relation -> RelationItem -> String -> Bool
happensBeforeAction rel item name = member (item, Rule name) rel

relatedItens :: Relation -> (RelationItem, RelationItem) -> Bool
relatedItens rel (i1,i2) = member (i1,i2) rel || member (i2,i1) rel

-- | Tests whether an item appears after a rule in a given occurrence relation
happensAfterAction :: Relation -> RelationItem -> String -> Bool
happensAfterAction rel item name = member (Rule name,item) rel

-- | Given a relation item @i@ and the deletion relation of a doubly typed grammar,
-- it returns True if the item is deleted by some rule in this relation and False otherwise
neverDeleted :: RelationItem -> Relation -> Bool
neverDeleted e rel = isNothing (lookup e $ toList rel)

present :: RelationItem -> Relation -> Bool
present e rel = isJust (lookup e $ toList rel)

findOrder :: Relation -> Set RelationItem -> Maybe [RelationItem]
findOrder = tsort

buildTransitivity :: Relation -> Relation
buildTransitivity = monadToSet . transitiveClosure . setToMonad
repo_name: rodrigo-machado/verigraph
path: src/library/Rewriting/DPO/TypedGraph/GraphProcess/OccurrenceRelation.hs
license: gpl-3.0
size: 4,851 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 1,474 | n_ast_nodes: 1,420 | n_ast_terminals: 771 | n_ast_nonterminals: 649 | loc: 104 | cycloplexity: 4
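A small, hedged sketch of how the relation queries in the cell above might be used. It sticks to Rule items so it does not depend on the NodeId/EdgeId constructors from verigraph's Data.Graphs, and the demo names are mine:

-- Illustrative only; types and functions come from the module above.
module OccurrenceRelationDemo where

import qualified Data.Set as S
import Rewriting.DPO.TypedGraph.GraphProcess.OccurrenceRelation

demoRel :: Relation
demoRel = S.fromList [ (Rule "r1", Rule "r2")
                     , (Rule "r2", Rule "r3") ]

demoBefore :: Bool
demoBefore = happensBeforeAction demoRel (Rule "r1") "r2"   -- True: (Rule "r1", Rule "r2") is in the relation

demoRules :: Relation
demoRules = filterRulesOccurrenceRelation demoRel           -- keeps both pairs, since every item is a Rule

demoString :: String
demoString = relationToString demoRel                       -- renders each pair as "(a < b)" inside brackets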
{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE OverloadedStrings #-} module Database.Design.Ampersand.Output.ToPandoc.ChapterProcessAnalysis where import Database.Design.Ampersand.Output.ToPandoc.SharedAmongChapters import Data.List --DESCR -> the process analysis contains a section for each process in the fSpec -- If an Ampersand script contains no reference to any role whatsoever, a process analysis is meaningless. -- In that case it will not be printed. To detect whether this is the case, we can look whether the -- mayEdit attributes remain empty. noProcesses :: FSpec -> Bool noProcesses fSpec = null (fRoles fSpec) chpProcessAnalysis :: Int -> FSpec -> Blocks chpProcessAnalysis lev fSpec = if null procs then mempty else headerBlocks <> roleRuleBlocks <> fromList roleRelationBlocks <> processSections where procs = if null (themes fSpec) then vpatterns fSpec else [ p | p<-vpatterns fSpec, name p `elem` themes fSpec ] processSections :: Blocks processSections = mconcat (procSections procs) headerBlocks :: Blocks headerBlocks = (chptHeader (fsLang fSpec) ProcessAnalysis) <> purposes2Blocks (getOpts fSpec) purps <> -- This explains the purpose of this context. fromList( [ case fsLang fSpec of Dutch -> Plain [ Str $ upCap (name fSpec)++" benoemt geen enkele rol. " , Str "Een generieke rol, User, zal worden gedefinieerd om al het werk te doen wat in het bedrijfsproces moet worden uitgevoerd." ] English -> Plain [ Str $ upCap (name fSpec)++" does not mention any role. " , Str "A generic role, User, will be defined to do all the work that is necessary in the business process." ] | null (fRoles fSpec)] ++ [ case fsLang fSpec of Dutch -> Plain [ Str $ upCap (name fSpec)++" specificeert niet welke rollen de inhoud van welke relaties mogen wijzigen. " , Str "" ] English -> Plain [ Str $ upCap (name fSpec)++" does not specify which roles may change the contents of which relations. " , Str "" ] | null (fRoleRels fSpec)]) where purps = purposesDefinedIn fSpec (fsLang fSpec) fSpec roleRuleBlocks :: Blocks roleRuleBlocks = if null (fRoleRuls fSpec) && (not.null.vrules) fSpec then mempty else (case fsLang fSpec of Dutch -> para ( (str.upCap.name) fSpec <> " kent regels aan rollen toe. " <> "De volgende tabel toont de regels die door een bepaalde rol worden gehandhaafd." ) English -> para ( (str.upCap.name) fSpec <> " assigns rules to roles. " <> "The following table shows the rules that are being maintained by a given role." ) -- the table containing the role-rule assignments )<> fromList [ Para $ [ RawInline (Format "latex") "\\begin{tabular}{|l|l|}\\hline\n" , case fsLang fSpec of Dutch -> RawInline (Format "latex") "Rol&Regel\\\\ \\hline\n" English -> RawInline (Format "latex") "Role&Rule\\\\ \\hline\n" ]++ [ RawInline (Format "latex") $ intercalate "\\\\ \\hline\n " [ latexEscShw (name role)++" & "++latexEscShw (name r)++ concat[ "\\\\\n &"++latexEscShw (name rul) | rul<-map snd (tail rrClass)] | rrClass<-eqCl fst (fRoleRuls fSpec) , let role=fst (head rrClass), let r=snd (head rrClass) ] ]++ [ RawInline (Format "latex") "\\\\ \\hline\n\\end{tabular}" ] ] -- the table containing the role-relation assignments roleRelationBlocks :: [Block] roleRelationBlocks = if null (fRoleRels fSpec) then [] else [ case fsLang fSpec of Dutch -> Para [ Str $ upCap (name fSpec)++" kent rollen aan relaties toe. " , Str "De volgende tabel toont de relaties waarvan de inhoud gewijzigd kan worden door iemand die een bepaalde rol vervult." ] English -> Para [ Str $ upCap (name fSpec)++" assigns roles to relations. 
" , Str "The following table shows the relations, the content of which can be altered by anyone who fulfills a given role." ] , Para $ [ RawInline (Format "latex") "\\begin{tabular}{|l|l|}\\hline\n" , RawInline (Format "latex") (case fsLang fSpec of Dutch -> "Rol&Relatie\\\\ \\hline\n" English -> "Role&Relation\\\\ \\hline\n") ]++ [ RawInline (Format "latex") $ intercalate "\\\\ \\hline\n " [ name role++" & $"++showMath r++"$"++ concat[ "\\\\\n &$"++showMath (snd rs)++"$" | rs<-tail rrClass] | rrClass<-eqCl fst (fRoleRels fSpec) , let role=fst (head rrClass), let r=snd (head rrClass) ] ]++ [ RawInline (Format "latex") "\\\\ \\hline\n" | not (null rolelessRels)]++ [ RawInline (Format "latex") $ intercalate "\\\\\n " [ "&$"++showMath d++"$" | d<-rolelessRels] | not (null rolelessRels)]++ [ RawInline (Format "latex") "\\\\ \\hline\n\\end{tabular}" ] ] where rolelessRels = [ d | d<-vrels fSpec, d `notElem` (nub.map snd) (fRoleRels fSpec) ] -- the sections in which processes are analyzed procSections :: [Pattern] -> [Blocks] procSections fprocs = iterat [fp |fp<-fprocs, (not.null.udefrules) fp] 1 declaredConcepts declaredRelations where declaredRelations = (concatMap relsDefdIn.vpatterns) fSpec declaredConcepts = (concs.vpatterns) fSpec iterat :: [Pattern] -> Int -> [A_Concept] -> [Declaration] -> [Blocks] iterat [] _ _ _ = mempty iterat (fproc:fps) i seenConcepts seenDeclarations = ( headerWithLabel (XRefProcessAnalysis fproc) (lev+2) (text(name fproc)) <> (purposes2Blocks (getOpts fSpec) (purposesDefinedIn fSpec (fsLang fSpec) fproc)) -- <> (txtProcessModel fproc) <> (if null sctRules then mempty else definitionList sctRules) ): iterat fps i' seenCrs seenDrs where sctRules :: [(Inlines, [Blocks])] (sctRules,i',seenCrs,seenDrs) = dpRule' fSpec(udefrules fproc) i seenConcepts seenDeclarations
repo_name: guoy34/ampersand
path: src/Database/Design/Ampersand/Output/ToPandoc/ChapterProcessAnalysis.hs
license: gpl-3.0
size: 6,645 | n_ast_errors: 0 | ast_max_depth: 24 | n_whitespaces: 2,076 | n_ast_nodes: 1,581 | n_ast_terminals: 805 | n_ast_nonterminals: 776 | loc: 103 | cycloplexity: 13
{-# LANGUAGE TypeSynonymInstances,GeneralizedNewtypeDeriving,MultiParamTypeClasses,FlexibleInstances #-}
module MigrationsTest
    ( tests
    )
where

import Test.HUnit
import Control.Monad.Identity ( runIdentity, Identity )
import qualified Data.Map as Map
import Data.Time.Clock ( UTCTime )

import Database.Schema.Migrations
import Database.Schema.Migrations.Store
import Database.Schema.Migrations.Migration
import Database.Schema.Migrations.Backend

tests :: [Test]
tests = migrationsToApplyTests

type TestBackend = [Migration]

newtype TestM a = TestM (Identity a) deriving (Monad)

instance MonadMigration TestM where
    getCurrentTime = undefined

instance Backend TestBackend TestM where
    getBootstrapMigration _ = undefined
    isBootstrapped _ = return True
    applyMigration _ _ = undefined
    revertMigration _ _ = undefined
    getMigrations b = return $ map mId b

-- |Given a backend and a store, what are the list of migrations
-- missing in the backend that are available in the store?
type MissingMigrationTestCase = (MigrationMap, TestBackend, Migration, [Migration])

ts :: UTCTime
ts = read "2009-04-15 10:02:06 UTC"

blankMigration :: Migration
blankMigration = Migration { mTimestamp = ts
                           , mId = undefined
                           , mDesc = Nothing
                           , mApply = ""
                           , mRevert = Nothing
                           , mDeps = []
                           }

missingMigrationsTestcases :: [MissingMigrationTestCase]
missingMigrationsTestcases = [ (m, [], one, [one])
                             , (m, [one], one, [])
                             , (m, [one], two, [two])
                             , (m, [one, two], one, [])
                             , (m, [one, two], two, [])
                             ]
    where
      one = blankMigration { mId = "one" }
      two = blankMigration { mId = "two", mDeps = ["one"] }
      m = Map.fromList [ (mId e, e) | e <- [one, two] ]

mkTest :: MissingMigrationTestCase -> Test
mkTest (mapping, backend, theMigration, expected) =
    let Right graph = depGraphFromMapping mapping
        storeData = StoreData mapping graph
        TestM act = migrationsToApply storeData backend theMigration
        result = runIdentity act
    in expected ~=? result

migrationsToApplyTests :: [Test]
migrationsToApplyTests = map mkTest missingMigrationsTestcases
repo_name: creswick/dbmigrations
path: test/MigrationsTest.hs
license: bsd-3-clause
size: 2,438 | n_ast_errors: 0 | ast_max_depth: 11 | n_whitespaces: 710 | n_ast_nodes: 580 | n_ast_terminals: 345 | n_ast_nonterminals: 235 | loc: 52 | cycloplexity: 1
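For context, a minimal sketch of a driver that runs these HUnit tests; runTestTT and TestList are standard Test.HUnit, but the Main module itself is hypothetical and not part of the cell above:

-- Hypothetical test driver, for illustration only.
module Main where

import Test.HUnit (Test (TestList), runTestTT)
import MigrationsTest (tests)

main :: IO ()
main = do
  counts <- runTestTT (TestList tests)   -- prints per-test results, returns the summary
  print counts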
module ParsecExpr
    {-# DEPRECATED "This module has moved to Text.ParserCombinators.Parsec.Expr" #-}
    (module Text.ParserCombinators.Parsec.Expr) where

import Text.ParserCombinators.Parsec.Expr
repo_name: FranklinChen/hugs98-plus-Sep2006
path: fptools/hslibs/text/parsec/ParsecExpr.hs
license: bsd-3-clause
size: 191 | n_ast_errors: 0 | ast_max_depth: 5 | n_whitespaces: 16 | n_ast_nodes: 22 | n_ast_terminals: 16 | n_ast_nonterminals: 6 | loc: 4 | cycloplexity: 0
-- Copyright 2004-present Facebook. All Rights Reserved.

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Haxl.Core.CallGraph where

import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
#if __GLASGOW_HASKELL__ < 804
import Data.Monoid
#endif
import Data.Text (Text)
import qualified Data.Text as Text

type ModuleName = Text

-- | An unqualified function
type Function = Text

-- | A qualified function
data QualFunction = QualFunction ModuleName Function deriving (Eq, Ord)

instance Show QualFunction where
  show (QualFunction mn nm) = Text.unpack $ mn <> Text.pack "." <> nm

-- | Represents an edge between a parent function which calls a child function
-- in the call graph
type FunctionCall = (QualFunction, QualFunction)

-- | An edge list which represents the dependencies between function calls
type CallGraph = ([FunctionCall], Map QualFunction Text)

-- | Used as the root of all function calls
mainFunction :: QualFunction
mainFunction = QualFunction "MAIN" "main"

emptyCallGraph :: CallGraph
emptyCallGraph = ([], Map.empty)
repo_name: facebook/Haxl
path: Haxl/Core/CallGraph.hs
license: bsd-3-clause
size: 1,071 | n_ast_errors: 0 | ast_max_depth: 9 | n_whitespaces: 165 | n_ast_nodes: 205 | n_ast_terminals: 126 | n_ast_nonterminals: 79 | loc: 19 | cycloplexity: 1
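A brief, hedged sketch showing how the CallGraph types above fit together. The example functions and the Text values in the map are placeholders of my own, since the cell does not document what the map's values mean:

{-# LANGUAGE OverloadedStrings #-}
-- Illustrative only: a two-edge call graph rooted at mainFunction.
module CallGraphDemo where

import qualified Data.Map.Strict as Map
import Haxl.Core.CallGraph

fetchUser :: QualFunction
fetchUser = QualFunction "Example.Users" "fetchUser"

fetchPosts :: QualFunction
fetchPosts = QualFunction "Example.Posts" "fetchPosts"

demoGraph :: CallGraph
demoGraph =
  ( [ (mainFunction, fetchUser)   -- main calls fetchUser
    , (fetchUser, fetchPosts)     -- fetchUser calls fetchPosts
    ]
  , Map.fromList [ (fetchUser, "placeholder"), (fetchPosts, "placeholder") ]
  )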
{- (c) The University of Glasgow 2006 (c) The AQUA Project, Glasgow University, 1996-1998 TcTyClsDecls: Typecheck type and class declarations -} {-# LANGUAGE TupleSections, CPP #-} module ETA.TypeCheck.TcTyClsDecls ( tcTyAndClassDecls, tcAddImplicits, -- Functions used by TcInstDcls to check -- data/type family instance declarations kcDataDefn, tcConDecls, dataDeclChecks, checkValidTyCon, tcFamTyPats, tcTyFamInstEqn, famTyConShape, tcAddTyFamInstCtxt, tcAddDataFamInstCtxt, wrongKindOfFamily, dataConCtxt, badDataConTyCon ) where import ETA.HsSyn.HsSyn import ETA.Main.HscTypes import ETA.Iface.BuildTyCl import ETA.TypeCheck.TcRnMonad import ETA.TypeCheck.TcEnv import ETA.TypeCheck.TcValidity import ETA.TypeCheck.TcHsSyn import ETA.TypeCheck.TcSimplify( growThetaTyVars ) import ETA.TypeCheck.TcBinds( tcRecSelBinds ) import ETA.TypeCheck.TcTyDecls import ETA.TypeCheck.TcClassDcl import ETA.TypeCheck.TcHsType import ETA.TypeCheck.TcMType import ETA.TypeCheck.TcType import ETA.Prelude.TysWiredIn( unitTy ) import ETA.TypeCheck.FamInst import ETA.Types.FamInstEnv( isDominatedBy, mkCoAxBranch, mkBranchedCoAxiom ) import ETA.Types.Coercion( pprCoAxBranch, ltRole ) import ETA.Types.Type import ETA.Types.TypeRep -- for checkValidRoles import ETA.Types.Kind import ETA.Types.Class import ETA.Types.CoAxiom import ETA.Types.TyCon import ETA.BasicTypes.DataCon import ETA.BasicTypes.Id import ETA.Core.MkCore ( rEC_SEL_ERROR_ID ) import ETA.BasicTypes.IdInfo import ETA.BasicTypes.Var import ETA.BasicTypes.VarEnv import ETA.BasicTypes.VarSet import ETA.BasicTypes.Module import ETA.BasicTypes.Name import ETA.BasicTypes.NameSet import ETA.BasicTypes.NameEnv import ETA.Utils.Outputable import qualified ETA.Utils.Outputable as Outputable import ETA.Utils.Maybes import ETA.Types.Unify import ETA.Utils.Util import ETA.BasicTypes.SrcLoc import ETA.Utils.ListSetOps import ETA.Utils.Digraph import ETA.Main.DynFlags import ETA.Utils.FastString import ETA.BasicTypes.Unique ( mkBuiltinUnique ) import ETA.BasicTypes.BasicTypes import ETA.Utils.Bag import Control.Monad import Data.List #include "HsVersions.h" {- ************************************************************************ * * \subsection{Type checking for type and class declarations} * * ************************************************************************ Note [Grouping of type and class declarations] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ tcTyAndClassDecls is called on a list of `TyClGroup`s. Each group is a strongly connected component of mutually dependent types and classes. We kind check and type check each group separately to enhance kind polymorphism. Take the following example: type Id a = a data X = X (Id Int) If we were to kind check the two declarations together, we would give Id the kind * -> *, since we apply it to an Int in the definition of X. But we can do better than that, since Id really is kind polymorphic, and should get kind forall (k::BOX). k -> k. Since it does not depend on anything else, it can be kind-checked by itself, hence getting the most general kind. We then kind check X, which works fine because we then know the polymorphic kind of Id, and simply instantiate k to *. Note [Check role annotations in a second pass] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Role inference potentially depends on the types of all of the datacons declared in a mutually recursive group. The validity of a role annotation, in turn, depends on the result of role inference. 
Because the types of datacons might be ill-formed (see #7175 and Note [Checking GADT return types]) we must check *all* the tycons in a group for validity before checking *any* of the roles. Thus, we take two passes over the resulting tycons, first checking for general validity and then checking for valid role annotations. -} tcTyAndClassDecls :: ModDetails -> [TyClGroup Name] -- Mutually-recursive groups in dependency order -> TcM TcGblEnv -- Input env extended by types and classes -- and their implicit Ids,DataCons -- Fails if there are any errors tcTyAndClassDecls boot_details tyclds_s = checkNoErrs $ -- The code recovers internally, but if anything gave rise to -- an error we'd better stop now, to avoid a cascade fold_env tyclds_s -- Type check each group in dependency order folding the global env where fold_env :: [TyClGroup Name] -> TcM TcGblEnv fold_env [] = getGblEnv fold_env (tyclds:tyclds_s) = do { tcg_env <- tcTyClGroup boot_details tyclds ; setGblEnv tcg_env $ fold_env tyclds_s } -- remaining groups are typecheck in the extended global env tcTyClGroup :: ModDetails -> TyClGroup Name -> TcM TcGblEnv -- Typecheck one strongly-connected component of type and class decls tcTyClGroup boot_details tyclds = do { -- Step 1: kind-check this group and returns the final -- (possibly-polymorphic) kind of each TyCon and Class -- See Note [Kind checking for type and class decls] names_w_poly_kinds <- kcTyClGroup tyclds ; traceTc "tcTyAndCl generalized kinds" (ppr names_w_poly_kinds) -- Step 2: type-check all groups together, returning -- the final TyCons and Classes ; let role_annots = extractRoleAnnots tyclds decls = group_tyclds tyclds ; tyclss <- fixM $ \ rec_tyclss -> do { is_boot <- tcIsHsBootOrSig ; let rec_flags = calcRecFlags boot_details is_boot role_annots rec_tyclss -- Populate environment with knot-tied ATyCon for TyCons -- NB: if the decls mention any ill-staged data cons -- (see Note [Recusion and promoting data constructors] -- we will have failed already in kcTyClGroup, so no worries here ; tcExtendRecEnv (zipRecTyClss names_w_poly_kinds rec_tyclss) $ -- Also extend the local type envt with bindings giving -- the (polymorphic) kind of each knot-tied TyCon or Class -- See Note [Type checking recursive type and class declarations] tcExtendKindEnv names_w_poly_kinds $ -- Kind and type check declarations for this group concatMapM (tcTyClDecl rec_flags) decls } -- Step 3: Perform the validity check -- We can do this now because we are done with the recursive knot -- Do it before Step 4 (adding implicit things) because the latter -- expects well-formed TyCons ; tcExtendGlobalEnv tyclss $ do { traceTc "Starting validity check" (ppr tyclss) ; checkNoErrs $ mapM_ (recoverM (return ()) . checkValidTyCl) tyclss -- We recover, which allows us to report multiple validity errors -- the checkNoErrs is necessary to fix #7175. ; mapM_ (recoverM (return ()) . 
checkValidRoleAnnots role_annots) tyclss -- See Note [Check role annotations in a second pass] -- Step 4: Add the implicit things; -- we want them in the environment because -- they may be mentioned in interface files ; tcExtendGlobalValEnv (mkDefaultMethodIds tyclss) $ tcAddImplicits tyclss } } tcAddImplicits :: [TyThing] -> TcM TcGblEnv tcAddImplicits tyclss = tcExtendGlobalEnvImplicit implicit_things $ tcRecSelBinds rec_sel_binds where implicit_things = concatMap implicitTyThings tyclss rec_sel_binds = mkRecSelBinds tyclss zipRecTyClss :: [(Name, Kind)] -> [TyThing] -- Knot-tied -> [(Name,TyThing)] -- Build a name-TyThing mapping for the things bound by decls -- being careful not to look at the [TyThing] -- The TyThings in the result list must have a visible ATyCon, -- because typechecking types (in, say, tcTyClDecl) looks at this outer constructor zipRecTyClss kind_pairs rec_things = [ (name, ATyCon (get name)) | (name, _kind) <- kind_pairs ] where rec_type_env :: TypeEnv rec_type_env = mkTypeEnv rec_things get name = case lookupTypeEnv rec_type_env name of Just (ATyCon tc) -> tc other -> pprPanic "zipRecTyClss" (ppr name <+> ppr other) {- ************************************************************************ * * Kind checking * * ************************************************************************ Note [Kind checking for type and class decls] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Kind checking is done thus: 1. Make up a kind variable for each parameter of the *data* type, class, and closed type family decls, and extend the kind environment (which is in the TcLclEnv) 2. Dependency-analyse the type *synonyms* (which must be non-recursive), and kind-check them in dependency order. Extend the kind envt. 3. Kind check the data type and class decls Synonyms are treated differently to data type and classes, because a type synonym can be an unboxed type type Foo = Int# and a kind variable can't unify with UnboxedTypeKind So we infer their kinds in dependency order We need to kind check all types in the mutually recursive group before we know the kind of the type variables. For example: class C a where op :: D b => a -> b -> b class D c where bop :: (Monad c) => ... Here, the kind of the locally-polymorphic type variable "b" depends on *all the uses of class D*. For example, the use of Monad c in bop's type signature means that D must have kind Type->Type. However type synonyms work differently. They can have kinds which don't just involve (->) and *: type R = Int# -- Kind # type S a = Array# a -- Kind * -> # type T a b = (# a,b #) -- Kind * -> * -> (# a,b #) So we must infer their kinds from their right-hand sides *first* and then use them, whereas for the mutually recursive data types D we bring into scope kind bindings D -> k, where k is a kind variable, and do inference. Open type families ~~~~~~~~~~~~~~~~~~ This treatment of type synonyms only applies to Haskell 98-style synonyms. General type functions can be recursive, and hence, appear in `alg_decls'. The kind of an open type family is solely determinded by its kind signature; hence, only kind signatures participate in the construction of the initial kind environment (as constructed by `getInitialKind'). In fact, we ignore instances of families altogether in the following. However, we need to include the kinds of *associated* families into the construction of the initial kind environment. (This is handled by `allDecls'). 
-} kcTyClGroup :: TyClGroup Name -> TcM [(Name,Kind)] -- Kind check this group, kind generalize, and return the resulting local env -- This bindds the TyCons and Classes of the group, but not the DataCons -- See Note [Kind checking for type and class decls] kcTyClGroup (TyClGroup { group_tyclds = decls }) = do { mod <- getModule ; traceTc "kcTyClGroup" (ptext (sLit "module") <+> ppr mod $$ vcat (map ppr decls)) -- Kind checking; -- 1. Bind kind variables for non-synonyms -- 2. Kind-check synonyms, and bind kinds of those synonyms -- 3. Kind-check non-synonyms -- 4. Generalise the inferred kinds -- See Note [Kind checking for type and class decls] -- Step 1: Bind kind variables for non-synonyms ; let (syn_decls, non_syn_decls) = partition (isSynDecl . unLoc) decls ; initial_kinds <- getInitialKinds non_syn_decls ; traceTc "kcTyClGroup: initial kinds" (ppr initial_kinds) -- Step 2: Set initial envt, kind-check the synonyms ; lcl_env <- tcExtendKindEnv2 initial_kinds $ kcSynDecls (calcSynCycles syn_decls) -- Step 3: Set extended envt, kind-check the non-synonyms ; setLclEnv lcl_env $ mapM_ kcLTyClDecl non_syn_decls -- Step 4: generalisation -- Kind checking done for this group -- Now we have to kind generalize the flexis ; res <- concatMapM (generaliseTCD (tcl_env lcl_env)) decls ; traceTc "kcTyClGroup result" (ppr res) ; return res } where generalise :: TcTypeEnv -> Name -> TcM (Name, Kind) -- For polymorphic things this is a no-op generalise kind_env name = do { let kc_kind = case lookupNameEnv kind_env name of Just (AThing k) -> k _ -> pprPanic "kcTyClGroup" (ppr name $$ ppr kind_env) ; kvs <- kindGeneralize (tyVarsOfType kc_kind) ; kc_kind' <- zonkTcKind kc_kind -- Make sure kc_kind' has the final, -- skolemised kind variables ; traceTc "Generalise kind" (vcat [ ppr name, ppr kc_kind, ppr kvs, ppr kc_kind' ]) ; return (name, mkForAllTys kvs kc_kind') } generaliseTCD :: TcTypeEnv -> LTyClDecl Name -> TcM [(Name, Kind)] generaliseTCD kind_env (L _ decl) | ClassDecl { tcdLName = (L _ name), tcdATs = ats } <- decl = do { first <- generalise kind_env name ; rest <- mapM ((generaliseFamDecl kind_env) . unLoc) ats ; return (first : rest) } | FamDecl { tcdFam = fam } <- decl = do { res <- generaliseFamDecl kind_env fam ; return [res] } | otherwise = do { res <- generalise kind_env (tcdName decl) ; return [res] } generaliseFamDecl :: TcTypeEnv -> FamilyDecl Name -> TcM (Name, Kind) generaliseFamDecl kind_env (FamilyDecl { fdLName = L _ name }) = generalise kind_env name mk_thing_env :: [LTyClDecl Name] -> [(Name, TcTyThing)] mk_thing_env [] = [] mk_thing_env (decl : decls) | L _ (ClassDecl { tcdLName = L _ nm, tcdATs = ats }) <- decl = (nm, APromotionErr ClassPE) : (map (, APromotionErr TyConPE) $ map (unLoc . fdLName . unLoc) ats) ++ (mk_thing_env decls) | otherwise = (tcdName (unLoc decl), APromotionErr TyConPE) : (mk_thing_env decls) getInitialKinds :: [LTyClDecl Name] -> TcM [(Name, TcTyThing)] getInitialKinds decls = tcExtendKindEnv2 (mk_thing_env decls) $ do { pairss <- mapM (addLocM getInitialKind) decls ; return (concat pairss) } getInitialKind :: TyClDecl Name -> TcM [(Name, TcTyThing)] -- Allocate a fresh kind variable for each TyCon and Class -- For each tycon, return (tc, AThing k) -- where k is the kind of tc, derived from the LHS -- of the definition (and probably including -- kind unification variables) -- Example: data T a b = ... 
-- return (T, kv1 -> kv2 -> kv3) -- -- This pass deals with (ie incorporates into the kind it produces) -- * The kind signatures on type-variable binders -- * The result kinds signature on a TyClDecl -- -- ALSO for each datacon, return (dc, APromotionErr RecDataConPE) -- Note [ARecDataCon: Recursion and promoting data constructors] -- -- No family instances are passed to getInitialKinds getInitialKind decl@(ClassDecl { tcdLName = L _ name, tcdTyVars = ktvs, tcdATs = ats }) = do { (cl_kind, inner_prs) <- kcHsTyVarBndrs (hsDeclHasCusk decl) ktvs $ do { inner_prs <- getFamDeclInitialKinds ats ; return (constraintKind, inner_prs) } ; let main_pr = (name, AThing cl_kind) ; return (main_pr : inner_prs) } getInitialKind decl@(DataDecl { tcdLName = L _ name , tcdTyVars = ktvs , tcdDataDefn = HsDataDefn { dd_kindSig = m_sig , dd_cons = cons' } }) = let cons = cons' -- AZ list monad coming in do { (decl_kind, _) <- kcHsTyVarBndrs (hsDeclHasCusk decl) ktvs $ do { res_k <- case m_sig of Just ksig -> tcLHsKind ksig Nothing -> return liftedTypeKind ; return (res_k, ()) } ; let main_pr = (name, AThing decl_kind) inner_prs = [ (unLoc con, APromotionErr RecDataConPE) | L _ con' <- cons, con <- con_names con' ] ; return (main_pr : inner_prs) } getInitialKind (FamDecl { tcdFam = decl }) = getFamDeclInitialKind decl getInitialKind decl@(SynDecl {}) = pprPanic "getInitialKind" (ppr decl) --------------------------------- getFamDeclInitialKinds :: [LFamilyDecl Name] -> TcM [(Name, TcTyThing)] getFamDeclInitialKinds decls = tcExtendKindEnv2 [ (n, APromotionErr TyConPE) | L _ (FamilyDecl { fdLName = L _ n }) <- decls] $ concatMapM (addLocM getFamDeclInitialKind) decls getFamDeclInitialKind :: FamilyDecl Name -> TcM [(Name, TcTyThing)] getFamDeclInitialKind decl@(FamilyDecl { fdLName = L _ name , fdTyVars = ktvs , fdKindSig = ksig }) = do { (fam_kind, _) <- kcHsTyVarBndrs (famDeclHasCusk decl) ktvs $ do { res_k <- case ksig of Just k -> tcLHsKind k Nothing | famDeclHasCusk decl -> return liftedTypeKind | otherwise -> newMetaKindVar ; return (res_k, ()) } ; return [ (name, AThing fam_kind) ] } ---------------- kcSynDecls :: [SCC (LTyClDecl Name)] -> TcM TcLclEnv -- Kind bindings kcSynDecls [] = getLclEnv kcSynDecls (group : groups) = do { (n,k) <- kcSynDecl1 group ; lcl_env <- tcExtendKindEnv [(n,k)] (kcSynDecls groups) ; return lcl_env } kcSynDecl1 :: SCC (LTyClDecl Name) -> TcM (Name,TcKind) -- Kind bindings kcSynDecl1 (AcyclicSCC (L _ decl)) = kcSynDecl decl kcSynDecl1 (CyclicSCC decls) = do { recSynErr decls; failM } -- Fail here to avoid error cascade -- of out-of-scope tycons kcSynDecl :: TyClDecl Name -> TcM (Name, TcKind) kcSynDecl decl@(SynDecl { tcdTyVars = hs_tvs, tcdLName = L _ name , tcdRhs = rhs }) -- Returns a possibly-unzonked kind = tcAddDeclCtxt decl $ do { (syn_kind, _) <- kcHsTyVarBndrs (hsDeclHasCusk decl) hs_tvs $ do { traceTc "kcd1" (ppr name <+> brackets (ppr hs_tvs)) ; (_, rhs_kind) <- tcLHsType rhs ; traceTc "kcd2" (ppr name) ; return (rhs_kind, ()) } ; return (name, syn_kind) } kcSynDecl decl = pprPanic "kcSynDecl" (ppr decl) ------------------------------------------------------------------------ kcLTyClDecl :: LTyClDecl Name -> TcM () -- See Note [Kind checking for type and class decls] kcLTyClDecl (L loc decl) = setSrcSpan loc $ tcAddDeclCtxt decl $ kcTyClDecl decl kcTyClDecl :: TyClDecl Name -> TcM () -- This function is used solely for its side effect on kind variables -- NB kind signatures on the type variables and -- result kind signature have aready been dealt with -- by 
getInitialKind, so we can ignore them here. kcTyClDecl (DataDecl { tcdLName = L _ name, tcdTyVars = hs_tvs, tcdDataDefn = defn }) | HsDataDefn { dd_cons = cons, dd_kindSig = Just _ } <- defn = mapM_ (wrapLocM kcConDecl) cons -- hs_tvs and dd_kindSig already dealt with in getInitialKind -- If dd_kindSig is Just, this must be a GADT-style decl, -- (see invariants of DataDefn declaration) -- so (a) we don't need to bring the hs_tvs into scope, because the -- ConDecls bind all their own variables -- (b) dd_ctxt is not allowed for GADT-style decls, so we can ignore it | HsDataDefn { dd_ctxt = ctxt, dd_cons = cons } <- defn = kcTyClTyVars name hs_tvs $ do { _ <- tcHsContext ctxt ; mapM_ (wrapLocM kcConDecl) cons } kcTyClDecl decl@(SynDecl {}) = pprPanic "kcTyClDecl" (ppr decl) kcTyClDecl (ClassDecl { tcdLName = L _ name, tcdTyVars = hs_tvs , tcdCtxt = ctxt, tcdSigs = sigs }) = kcTyClTyVars name hs_tvs $ do { _ <- tcHsContext ctxt ; mapM_ (wrapLocM kc_sig) sigs } where kc_sig (TypeSig _ op_ty _) = discardResult (tcHsLiftedType op_ty) kc_sig (GenericSig _ op_ty) = discardResult (tcHsLiftedType op_ty) kc_sig _ = return () -- closed type families look at their equations, but other families don't -- do anything here kcTyClDecl (FamDecl (FamilyDecl { fdLName = L _ fam_tc_name , fdTyVars = hs_tvs , fdInfo = ClosedTypeFamily eqns })) = do { tc_kind <- kcLookupKind fam_tc_name ; let fam_tc_shape = ( fam_tc_name, length (hsQTvBndrs hs_tvs), tc_kind) ; mapM_ (kcTyFamInstEqn fam_tc_shape) eqns } kcTyClDecl (FamDecl {}) = return () ------------------- kcConDecl :: ConDecl Name -> TcM () kcConDecl (ConDecl { con_names = names, con_qvars = ex_tvs , con_cxt = ex_ctxt, con_details = details , con_res = res }) = addErrCtxt (dataConCtxtName names) $ -- the 'False' says that the existentials don't have a CUSK, as the -- concept doesn't really apply here. We just need to bring the variables -- into scope! do { _ <- kcHsTyVarBndrs False ex_tvs $ do { _ <- tcHsContext ex_ctxt ; mapM_ (tcHsOpenType . getBangType) (hsConDeclArgTys details) ; _ <- tcConRes res ; return (panic "kcConDecl", ()) } ; return () } {- Note [Recursion and promoting data constructors] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We don't want to allow promotion in a strongly connected component when kind checking. Consider: data T f = K (f (K Any)) When kind checking the `data T' declaration the local env contains the mappings: T -> AThing <some initial kind> K -> ARecDataCon ANothing is only used for DataCons, and only used during type checking in tcTyClGroup. ************************************************************************ * * \subsection{Type checking} * * ************************************************************************ Note [Type checking recursive type and class declarations] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ At this point we have completed *kind-checking* of a mutually recursive group of type/class decls (done in kcTyClGroup). However, we discarded the kind-checked types (eg RHSs of data type decls); note that kcTyClDecl returns (). There are two reasons: * It's convenient, because we don't have to rebuild a kinded HsDecl (a fairly elaborate type) * It's necessary, because after kind-generalisation, the TyCons/Classes may now be kind-polymorphic, and hence need to be given kind arguments. Example: data T f a = MkT (f a) (T f a) During kind-checking, we give T the kind T :: k1 -> k2 -> * and figure out constraints on k1, k2 etc. Then we generalise to get T :: forall k. 
(k->*) -> k -> * So now the (T f a) in the RHS must be elaborated to (T k f a). However, during tcTyClDecl of T (above) we will be in a recursive "knot". So we aren't allowed to look at the TyCon T itself; we are only allowed to put it (lazily) in the returned structures. But when kind-checking the RHS of T's decl, we *do* need to know T's kind (so that we can correctly elaboarate (T k f a). How can we get T's kind without looking at T? Delicate answer: during tcTyClDecl, we extend *Global* env with T -> ATyCon (the (not yet built) TyCon for T) *Local* env with T -> AThing (polymorphic kind of T) Then: * During TcHsType.kcTyVar we look in the *local* env, to get the known kind for T. * But in TcHsType.ds_type (and ds_var_app in particular) we look in the *global* env to get the TyCon. But we must be careful not to force the TyCon or we'll get a loop. This fancy footwork (with two bindings for T) is only necesary for the TyCons or Classes of this recursive group. Earlier, finished groups, live in the global env only. -} tcTyClDecl :: RecTyInfo -> LTyClDecl Name -> TcM [TyThing] tcTyClDecl rec_info (L loc decl) = setSrcSpan loc $ tcAddDeclCtxt decl $ traceTc "tcTyAndCl-x" (ppr decl) >> tcTyClDecl1 NoParentTyCon rec_info decl -- "type family" declarations tcTyClDecl1 :: TyConParent -> RecTyInfo -> TyClDecl Name -> TcM [TyThing] tcTyClDecl1 parent _rec_info (FamDecl { tcdFam = fd }) = tcFamDecl1 parent fd -- "type" synonym declaration tcTyClDecl1 _parent rec_info (SynDecl { tcdLName = L _ tc_name, tcdTyVars = tvs, tcdRhs = rhs }) = ASSERT( isNoParent _parent ) tcTyClTyVars tc_name tvs $ \ tvs' kind -> tcTySynRhs rec_info tc_name tvs' kind rhs -- "data/newtype" declaration tcTyClDecl1 _parent rec_info (DataDecl { tcdLName = L _ tc_name, tcdTyVars = tvs, tcdDataDefn = defn }) = ASSERT( isNoParent _parent ) tcTyClTyVars tc_name tvs $ \ tvs' kind -> tcDataDefn rec_info tc_name tvs' kind defn tcTyClDecl1 _parent rec_info (ClassDecl { tcdLName = L _ class_name, tcdTyVars = tvs , tcdCtxt = ctxt, tcdMeths = meths , tcdFDs = fundeps, tcdSigs = sigs , tcdATs = ats, tcdATDefs = at_defs }) = ASSERT( isNoParent _parent ) do { (clas, tvs', gen_dm_env) <- fixM $ \ ~(clas,_,_) -> tcTyClTyVars class_name tvs $ \ tvs' kind -> do { MASSERT( isConstraintKind kind ) -- This little knot is just so we can get -- hold of the name of the class TyCon, which we -- need to look up its recursiveness ; let tycon_name = tyConName (classTyCon clas) tc_isrec = rti_is_rec rec_info tycon_name roles = rti_roles rec_info tycon_name ; ctxt' <- tcHsContext ctxt ; ctxt' <- zonkTcTypeToTypes emptyZonkEnv ctxt' -- Squeeze out any kind unification variables ; fds' <- mapM (addLocM tc_fundep) fundeps ; (sig_stuff, gen_dm_env) <- tcClassSigs class_name sigs meths ; at_stuff <- tcClassATs class_name (AssocFamilyTyCon clas) ats at_defs ; mindef <- tcClassMinimalDef class_name sigs sig_stuff ; clas <- buildClass class_name tvs' roles ctxt' fds' at_stuff sig_stuff mindef tc_isrec ; traceTc "tcClassDecl" (ppr fundeps $$ ppr tvs' $$ ppr fds') ; return (clas, tvs', gen_dm_env) } ; let { gen_dm_ids = [ AnId (mkExportedLocalId VanillaId gen_dm_name gen_dm_ty) | (sel_id, GenDefMeth gen_dm_name) <- classOpItems clas , let gen_dm_tau = expectJust "tcTyClDecl1" $ lookupNameEnv gen_dm_env (idName sel_id) , let gen_dm_ty = mkSigmaTy tvs' [mkClassPred clas (mkTyVarTys tvs')] gen_dm_tau ] ; class_ats = map ATyCon (classATs clas) } ; return (ATyCon (classTyCon clas) : gen_dm_ids ++ class_ats ) } -- NB: Order is important due to the call to 
`mkGlobalThings' when -- tying the the type and class declaration type checking knot. where tc_fundep (tvs1, tvs2) = do { tvs1' <- mapM (tc_fd_tyvar . unLoc) tvs1 ; ; tvs2' <- mapM (tc_fd_tyvar . unLoc) tvs2 ; ; return (tvs1', tvs2') } tc_fd_tyvar name -- Scoped kind variables are bound to unification variables -- which are now fixed, so we can zonk = do { tv <- tcLookupTyVar name ; ty <- zonkTyVarOcc emptyZonkEnv tv -- Squeeze out any kind unification variables ; case getTyVar_maybe ty of Just tv' -> return tv' Nothing -> pprPanic "tc_fd_tyvar" (ppr name $$ ppr tv $$ ppr ty) } tcFamDecl1 :: TyConParent -> FamilyDecl Name -> TcM [TyThing] tcFamDecl1 parent (FamilyDecl {fdInfo = OpenTypeFamily, fdLName = L _ tc_name, fdTyVars = tvs}) = tcTyClTyVars tc_name tvs $ \ tvs' kind -> do { traceTc "open type family:" (ppr tc_name) ; checkFamFlag tc_name ; tycon <- buildFamilyTyCon tc_name tvs' OpenSynFamilyTyCon kind parent ; return [ATyCon tycon] } tcFamDecl1 parent (FamilyDecl { fdInfo = ClosedTypeFamily eqns , fdLName = lname@(L _ tc_name), fdTyVars = tvs }) -- Closed type families are a little tricky, because they contain the definition -- of both the type family and the equations for a CoAxiom. -- Note: eqns might be empty, in a hs-boot file! = do { traceTc "closed type family:" (ppr tc_name) -- the variables in the header have no scope: ; (tvs', kind) <- tcTyClTyVars tc_name tvs $ \ tvs' kind -> return (tvs', kind) ; checkFamFlag tc_name -- make sure we have -XTypeFamilies -- Process the equations, creating CoAxBranches ; tc_kind <- kcLookupKind tc_name ; let fam_tc_shape = (tc_name, length (hsQTvBndrs tvs), tc_kind) ; branches <- mapM (tcTyFamInstEqn fam_tc_shape) eqns -- we need the tycon that we will be creating, but it's in scope. -- just look it up. ; fam_tc <- tcLookupLocatedTyCon lname -- create a CoAxiom, with the correct src location. It is Vitally -- Important that we do not pass the branches into -- newFamInstAxiomName. They have types that have been zonked inside -- the knot and we will die if we look at them. This is OK here -- because there will only be one axiom, so we don't need to -- differentiate names. 
-- See [Zonking inside the knot] in TcHsType ; loc <- getSrcSpanM ; co_ax_name <- newFamInstAxiomName loc tc_name [] -- mkBranchedCoAxiom will fail on an empty list of branches, but -- we'll never look at co_ax in this case ; let co_ax = mkBranchedCoAxiom co_ax_name fam_tc branches -- now, finally, build the TyCon ; let syn_rhs = if null eqns then AbstractClosedSynFamilyTyCon else ClosedSynFamilyTyCon co_ax ; tycon <- buildFamilyTyCon tc_name tvs' syn_rhs kind parent ; let result = if null eqns then [ATyCon tycon] else [ATyCon tycon, ACoAxiom co_ax] ; return result } -- We check for instance validity later, when doing validity checking for -- the tycon tcFamDecl1 parent (FamilyDecl {fdInfo = DataFamily, fdLName = L _ tc_name, fdTyVars = tvs}) = tcTyClTyVars tc_name tvs $ \ tvs' kind -> do { traceTc "data family:" (ppr tc_name) ; checkFamFlag tc_name ; extra_tvs <- tcDataKindSig kind ; let final_tvs = tvs' ++ extra_tvs -- we may not need these roles = map (const Nominal) final_tvs tycon = buildAlgTyCon tc_name final_tvs roles Nothing [] DataFamilyTyCon Recursive False -- Not promotable to the kind level True -- GADT syntax parent ; return [ATyCon tycon] } tcTySynRhs :: RecTyInfo -> Name -> [TyVar] -> Kind -> LHsType Name -> TcM [TyThing] tcTySynRhs rec_info tc_name tvs kind hs_ty = do { env <- getLclEnv ; traceTc "tc-syn" (ppr tc_name $$ ppr (tcl_env env)) ; rhs_ty <- tcCheckLHsType hs_ty kind ; rhs_ty <- zonkTcTypeToType emptyZonkEnv rhs_ty ; let roles = rti_roles rec_info tc_name ; tycon <- buildSynonymTyCon tc_name tvs roles rhs_ty kind ; return [ATyCon tycon] } tcDataDefn :: RecTyInfo -> Name -> [TyVar] -> Kind -> HsDataDefn Name -> TcM [TyThing] -- NB: not used for newtype/data instances (whether associated or not) tcDataDefn rec_info tc_name tvs kind (HsDataDefn { dd_ND = new_or_data, dd_cType = cType , dd_ctxt = ctxt, dd_kindSig = mb_ksig , dd_cons = cons' }) = let cons = cons' -- AZ List monad coming in do { extra_tvs <- tcDataKindSig kind ; let final_tvs = tvs ++ extra_tvs roles = rti_roles rec_info tc_name ; stupid_tc_theta <- tcHsContext ctxt ; stupid_theta <- zonkTcTypeToTypes emptyZonkEnv stupid_tc_theta ; kind_signatures <- xoptM Opt_KindSignatures ; is_boot <- tcIsHsBootOrSig -- Are we compiling an hs-boot file? 
-- Check that we don't use kind signatures without Glasgow extensions ; case mb_ksig of Nothing -> return () Just hs_k -> do { checkTc (kind_signatures) (badSigTyDecl tc_name) ; tc_kind <- tcLHsKind hs_k ; checkKind kind tc_kind ; return () } ; gadt_syntax <- dataDeclChecks tc_name new_or_data stupid_theta cons ; tycon <- fixM $ \ tycon -> do { let res_ty = mkTyConApp tycon (mkTyVarTys final_tvs) ; data_cons <- tcConDecls new_or_data tycon (final_tvs, res_ty) cons ; tc_rhs <- if null cons && is_boot -- In a hs-boot file, empty cons means then return totallyAbstractTyConRhs -- "don't know"; hence totally Abstract else case new_or_data of DataType -> return (mkDataTyConRhs data_cons) NewType -> ASSERT( not (null data_cons) ) mkNewTyConRhs tc_name tycon (head data_cons) ; return (buildAlgTyCon tc_name final_tvs roles (fmap unLoc cType) stupid_theta tc_rhs (rti_is_rec rec_info tc_name) (rti_promotable rec_info) gadt_syntax NoParentTyCon) } ; return [ATyCon tycon] } {- ************************************************************************ * * Typechecking associated types (in class decls) (including the associated-type defaults) * * ************************************************************************ Note [Associated type defaults] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The following is an example of associated type defaults: class C a where data D a type F a b :: * type F a Z = [a] -- Default type F a (S n) = F a n -- Default Note that: - We can have more than one default definition for a single associated type, as long as they do not overlap (same rules as for instances) - We can get default definitions only for type families, not data families -} tcClassATs :: Name -- The class name (not knot-tied) -> TyConParent -- The class parent of this associated type -> [LFamilyDecl Name] -- Associated types. -> [LTyFamDefltEqn Name] -- Associated type defaults. 
-> TcM [ClassATItem] tcClassATs class_name parent ats at_defs = do { -- Complain about associated type defaults for non associated-types sequence_ [ failWithTc (badATErr class_name n) | n <- map at_def_tycon at_defs , not (n `elemNameSet` at_names) ] ; mapM tc_at ats } where at_def_tycon :: LTyFamDefltEqn Name -> Name at_def_tycon (L _ eqn) = unLoc (tfe_tycon eqn) at_fam_name :: LFamilyDecl Name -> Name at_fam_name (L _ decl) = unLoc (fdLName decl) at_names = mkNameSet (map at_fam_name ats) at_defs_map :: NameEnv [LTyFamDefltEqn Name] -- Maps an AT in 'ats' to a list of all its default defs in 'at_defs' at_defs_map = foldr (\at_def nenv -> extendNameEnv_C (++) nenv (at_def_tycon at_def) [at_def]) emptyNameEnv at_defs tc_at at = do { [ATyCon fam_tc] <- addLocM (tcFamDecl1 parent) at ; let at_defs = lookupNameEnv at_defs_map (at_fam_name at) `orElse` [] ; atd <- tcDefaultAssocDecl fam_tc at_defs ; return (ATI fam_tc atd) } ------------------------- tcDefaultAssocDecl :: TyCon -- ^ Family TyCon -> [LTyFamDefltEqn Name] -- ^ Defaults -> TcM (Maybe (Type, SrcSpan)) -- ^ Type checked RHS tcDefaultAssocDecl _ [] = return Nothing -- No default declaration tcDefaultAssocDecl _ (d1:_:_) = failWithTc (ptext (sLit "More than one default declaration for") <+> ppr (tfe_tycon (unLoc d1))) tcDefaultAssocDecl fam_tc [L loc (TyFamEqn { tfe_tycon = L _ tc_name , tfe_pats = hs_tvs , tfe_rhs = rhs })] = setSrcSpan loc $ tcAddFamInstCtxt (ptext (sLit "default type instance")) tc_name $ tcTyClTyVars tc_name hs_tvs $ \ tvs rhs_kind -> do { traceTc "tcDefaultAssocDecl" (ppr tc_name) ; checkTc (isTypeFamilyTyCon fam_tc) (wrongKindOfFamily fam_tc) ; let (fam_name, fam_pat_arity, _) = famTyConShape fam_tc ; ASSERT( fam_name == tc_name ) checkTc (length (hsQTvBndrs hs_tvs) == fam_pat_arity) (wrongNumberOfParmsErr fam_pat_arity) ; rhs_ty <- tcCheckLHsType rhs rhs_kind ; rhs_ty <- zonkTcTypeToType emptyZonkEnv rhs_ty ; let fam_tc_tvs = tyConTyVars fam_tc subst = zipTopTvSubst tvs (mkTyVarTys fam_tc_tvs) ; return ( ASSERT( equalLength fam_tc_tvs tvs ) Just (substTy subst rhs_ty, loc) ) } -- We check for well-formedness and validity later, in checkValidClass ------------------------- kcTyFamInstEqn :: FamTyConShape -> LTyFamInstEqn Name -> TcM () kcTyFamInstEqn fam_tc_shape (L loc (TyFamEqn { tfe_pats = pats, tfe_rhs = hs_ty })) = setSrcSpan loc $ discardResult $ tc_fam_ty_pats fam_tc_shape pats (discardResult . (tcCheckLHsType hs_ty)) tcTyFamInstEqn :: FamTyConShape -> LTyFamInstEqn Name -> TcM CoAxBranch -- Needs to be here, not in TcInstDcls, because closed families -- (typechecked here) have TyFamInstEqns tcTyFamInstEqn fam_tc_shape@(fam_tc_name,_,_) (L loc (TyFamEqn { tfe_tycon = L _ eqn_tc_name , tfe_pats = pats , tfe_rhs = hs_ty })) = setSrcSpan loc $ tcFamTyPats fam_tc_shape pats (discardResult . 
(tcCheckLHsType hs_ty)) $ \tvs' pats' res_kind -> do { checkTc (fam_tc_name == eqn_tc_name) (wrongTyFamName fam_tc_name eqn_tc_name) ; rhs_ty <- tcCheckLHsType hs_ty res_kind ; rhs_ty <- zonkTcTypeToType emptyZonkEnv rhs_ty ; traceTc "tcTyFamInstEqn" (ppr fam_tc_name <+> ppr tvs') -- don't print out the pats here, as they might be zonked inside the knot ; return (mkCoAxBranch tvs' pats' rhs_ty loc) } kcDataDefn :: HsDataDefn Name -> TcKind -> TcM () -- Used for 'data instance' only -- Ordinary 'data' is handled by kcTyClDec kcDataDefn (HsDataDefn { dd_ctxt = ctxt, dd_cons = cons, dd_kindSig = mb_kind }) res_k = do { _ <- tcHsContext ctxt ; checkNoErrs $ mapM_ (wrapLocM kcConDecl) cons -- See Note [Failing early in kcDataDefn] ; kcResultKind mb_kind res_k } ------------------ kcResultKind :: Maybe (LHsKind Name) -> Kind -> TcM () kcResultKind Nothing res_k = checkKind res_k liftedTypeKind -- type family F a -- defaults to type family F a :: * kcResultKind (Just k) res_k = do { k' <- tcLHsKind k ; checkKind k' res_k } {- Kind check type patterns and kind annotate the embedded type variables. type instance F [a] = rhs * Here we check that a type instance matches its kind signature, but we do not check whether there is a pattern for each type index; the latter check is only required for type synonym instances. Note [tc_fam_ty_pats vs tcFamTyPats] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ tc_fam_ty_pats does the type checking of the patterns, but it doesn't zonk or generate any desugaring. It is used when kind-checking closed type families. tcFamTyPats type checks the patterns, zonks, and then calls thing_inside to generate a desugaring. It is used during type-checking (not kind-checking). Note [Type-checking type patterns] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When typechecking the patterns of a family instance declaration, we can't rely on using the family TyCon, because this is sometimes called from within a type-checking knot. (Specifically for closed type families.) The type FamTyConShape gives just enough information to do the job. The "arity" field of FamTyConShape is the *visible* arity of the family type constructor, i.e. what the users sees and writes, not including kind arguments. See also Note [tc_fam_ty_pats vs tcFamTyPats] Note [Failing early in kcDataDefn] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We need to use checkNoErrs when calling kcConDecl. This is because kcConDecl calls tcConDecl, which checks that the return type of a GADT-like constructor is actually an instance of the type head. Without the checkNoErrs, potentially two bad things could happen: 1) Duplicate error messages, because tcConDecl will be called again during *type* checking (as opposed to kind checking) 2) If we just keep blindly forging forward after both kind checking and type checking, we can get a panic in rejigConRes. See Trac #8368. 
-} ----------------- type FamTyConShape = (Name, Arity, Kind) -- See Note [Type-checking type patterns] famTyConShape :: TyCon -> FamTyConShape famTyConShape fam_tc = ( tyConName fam_tc , length (filterOut isKindVar (tyConTyVars fam_tc)) , tyConKind fam_tc ) tc_fam_ty_pats :: FamTyConShape -> HsWithBndrs Name [LHsType Name] -- Patterns -> (TcKind -> TcM ()) -- Kind checker for RHS -- result is ignored -> TcM ([Kind], [Type], Kind) -- Check the type patterns of a type or data family instance -- type instance F <pat1> <pat2> = <type> -- The 'tyvars' are the free type variables of pats -- -- NB: The family instance declaration may be an associated one, -- nested inside an instance decl, thus -- instance C [a] where -- type F [a] = ... -- In that case, the type variable 'a' will *already be in scope* -- (and, if C is poly-kinded, so will its kind parameter). tc_fam_ty_pats (name, arity, kind) (HsWB { hswb_cts = arg_pats, hswb_kvs = kvars, hswb_tvs = tvars }) kind_checker = do { let (fam_kvs, fam_body) = splitForAllTys kind -- We wish to check that the pattern has the right number of arguments -- in checkValidFamPats (in TcValidity), so we can do the check *after* -- we're done with the knot. But, the splitKindFunTysN below will panic -- if there are *too many* patterns. So, we do a preliminary check here. -- Note that we don't have enough information at hand to do a full check, -- as that requires the full declared arity of the family, which isn't -- nearby. ; checkTc (length arg_pats == arity) $ wrongNumberOfParmsErr arity -- Instantiate with meta kind vars ; fam_arg_kinds <- mapM (const newMetaKindVar) fam_kvs ; loc <- getSrcSpanM ; let (arg_kinds, res_kind) = splitKindFunTysN (length arg_pats) $ substKiWith fam_kvs fam_arg_kinds fam_body hs_tvs = HsQTvs { hsq_kvs = kvars , hsq_tvs = userHsTyVarBndrs loc tvars } -- Kind-check and quantify -- See Note [Quantifying over family patterns] ; typats <- tcHsTyVarBndrs hs_tvs $ \ _ -> do { kind_checker res_kind ; tcHsArgTys (quotes (ppr name)) arg_pats arg_kinds } ; return (fam_arg_kinds, typats, res_kind) } -- See Note [tc_fam_ty_pats vs tcFamTyPats] tcFamTyPats :: FamTyConShape -> HsWithBndrs Name [LHsType Name] -- patterns -> (TcKind -> TcM ()) -- kind-checker for RHS -> ([TKVar] -- Kind and type variables -> [TcType] -- Kind and type arguments -> Kind -> TcM a) -> TcM a tcFamTyPats fam_shape@(name,_,_) pats kind_checker thing_inside = do { (fam_arg_kinds, typats, res_kind) <- tc_fam_ty_pats fam_shape pats kind_checker ; let all_args = fam_arg_kinds ++ typats -- Find free variables (after zonking) and turn -- them into skolems, so that we don't subsequently -- replace a meta kind var with AnyK -- Very like kindGeneralize ; qtkvs <- quantifyTyVars emptyVarSet (tyVarsOfTypes all_args) -- Zonk the patterns etc into the Type world ; (ze, qtkvs') <- zonkTyBndrsX emptyZonkEnv qtkvs ; all_args' <- zonkTcTypeToTypes ze all_args ; res_kind' <- zonkTcTypeToType ze res_kind ; traceTc "tcFamTyPats" (ppr name) -- don't print out too much, as we might be in the knot ; tcExtendTyVarEnv qtkvs' $ thing_inside qtkvs' all_args' res_kind' } {- Note [Quantifying over family patterns] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We need to quantify over two different lots of kind variables: First, the ones that come from the kinds of the tyvar args of tcTyVarBndrsKindGen, as usual data family Dist a -- Proxy :: forall k. 
k -> * data instance Dist (Proxy a) = DP -- Generates data DistProxy = DP -- ax8 k (a::k) :: Dist * (Proxy k a) ~ DistProxy k a -- The 'k' comes from the tcTyVarBndrsKindGen (a::k) Second, the ones that come from the kind argument of the type family which we pick up using the (tyVarsOfTypes typats) in the result of the thing_inside of tcHsTyvarBndrsGen. -- Any :: forall k. k data instance Dist Any = DA -- Generates data DistAny k = DA -- ax7 k :: Dist k (Any k) ~ DistAny k -- The 'k' comes from kindGeneralizeKinds (Any k) Note [Quantified kind variables of a family pattern] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider type family KindFam (p :: k1) (q :: k1) data T :: Maybe k1 -> k2 -> * type instance KindFam (a :: Maybe k) b = T a b -> Int The HsBSig for the family patterns will be ([k], [a]) Then in the family instance we want to * Bring into scope [ "k" -> k:BOX, "a" -> a:k ] * Kind-check the RHS * Quantify the type instance over k and k', as well as a,b, thus type instance [k, k', a:Maybe k, b:k'] KindFam (Maybe k) k' a b = T k k' a b -> Int Notice that in the third step we quantify over all the visibly-mentioned type variables (a,b), but also over the implicitly mentioned kind variables (k, k'). In this case one is bound explicitly but often there will be none. The role of the kind signature (a :: Maybe k) is to add a constraint that 'a' must have that kind, and to bring 'k' into scope. ************************************************************************ * * Data types * * ************************************************************************ -} dataDeclChecks :: Name -> NewOrData -> ThetaType -> [LConDecl Name] -> TcM Bool dataDeclChecks tc_name new_or_data stupid_theta cons = do { -- Check that we don't use GADT syntax in H98 world gadtSyntax_ok <- xoptM Opt_GADTSyntax ; let gadt_syntax = consUseGadtSyntax cons ; checkTc (gadtSyntax_ok || not gadt_syntax) (badGadtDecl tc_name) -- Check that the stupid theta is empty for a GADT-style declaration ; checkTc (null stupid_theta || not gadt_syntax) (badStupidTheta tc_name) -- Check that a newtype has exactly one constructor -- Do this before checking for empty data decls, so that -- we don't suggest -XEmptyDataDecls for newtypes ; checkTc (new_or_data == DataType || isSingleton cons) (newtypeConError tc_name (length cons)) -- Check that there's at least one condecl, -- or else we're reading an hs-boot file, or -XEmptyDataDecls ; empty_data_decls <- xoptM Opt_EmptyDataDecls ; is_boot <- tcIsHsBootOrSig -- Are we compiling an hs-boot file? 
; checkTc (not (null cons) || empty_data_decls || is_boot) (emptyConDeclsErr tc_name) ; return gadt_syntax } ----------------------------------- consUseGadtSyntax :: [LConDecl a] -> Bool consUseGadtSyntax (L _ (ConDecl { con_res = ResTyGADT _ _ }) : _) = True consUseGadtSyntax _ = False -- All constructors have same shape ----------------------------------- tcConDecls :: NewOrData -> TyCon -> ([TyVar], Type) -> [LConDecl Name] -> TcM [DataCon] tcConDecls new_or_data rep_tycon (tmpl_tvs, res_tmpl) cons = concatMapM (addLocM $ tcConDecl new_or_data rep_tycon tmpl_tvs res_tmpl) cons tcConDecl :: NewOrData -> TyCon -- Representation tycon -> [TyVar] -> Type -- Return type template (with its template tyvars) -- (tvs, T tys), where T is the family TyCon -> ConDecl Name -> TcM [DataCon] tcConDecl new_or_data rep_tycon tmpl_tvs res_tmpl -- Data types (ConDecl { con_names = names , con_qvars = hs_tvs, con_cxt = hs_ctxt , con_details = hs_details, con_res = hs_res_ty }) = addErrCtxt (dataConCtxtName names) $ do { traceTc "tcConDecl 1" (ppr names) ; (ctxt, arg_tys, res_ty, field_lbls, stricts) <- tcHsTyVarBndrs hs_tvs $ \ _ -> do { ctxt <- tcHsContext hs_ctxt ; details <- tcConArgs new_or_data hs_details ; res_ty <- tcConRes hs_res_ty ; let (field_lbls, btys) = details (arg_tys, stricts) = unzip btys ; return (ctxt, arg_tys, res_ty, field_lbls, stricts) } -- Generalise the kind variables (returning quantified TcKindVars) -- and quantify the type variables (substituting their kinds) -- REMEMBER: 'tkvs' are: -- ResTyH98: the *existential* type variables only -- ResTyGADT: *all* the quantified type variables -- c.f. the comment on con_qvars in HsDecls ; tkvs <- case res_ty of ResTyH98 -> quantifyTyVars (mkVarSet tmpl_tvs) (tyVarsOfTypes (ctxt++arg_tys)) ResTyGADT _ res_ty -> quantifyTyVars emptyVarSet (tyVarsOfTypes (res_ty:ctxt++arg_tys)) -- Zonk to Types ; (ze, qtkvs) <- zonkTyBndrsX emptyZonkEnv tkvs ; arg_tys <- zonkTcTypeToTypes ze arg_tys ; ctxt <- zonkTcTypeToTypes ze ctxt ; res_ty <- case res_ty of ResTyH98 -> return ResTyH98 ResTyGADT ls ty -> ResTyGADT ls <$> zonkTcTypeToType ze ty ; let (univ_tvs, ex_tvs, eq_preds, res_ty') = rejigConRes tmpl_tvs res_tmpl qtkvs res_ty ; fam_envs <- tcGetFamInstEnvs ; let buildOneDataCon (L _ name) = do { is_infix <- tcConIsInfix name hs_details res_ty ; buildDataCon fam_envs name is_infix stricts field_lbls univ_tvs ex_tvs eq_preds ctxt arg_tys res_ty' rep_tycon -- NB: we put data_tc, the type constructor gotten from the -- constructor type signature into the data constructor; -- that way checkValidDataCon can complain if it's wrong. 
} ; mapM buildOneDataCon names } tcConIsInfix :: Name -> HsConDetails (LHsType Name) (Located [LConDeclField Name]) -> ResType Type -> TcM Bool tcConIsInfix _ details ResTyH98 = case details of InfixCon {} -> return True _ -> return False tcConIsInfix con details (ResTyGADT _ _) = case details of InfixCon {} -> return True RecCon {} -> return False PrefixCon arg_tys -- See Note [Infix GADT cons] | isSymOcc (getOccName con) , [_ty1,_ty2] <- arg_tys -> do { fix_env <- getFixityEnv ; return (con `elemNameEnv` fix_env) } | otherwise -> return False tcConArgs :: NewOrData -> HsConDeclDetails Name -> TcM ([Name], [(TcType, HsSrcBang)]) tcConArgs new_or_data (PrefixCon btys) = do { btys' <- mapM (tcConArg new_or_data) btys ; return ([], btys') } tcConArgs new_or_data (InfixCon bty1 bty2) = do { bty1' <- tcConArg new_or_data bty1 ; bty2' <- tcConArg new_or_data bty2 ; return ([], [bty1', bty2']) } tcConArgs new_or_data (RecCon fields) = do { btys' <- mapM (tcConArg new_or_data) btys ; return (field_names, btys') } where -- We need a one-to-one mapping from field_names to btys combined = map (\(L _ f) -> (cd_fld_names f,cd_fld_type f)) (unLoc fields) explode (ns,ty) = zip (map unLoc ns) (repeat ty) exploded = concatMap explode combined (field_names,btys) = unzip exploded tcConArg :: NewOrData -> LHsType Name -> TcM (TcType, HsSrcBang) tcConArg new_or_data bty = do { traceTc "tcConArg 1" (ppr bty) ; arg_ty <- tcHsConArgType new_or_data bty ; traceTc "tcConArg 2" (ppr bty) ; return (arg_ty, getBangStrictness bty) } tcConRes :: ResType (LHsType Name) -> TcM (ResType Type) tcConRes ResTyH98 = return ResTyH98 tcConRes (ResTyGADT ls res_ty) = do { res_ty' <- tcHsLiftedType res_ty ; return (ResTyGADT ls res_ty') } {- Note [Infix GADT constructors] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We do not currently have syntax to declare an infix constructor in GADT syntax, but it makes a (small) difference to the Show instance. So as a slightly ad-hoc solution, we regard a GADT data constructor as infix if a) it is an operator symbol b) it has two arguments c) there is a fixity declaration for it For example: infix 6 (:--:) data T a where (:--:) :: t1 -> t2 -> T Int Note [Checking GADT return types] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ There is a delicacy around checking the return types of a datacon. The central problem is dealing with a declaration like data T a where MkT :: a -> Q a Note that the return type of MkT is totally bogus. When creating the T tycon, we also need to create the MkT datacon, which must have a "rejigged" return type. That is, the MkT datacon's type must be transformed to have a uniform return type with explicit coercions for GADT-like type parameters. This rejigging is what rejigConRes does. The problem is, though, that checking that the return type is appropriate is much easier when done over *Type*, not *HsType*. So, we want to make rejigConRes lazy and then check the validity of the return type in checkValidDataCon. But, if the return type is bogus, rejigConRes can't work -- it will have a failed pattern match. Luckily, if we run checkValidDataCon before ever looking at the rejigged return type (checkValidDataCon checks the dataConUserType, which is not rejigged!), we catch the error before forcing the rejigged type and panicking. -} -- Example -- data instance T (b,c) where -- TI :: forall e. e -> T (e,e) -- -- The representation tycon looks like this: -- data :R7T b c where -- TI :: forall b1 c1. 
(b1 ~ c1) => b1 -> :R7T b1 c1 -- In this case orig_res_ty = T (e,e) rejigConRes :: [TyVar] -> Type -- Template for result type; e.g. -- data instance T [a] b c = ... -- gives template ([a,b,c], T [a] b c) -> [TyVar] -- where MkT :: forall x y z. ... -> ResType Type -> ([TyVar], -- Universal [TyVar], -- Existential (distinct OccNames from univs) [(TyVar,Type)], -- Equality predicates Type) -- Typechecked return type -- We don't check that the TyCon given in the ResTy is -- the same as the parent tycon, because checkValidDataCon will do it rejigConRes tmpl_tvs res_ty dc_tvs ResTyH98 = (tmpl_tvs, dc_tvs, [], res_ty) -- In H98 syntax the dc_tvs are the existential ones -- data T a b c = forall d e. MkT ... -- The {a,b,c} are tc_tvs, and {d,e} are dc_tvs rejigConRes tmpl_tvs res_tmpl dc_tvs (ResTyGADT _ res_ty) -- E.g. data T [a] b c where -- MkT :: forall x y z. T [(x,y)] z z -- Then we generate -- Univ tyvars Eq-spec -- a a~(x,y) -- b b~z -- z -- Existentials are the leftover type vars: [x,y] -- So we return ([a,b,z], [x,y], [a~(x,y),b~z], T [(x,y)] z z) = (univ_tvs, ex_tvs, eq_spec, res_ty) where Just subst = tcMatchTy (mkVarSet tmpl_tvs) res_tmpl res_ty -- This 'Just' pattern is sure to match, because if not -- checkValidDataCon will complain first. -- See Note [Checking GADT return types] -- /Lazily/ figure out the univ_tvs etc -- Each univ_tv is either a dc_tv or a tmpl_tv (univ_tvs, eq_spec) = foldr choose ([], []) tmpl_tvs choose tmpl (univs, eqs) | Just ty <- lookupTyVar subst tmpl = case tcGetTyVar_maybe ty of Just tv | not (tv `elem` univs) -> (tv:univs, eqs) _other -> (new_tmpl:univs, (new_tmpl,ty):eqs) where -- see Note [Substitution in template variables kinds] new_tmpl = updateTyVarKind (substTy subst) tmpl | otherwise = pprPanic "tcResultType" (ppr res_ty) ex_tvs = dc_tvs `minusList` univ_tvs {- Note [Substitution in template variables kinds] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ data List a = Nil | Cons a (List a) data SList s as where SNil :: SList s Nil We call tcResultType with tmpl_tvs = [(k :: BOX), (s :: k -> *), (as :: List k)] res_tmpl = SList k s as res_ty = ResTyGADT (SList k1 (s1 :: k1 -> *) (Nil k1)) We get subst: k -> k1 s -> s1 as -> Nil k1 Now we want to find out the universal variables and the equivalences between some of them and types (GADT). In this example, k and s are mapped to exactly variables which are not already present in the universal set, so we just add them without any coercion. But 'as' is mapped to 'Nil k1', so we add 'as' to the universal set, and add the equivalence with 'Nil k1' in 'eqs'. The problem is that with kind polymorphism, as's kind may now contain kind variables, and we have to apply the template substitution to it, which is why we create new_tmpl. The template substitution only maps kind variables to kind variables, since GADTs are not kind indexed. ************************************************************************ * * Validity checking * * ************************************************************************ Validity checking is done once the mutually-recursive knot has been tied, so we can look at things freely. 
-} checkClassCycleErrs :: Class -> TcM () checkClassCycleErrs cls = mapM_ recClsErr (calcClassCycles cls) checkValidTyCl :: TyThing -> TcM () checkValidTyCl thing = setSrcSpan (getSrcSpan thing) $ addTyThingCtxt thing $ case thing of ATyCon tc -> checkValidTyCon tc AnId _ -> return () -- Generic default methods are checked -- with their parent class ACoAxiom _ -> return () -- Axioms checked with their parent -- closed family tycon _ -> pprTrace "checkValidTyCl" (ppr thing) $ return () ------------------------- -- For data types declared with record syntax, we require -- that each constructor that has a field 'f' -- (a) has the same result type -- (b) has the same type for 'f' -- modulo alpha conversion of the quantified type variables -- of the constructor. -- -- Note that we allow existentials to match because the -- fields can never meet. E.g -- data T where -- T1 { f1 :: b, f2 :: a, f3 ::Int } :: T -- T2 { f1 :: c, f2 :: c, f3 ::Int } :: T -- Here we do not complain about f1,f2 because they are existential checkValidTyCon :: TyCon -> TcM () checkValidTyCon tc | Just cl <- tyConClass_maybe tc = checkValidClass cl | Just syn_rhs <- synTyConRhs_maybe tc = checkValidType syn_ctxt syn_rhs | Just fam_flav <- famTyConFlav_maybe tc = case fam_flav of { ClosedSynFamilyTyCon ax -> checkValidClosedCoAxiom ax ; AbstractClosedSynFamilyTyCon -> do { hsBoot <- tcIsHsBootOrSig ; checkTc hsBoot $ ptext (sLit "You may omit the equations in a closed type family") $$ ptext (sLit "only in a .hs-boot file") } ; OpenSynFamilyTyCon -> return () ; BuiltInSynFamTyCon _ -> return () } | otherwise = do { -- Check the context on the data decl traceTc "cvtc1" (ppr tc) ; checkValidTheta (DataTyCtxt name) (tyConStupidTheta tc) ; traceTc "cvtc2" (ppr tc) ; dflags <- getDynFlags ; existential_ok <- xoptM Opt_ExistentialQuantification ; gadt_ok <- xoptM Opt_GADTs ; let ex_ok = existential_ok || gadt_ok -- Data cons can have existential context ; mapM_ (checkValidDataCon dflags ex_ok tc) data_cons -- Check that fields with the same name share a type ; mapM_ check_fields groups } where syn_ctxt = TySynCtxt name name = tyConName tc data_cons = tyConDataCons tc groups = equivClasses cmp_fld (concatMap get_fields data_cons) cmp_fld (f1,_) (f2,_) = f1 `compare` f2 get_fields con = dataConFieldLabels con `zip` repeat con -- dataConFieldLabels may return the empty list, which is fine -- See Note [GADT record selectors] in MkId.lhs -- We must check (a) that the named field has the same -- type in each constructor -- (b) that those constructors have the same result type -- -- However, the constructors may have differently named type variable -- and (worse) we don't know how the correspond to each other. E.g. -- C1 :: forall a b. { f :: a, g :: b } -> T a b -- C2 :: forall d c. { f :: c, g :: c } -> T c d -- -- So what we do is to ust Unify.tcMatchTys to compare the first candidate's -- result type against other candidates' types BOTH WAYS ROUND. -- If they magically agrees, take the substitution and -- apply them to the latter ones, and see if they match perfectly. 
check_fields ((label, con1) : other_fields) -- These fields all have the same name, but are from -- different constructors in the data type = recoverM (return ()) $ mapM_ checkOne other_fields -- Check that all the fields in the group have the same type -- NB: this check assumes that all the constructors of a given -- data type use the same type variables where (tvs1, _, _, res1) = dataConSig con1 ts1 = mkVarSet tvs1 fty1 = dataConFieldType con1 label checkOne (_, con2) -- Do it bothways to ensure they are structurally identical = do { checkFieldCompat label con1 con2 ts1 res1 res2 fty1 fty2 ; checkFieldCompat label con2 con1 ts2 res2 res1 fty2 fty1 } where (tvs2, _, _, res2) = dataConSig con2 ts2 = mkVarSet tvs2 fty2 = dataConFieldType con2 label check_fields [] = panic "checkValidTyCon/check_fields []" checkValidClosedCoAxiom :: CoAxiom Branched -> TcM () checkValidClosedCoAxiom (CoAxiom { co_ax_branches = branches, co_ax_tc = tc }) = tcAddClosedTypeFamilyDeclCtxt tc $ do { brListFoldlM_ check_accessibility [] branches ; void $ brListMapM (checkValidTyFamInst Nothing tc) branches } where check_accessibility :: [CoAxBranch] -- prev branches (in reverse order) -> CoAxBranch -- cur branch -> TcM [CoAxBranch] -- cur : prev -- Check whether the branch is dominated by earlier -- ones and hence is inaccessible check_accessibility prev_branches cur_branch = do { when (cur_branch `isDominatedBy` prev_branches) $ addWarnAt (coAxBranchSpan cur_branch) $ inaccessibleCoAxBranch tc cur_branch ; return (cur_branch : prev_branches) } checkFieldCompat :: Name -> DataCon -> DataCon -> TyVarSet -> Type -> Type -> Type -> Type -> TcM () checkFieldCompat fld con1 con2 tvs1 res1 res2 fty1 fty2 = do { checkTc (isJust mb_subst1) (resultTypeMisMatch fld con1 con2) ; checkTc (isJust mb_subst2) (fieldTypeMisMatch fld con1 con2) } where mb_subst1 = tcMatchTy tvs1 res1 res2 mb_subst2 = tcMatchTyX tvs1 (expectJust "checkFieldCompat" mb_subst1) fty1 fty2 ------------------------------- checkValidDataCon :: DynFlags -> Bool -> TyCon -> DataCon -> TcM () checkValidDataCon dflags existential_ok tc con = setSrcSpan (srcLocSpan (getSrcLoc con)) $ addErrCtxt (dataConCtxt con) $ do { -- Check that the return type of the data constructor -- matches the type constructor; eg reject this: -- data T a where { MkT :: Bogus a } -- c.f. Note [Check role annotations in a second pass] -- and Note [Checking GADT return types] let tc_tvs = tyConTyVars tc res_ty_tmpl = mkFamilyTyConApp tc (mkTyVarTys tc_tvs) orig_res_ty = dataConOrigResTy con ; traceTc "checkValidDataCon" (vcat [ ppr con, ppr tc, ppr tc_tvs , ppr res_ty_tmpl <+> dcolon <+> ppr (typeKind res_ty_tmpl) , ppr orig_res_ty <+> dcolon <+> ppr (typeKind orig_res_ty)]) ; checkTc (isJust (tcMatchTy (mkVarSet tc_tvs) res_ty_tmpl orig_res_ty)) (badDataConTyCon con res_ty_tmpl orig_res_ty) -- Check that the result type is a *monotype* -- e.g. reject this: MkT :: T (forall a. a->a) -- Reason: it's really the argument of an equality constraint ; checkValidMonoType orig_res_ty -- Check all argument types for validity ; checkValidType ctxt (dataConUserType con) -- Extra checks for newtype data constructors ; when (isNewTyCon tc) (checkNewDataCon con) -- Check that UNPACK pragmas and bangs work out -- E.g. reject data T = MkT {-# UNPACK #-} Int -- No "!" 
-- data T = MkT {-# UNPACK #-} !a -- Can't unpack ; mapM_ check_bang (zip3 (dataConSrcBangs con) (dataConImplBangs con) [1..]) -- Check that existentials are allowed if they are used ; checkTc (existential_ok || isVanillaDataCon con) (badExistential con) -- Check that we aren't doing GADT type refinement on kind variables -- e.g reject data T (a::k) where -- T1 :: T Int -- T2 :: T Maybe ; checkTc (not (any (isKindVar . fst) (dataConEqSpec con))) (badGadtKindCon con) ; traceTc "Done validity of data con" (ppr con <+> ppr (dataConRepType con)) } where ctxt = ConArgCtxt (dataConName con) check_bang (HsSrcBang _ (Just want_unpack) has_bang, rep_bang, n) | want_unpack, not has_bang = addWarnTc (bad_bang n (ptext (sLit "UNPACK pragma lacks '!'"))) | want_unpack , case rep_bang of { HsUnpack {} -> False; _ -> True } , not (gopt Opt_OmitInterfacePragmas dflags) -- If not optimising, se don't unpack, so don't complain! -- See MkId.dataConArgRep, the (HsBang True) case = addWarnTc (bad_bang n (ptext (sLit "Ignoring unusable UNPACK pragma"))) check_bang _ = return () bad_bang n herald = hang herald 2 (ptext (sLit "on the") <+> speakNth n <+> ptext (sLit "argument of") <+> quotes (ppr con)) ------------------------------- checkNewDataCon :: DataCon -> TcM () -- Further checks for the data constructor of a newtype checkNewDataCon con = do { checkTc (isSingleton arg_tys) (newtypeFieldErr con (length arg_tys)) -- One argument ; check_con (null eq_spec) $ ptext (sLit "A newtype constructor must have a return type of form T a1 ... an") -- Return type is (T a b c) ; check_con (null theta) $ ptext (sLit "A newtype constructor cannot have a context in its type") ; check_con (null ex_tvs) $ ptext (sLit "A newtype constructor cannot have existential type variables") -- No existentials ; checkTc (not (any isBanged (dataConSrcBangs con))) (newtypeStrictError con) -- No strictness } where (_univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res_ty) = dataConFullSig con check_con what msg = checkTc what (msg $$ ppr con <+> dcolon <+> ppr (dataConUserType con)) ------------------------------- checkValidClass :: Class -> TcM () checkValidClass cls = do { constrained_class_methods <- xoptM Opt_ConstrainedClassMethods ; multi_param_type_classes <- xoptM Opt_MultiParamTypeClasses ; nullary_type_classes <- xoptM Opt_NullaryTypeClasses ; fundep_classes <- xoptM Opt_FunctionalDependencies -- Check that the class is unary, unless multiparameter type classes -- are enabled; also recognize deprecated nullary type classes -- extension (subsumed by multiparameter type classes, Trac #8993) ; checkTc (multi_param_type_classes || cls_arity == 1 || (nullary_type_classes && cls_arity == 0)) (classArityErr cls_arity cls) ; checkTc (fundep_classes || null fundeps) (classFunDepsErr cls) -- Check the super-classes ; checkValidTheta (ClassSCCtxt (className cls)) theta -- Now check for cyclic superclasses -- If there are superclass cycles, checkClassCycleErrs bails. ; checkClassCycleErrs cls -- Check the class operations. 
-- But only if there have been no earlier errors -- See Note [Abort when superclass cycle is detected] ; whenNoErrs $ mapM_ (check_op constrained_class_methods) op_stuff -- Check the associated type defaults are well-formed and instantiated ; mapM_ check_at_defs at_stuff } where (tyvars, fundeps, theta, _, at_stuff, op_stuff) = classExtraBigSig cls cls_arity = count isTypeVar tyvars -- Ignore kind variables cls_tv_set = mkVarSet tyvars mini_env = zipVarEnv tyvars (mkTyVarTys tyvars) check_op constrained_class_methods (sel_id, dm) = addErrCtxt (classOpCtxt sel_id tau) $ do { checkValidTheta ctxt (tail theta) -- The 'tail' removes the initial (C a) from the -- class itself, leaving just the method type ; traceTc "class op type" (ppr op_ty <+> ppr tau) ; checkValidType ctxt tau -- Check that the method type mentions a class variable -- But actually check that the variables *reachable from* -- the method type include a class variable. -- Example: tc223 -- class Error e => Game b mv e | b -> mv e where -- newBoard :: MonadState b m => m () -- Here, MonadState has a fundep m->b, so newBoard is fine ; check_mentions (growThetaTyVars theta (tyVarsOfType tau)) (ptext (sLit "class method") <+> quotes (ppr sel_id)) ; case dm of GenDefMeth dm_name -> do { dm_id <- tcLookupId dm_name ; checkValidType (FunSigCtxt op_name) (idType dm_id) } _ -> return () } where ctxt = FunSigCtxt op_name op_name = idName sel_id op_ty = idType sel_id (_,theta1,tau1) = tcSplitSigmaTy op_ty (_,theta2,tau2) = tcSplitSigmaTy tau1 (theta,tau) | constrained_class_methods = (theta1 ++ theta2, tau2) | otherwise = (theta1, mkPhiTy (tail theta1) tau1) -- Ugh! The function might have a type like -- op :: forall a. C a => forall b. (Eq b, Eq a) => tau2 -- With -XConstrainedClassMethods, we want to allow this, even though the inner -- forall has an (Eq a) constraint. Whereas in general, each constraint -- in the context of a for-all must mention at least one quantified -- type variable. What a mess! check_at_defs (ATI fam_tc m_dflt_rhs) = do { check_mentions (mkVarSet fam_tvs) $ ptext (sLit "associated type") <+> quotes (ppr fam_tc) ; whenIsJust m_dflt_rhs $ \ (rhs, loc) -> checkValidTyFamEqn (Just (cls, mini_env)) fam_tc fam_tvs (mkTyVarTys fam_tvs) rhs loc } where fam_tvs = tyConTyVars fam_tc check_mentions :: TyVarSet -> SDoc -> TcM () -- Check that the thing (method or associated type) mentions at least -- one of the class type variables -- The check is disabled for nullary type classes, -- since there is no possible ambiguity (Trac #10020) check_mentions thing_tvs thing_doc = checkTc (cls_arity == 0 || thing_tvs `intersectsVarSet` cls_tv_set) (noClassTyVarErr cls thing_doc) checkFamFlag :: Name -> TcM () -- Check that we don't use families without -XTypeFamilies -- The parser won't even parse them, but I suppose a GHC API -- client might have a go! checkFamFlag tc_name = do { idx_tys <- xoptM Opt_TypeFamilies ; checkTc idx_tys err_msg } where err_msg = hang (ptext (sLit "Illegal family declaration for") <+> quotes (ppr tc_name)) 2 (ptext (sLit "Use TypeFamilies to allow indexed type families")) {- Note [Abort when superclass cycle is detected] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We must avoid doing the ambiguity check for the methods (in checkValidClass.check_op) when there are already errors accumulated. This is because one of the errors may be a superclass cycle, and superclass cycles cause canonicalization to loop. 
Here is a representative example: class D a => C a where meth :: D a => () class C a => D a This fixes Trac #9415, #9739 ************************************************************************ * * Checking role validity * * ************************************************************************ -} checkValidRoleAnnots :: RoleAnnots -> TyThing -> TcM () checkValidRoleAnnots role_annots thing = case thing of { ATyCon tc | isTypeSynonymTyCon tc -> check_no_roles | isFamilyTyCon tc -> check_no_roles | isAlgTyCon tc -> check_roles where name = tyConName tc -- Role annotations are given only on *type* variables, but a tycon stores -- roles for all variables. So, we drop the kind roles (which are all -- Nominal, anyway). tyvars = tyConTyVars tc roles = tyConRoles tc (kind_vars, type_vars) = span isKindVar tyvars type_roles = dropList kind_vars roles role_annot_decl_maybe = lookupRoleAnnots role_annots name check_roles = whenIsJust role_annot_decl_maybe $ \decl@(L loc (RoleAnnotDecl _ the_role_annots)) -> addRoleAnnotCtxt name $ setSrcSpan loc $ do { role_annots_ok <- xoptM Opt_RoleAnnotations ; checkTc role_annots_ok $ needXRoleAnnotations tc ; checkTc (type_vars `equalLength` the_role_annots) (wrongNumberOfRoles type_vars decl) ; _ <- zipWith3M checkRoleAnnot type_vars the_role_annots type_roles -- Representational or phantom roles for class parameters -- quickly lead to incoherence. So, we require -- IncoherentInstances to have them. See #8773. ; incoherent_roles_ok <- xoptM Opt_IncoherentInstances ; checkTc ( incoherent_roles_ok || (not $ isClassTyCon tc) || (all (== Nominal) type_roles)) incoherentRoles ; lint <- goptM Opt_DoCoreLinting ; when lint $ checkValidRoles tc } check_no_roles = whenIsJust role_annot_decl_maybe illegalRoleAnnotDecl ; _ -> return () } checkRoleAnnot :: TyVar -> Located (Maybe Role) -> Role -> TcM () checkRoleAnnot _ (L _ Nothing) _ = return () checkRoleAnnot tv (L _ (Just r1)) r2 = when (r1 /= r2) $ addErrTc $ badRoleAnnot (tyVarName tv) r1 r2 -- This is a double-check on the role inference algorithm. It is only run when -- -dcore-lint is enabled. 
See Note [Role inference] in TcTyDecls checkValidRoles :: TyCon -> TcM () -- If you edit this function, you may need to update the GHC formalism -- See Note [GHC Formalism] in CoreLint checkValidRoles tc | isAlgTyCon tc -- tyConDataCons returns an empty list for data families = mapM_ check_dc_roles (tyConDataCons tc) | Just rhs <- synTyConRhs_maybe tc = check_ty_roles (zipVarEnv (tyConTyVars tc) (tyConRoles tc)) Representational rhs | otherwise = return () where check_dc_roles datacon = do { traceTc "check_dc_roles" (ppr datacon <+> ppr (tyConRoles tc)) ; mapM_ (check_ty_roles role_env Representational) $ eqSpecPreds eq_spec ++ theta ++ arg_tys } -- See Note [Role-checking data constructor arguments] in TcTyDecls where (univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res_ty) = dataConFullSig datacon univ_roles = zipVarEnv univ_tvs (tyConRoles tc) -- zipVarEnv uses zipEqual, but we don't want that for ex_tvs ex_roles = mkVarEnv (zip ex_tvs (repeat Nominal)) role_env = univ_roles `plusVarEnv` ex_roles check_ty_roles env role (TyVarTy tv) = case lookupVarEnv env tv of Just role' -> unless (role' `ltRole` role || role' == role) $ report_error $ ptext (sLit "type variable") <+> quotes (ppr tv) <+> ptext (sLit "cannot have role") <+> ppr role <+> ptext (sLit "because it was assigned role") <+> ppr role' Nothing -> report_error $ ptext (sLit "type variable") <+> quotes (ppr tv) <+> ptext (sLit "missing in environment") check_ty_roles env Representational (TyConApp tc tys) = let roles' = tyConRoles tc in zipWithM_ (maybe_check_ty_roles env) roles' tys check_ty_roles env Nominal (TyConApp _ tys) = mapM_ (check_ty_roles env Nominal) tys check_ty_roles _ Phantom ty@(TyConApp {}) = pprPanic "check_ty_roles" (ppr ty) check_ty_roles env role (AppTy ty1 ty2) = check_ty_roles env role ty1 >> check_ty_roles env Nominal ty2 check_ty_roles env role (FunTy ty1 ty2) = check_ty_roles env role ty1 >> check_ty_roles env role ty2 check_ty_roles env role (ForAllTy tv ty) = check_ty_roles (extendVarEnv env tv Nominal) role ty check_ty_roles _ _ (LitTy {}) = return () maybe_check_ty_roles env role ty = when (role == Nominal || role == Representational) $ check_ty_roles env role ty report_error doc = addErrTc $ vcat [ptext (sLit "Internal error in role inference:"), doc, ptext (sLit "Please report this as a GHC bug: http://www.haskell.org/ghc/reportabug")] {- ************************************************************************ * * Building record selectors * * ************************************************************************ -} mkDefaultMethodIds :: [TyThing] -> [Id] -- See Note [Default method Ids and Template Haskell] mkDefaultMethodIds things = [ mkExportedLocalId VanillaId dm_name (idType sel_id) | ATyCon tc <- things , Just cls <- [tyConClass_maybe tc] , (sel_id, DefMeth dm_name) <- classOpItems cls ] {- Note [Default method Ids and Template Haskell] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider this (Trac #4169): class Numeric a where fromIntegerNum :: a fromIntegerNum = ... ast :: Q [Dec] ast = [d| instance Numeric Int |] When we typecheck 'ast' we have done the first pass over the class decl (in tcTyClDecls), but we have not yet typechecked the default-method declarations (because they can mention value declarations). So we must bring the default method Ids into scope first (so they can be seen when typechecking the [d| .. |] quote, and typecheck them later. 
-} mkRecSelBinds :: [TyThing] -> HsValBinds Name -- NB We produce *un-typechecked* bindings, rather like 'deriving' -- This makes life easier, because the later type checking will add -- all necessary type abstractions and applications mkRecSelBinds tycons = ValBindsOut [(NonRecursive, b) | b <- binds] sigs where (sigs, binds) = unzip rec_sels rec_sels = map mkRecSelBind [ (tc,fld) | ATyCon tc <- tycons , fld <- tyConFields tc ] mkRecSelBind :: (TyCon, FieldLabel) -> (LSig Name, LHsBinds Name) mkRecSelBind (tycon, sel_name) = (L loc (IdSig sel_id), unitBag (L loc sel_bind)) where loc = getSrcSpan sel_name sel_id = mkExportedLocalId rec_details sel_name sel_ty rec_details = RecSelId { sel_tycon = tycon, sel_naughty = is_naughty } -- Find a representative constructor, con1 all_cons = tyConDataCons tycon cons_w_field = [ con | con <- all_cons , sel_name `elem` dataConFieldLabels con ] con1 = ASSERT( not (null cons_w_field) ) head cons_w_field -- Selector type; Note [Polymorphic selectors] field_ty = dataConFieldType con1 sel_name data_ty = dataConOrigResTy con1 data_tvs = tyVarsOfType data_ty is_naughty = not (tyVarsOfType field_ty `subVarSet` data_tvs) (field_tvs, field_theta, field_tau) = tcSplitSigmaTy field_ty sel_ty | is_naughty = unitTy -- See Note [Naughty record selectors] | otherwise = mkForAllTys (varSetElemsKvsFirst $ data_tvs `extendVarSetList` field_tvs) $ mkPhiTy (dataConStupidTheta con1) $ -- Urgh! mkPhiTy field_theta $ -- Urgh! mkFunTy data_ty field_tau -- Make the binding: sel (C2 { fld = x }) = x -- sel (C7 { fld = x }) = x -- where cons_w_field = [C2,C7] sel_bind = mkTopFunBind Generated sel_lname alts where alts | is_naughty = [mkSimpleMatch [] unit_rhs] | otherwise = map mk_match cons_w_field ++ deflt mk_match con = mkSimpleMatch [L loc (mk_sel_pat con)] (L loc (HsVar field_var)) mk_sel_pat con = ConPatIn (L loc (getName con)) (RecCon rec_fields) rec_fields = HsRecFields { rec_flds = [rec_field], rec_dotdot = Nothing } rec_field = noLoc (HsRecField { hsRecFieldId = sel_lname , hsRecFieldArg = L loc (VarPat field_var) , hsRecPun = False }) sel_lname = L loc sel_name field_var = mkInternalName (mkBuiltinUnique 1) (getOccName sel_name) loc -- Add catch-all default case unless the case is exhaustive -- We do this explicitly so that we get a nice error message that -- mentions this particular record selector deflt | all dealt_with all_cons = [] | otherwise = [mkSimpleMatch [L loc (WildPat placeHolderType)] (mkHsApp (L loc (HsVar (getName rEC_SEL_ERROR_ID))) (L loc (HsLit msg_lit)))] -- Do not add a default case unless there are unmatched -- constructors. We must take account of GADTs, else we -- get overlap warning messages from the pattern-match checker -- NB: we need to pass type args for the *representation* TyCon -- to dataConCannotMatch, hence the calculation of inst_tys -- This matters in data families -- data instance T Int a where -- A :: { fld :: Int } -> T Int Bool -- B :: { fld :: Int } -> T Int Char dealt_with con = con `elem` cons_w_field || dataConCannotMatch inst_tys con inst_tys = substTyVars (mkTopTvSubst (dataConEqSpec con1)) (dataConUnivTyVars con1) unit_rhs = mkLHsTupleExpr [] msg_lit = HsStringPrim "" $ unsafeMkByteString $ occNameString (getOccName sel_name) --------------- tyConFields :: TyCon -> [FieldLabel] tyConFields tc | isAlgTyCon tc = nub (concatMap dataConFieldLabels (tyConDataCons tc)) | otherwise = [] {- Note [Polymorphic selectors] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When a record has a polymorphic field, we pull the foralls out to the front. 
   data T = MkT { f :: forall a. [a] -> a }
Then
   f :: forall a. T -> [a] -> a
NOT
   f :: T -> forall a. [a] -> a

This is horrid. It's only needed in deeply obscure cases, which I hate.
The only case I know is test tc163, which is worth looking at. It's far
from clear that this test should succeed at all!

Note [Naughty record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A "naughty" field is one for which we can't define a record
selector, because an existential type variable would escape.  For example:
        data T = forall a. MkT { x,y::a }
We obviously can't define
        x (MkT v _) = v
Nevertheless we *do* put a RecSelId into the type environment
so that if the user tries to use 'x' as a selector we can bleat
helpfully, rather than saying unhelpfully that 'x' is not in scope.
Hence the sel_naughty flag, to identify record selectors that don't really exist.

In general, a field is "naughty" if its type mentions a type variable that
isn't in the result type of the constructor.  Note that this *allows*
GADT record selectors (Note [GADT record selectors]) whose types may look
like    sel :: T [a] -> a

For naughty selectors we make a dummy binding
   sel = ()
so that the later type-check will add them to the environment, and they'll be
exported.  The function is never called, because the typechecker spots the
sel_naughty field.

Note [GADT record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For GADTs, we require that all constructors with a common field 'f' have the same
result type (modulo alpha conversion).  [Checked in TcTyClsDecls.checkValidTyCon]
E.g.
        data T where
          T1 { f :: Maybe a } :: T [a]
          T2 { f :: Maybe a, y :: b } :: T [a]
          T3 :: T Int

and now the selector takes that result type as its argument:
   f :: forall a. T [a] -> Maybe a

Details: the "real" types of T1,T2 are:
   T1 :: forall r a.   (r~[a]) => a -> T r
   T2 :: forall r a b. (r~[a]) => a -> b -> T r

So the selector looks like this:
   f :: forall a. T [a] -> Maybe a
   f (a:*) (t:T [a])
     = case t of
         T1 c   (g:[a]~[c]) (v:Maybe c)       -> v `cast` Maybe (right (sym g))
         T2 c d (g:[a]~[c]) (v:Maybe c) (w:d) -> v `cast` Maybe (right (sym g))
         T3 -> error "T3 does not have field f"

Note the forall'd tyvars of the selector are just the free tyvars of the
result type; there may be other tyvars in the constructor's type
(e.g. 'b' in T2).

Note the need for casts in the result!

Note [Selector running example]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's OK to combine GADTs and type families.  Here's a running example:

        data instance T [a] where
          T1 { fld :: b } :: T [Maybe b]

The representation type looks like this
        data :R7T a where
          T1 { fld :: b } :: :R7T (Maybe b)

and there's a coercion from the family type to the representation type
        :CoR7T a :: T [a] ~ :R7T a

The selector we want for fld looks like this:
        fld :: forall b. T [Maybe b] -> b
        fld = /\b. \(d::T [Maybe b]).
              case d `cast` :CoR7T (Maybe b) of
                T1 (x::b) -> x

The scrutinee of the case has type :R7T (Maybe b), which can be
gotten by applying the eq_spec to the univ_tvs of the data con.
************************************************************************ * * Error messages * * ************************************************************************ -} tcAddTyFamInstCtxt :: TyFamInstDecl Name -> TcM a -> TcM a tcAddTyFamInstCtxt decl = tcAddFamInstCtxt (ptext (sLit "type instance")) (tyFamInstDeclName decl) tcAddDataFamInstCtxt :: DataFamInstDecl Name -> TcM a -> TcM a tcAddDataFamInstCtxt decl = tcAddFamInstCtxt (pprDataFamInstFlavour decl <+> ptext (sLit "instance")) (unLoc (dfid_tycon decl)) tcAddFamInstCtxt :: SDoc -> Name -> TcM a -> TcM a tcAddFamInstCtxt flavour tycon thing_inside = addErrCtxt ctxt thing_inside where ctxt = hsep [ptext (sLit "In the") <+> flavour <+> ptext (sLit "declaration for"), quotes (ppr tycon)] tcAddClosedTypeFamilyDeclCtxt :: TyCon -> TcM a -> TcM a tcAddClosedTypeFamilyDeclCtxt tc = addErrCtxt ctxt where ctxt = ptext (sLit "In the equations for closed type family") <+> quotes (ppr tc) resultTypeMisMatch :: Name -> DataCon -> DataCon -> SDoc resultTypeMisMatch field_name con1 con2 = vcat [sep [ptext (sLit "Constructors") <+> ppr con1 <+> ptext (sLit "and") <+> ppr con2, ptext (sLit "have a common field") <+> quotes (ppr field_name) <> comma], nest 2 $ ptext (sLit "but have different result types")] fieldTypeMisMatch :: Name -> DataCon -> DataCon -> SDoc fieldTypeMisMatch field_name con1 con2 = sep [ptext (sLit "Constructors") <+> ppr con1 <+> ptext (sLit "and") <+> ppr con2, ptext (sLit "give different types for field"), quotes (ppr field_name)] dataConCtxtName :: [Located Name] -> SDoc dataConCtxtName [con] = ptext (sLit "In the definition of data constructor") <+> quotes (ppr con) dataConCtxtName con = ptext (sLit "In the definition of data constructors") <+> interpp'SP con dataConCtxt :: Outputable a => a -> SDoc dataConCtxt con = ptext (sLit "In the definition of data constructor") <+> quotes (ppr con) classOpCtxt :: Var -> Type -> SDoc classOpCtxt sel_id tau = sep [ptext (sLit "When checking the class method:"), nest 2 (pprPrefixOcc sel_id <+> dcolon <+> ppr tau)] classArityErr :: Int -> Class -> SDoc classArityErr n cls | n == 0 = mkErr "No" "no-parameter" | otherwise = mkErr "Too many" "multi-parameter" where mkErr howMany allowWhat = vcat [ptext (sLit $ howMany ++ " parameters for class") <+> quotes (ppr cls), parens (ptext (sLit $ "Use MultiParamTypeClasses to allow " ++ allowWhat ++ " classes"))] classFunDepsErr :: Class -> SDoc classFunDepsErr cls = vcat [ptext (sLit "Fundeps in class") <+> quotes (ppr cls), parens (ptext (sLit "Use FunctionalDependencies to allow fundeps"))] noClassTyVarErr :: Class -> SDoc -> SDoc noClassTyVarErr clas what = sep [ptext (sLit "The") <+> what, ptext (sLit "mentions none of the type or kind variables of the class") <+> quotes (ppr clas <+> hsep (map ppr (classTyVars clas)))] recSynErr :: [LTyClDecl Name] -> TcRn () recSynErr syn_decls = setSrcSpan (getLoc (head sorted_decls)) $ addErr (sep [ptext (sLit "Cycle in type synonym declarations:"), nest 2 (vcat (map ppr_decl sorted_decls))]) where sorted_decls = sortLocated syn_decls ppr_decl (L loc decl) = ppr loc <> colon <+> ppr decl recClsErr :: [TyCon] -> TcRn () recClsErr cycles = addErr (sep [ptext (sLit "Cycle in class declaration (via superclasses):"), nest 2 (hsep (intersperse (text "->") (map ppr cycles)))]) badDataConTyCon :: DataCon -> Type -> Type -> SDoc badDataConTyCon data_con res_ty_tmpl actual_res_ty = hang (ptext (sLit "Data constructor") <+> quotes (ppr data_con) <+> ptext (sLit "returns type") <+> quotes (ppr actual_res_ty)) 2 
(ptext (sLit "instead of an instance of its parent type") <+> quotes (ppr res_ty_tmpl)) badGadtKindCon :: DataCon -> SDoc badGadtKindCon data_con = hang (ptext (sLit "Data constructor") <+> quotes (ppr data_con) <+> ptext (sLit "cannot be GADT-like in its *kind* arguments")) 2 (ppr data_con <+> dcolon <+> ppr (dataConUserType data_con)) badGadtDecl :: Name -> SDoc badGadtDecl tc_name = vcat [ ptext (sLit "Illegal generalised algebraic data declaration for") <+> quotes (ppr tc_name) , nest 2 (parens $ ptext (sLit "Use GADTs to allow GADTs")) ] badExistential :: DataCon -> SDoc badExistential con = hang (ptext (sLit "Data constructor") <+> quotes (ppr con) <+> ptext (sLit "has existential type variables, a context, or a specialised result type")) 2 (vcat [ ppr con <+> dcolon <+> ppr (dataConUserType con) , parens $ ptext (sLit "Use ExistentialQuantification or GADTs to allow this") ]) badStupidTheta :: Name -> SDoc badStupidTheta tc_name = ptext (sLit "A data type declared in GADT style cannot have a context:") <+> quotes (ppr tc_name) newtypeConError :: Name -> Int -> SDoc newtypeConError tycon n = sep [ptext (sLit "A newtype must have exactly one constructor,"), nest 2 $ ptext (sLit "but") <+> quotes (ppr tycon) <+> ptext (sLit "has") <+> speakN n ] newtypeStrictError :: DataCon -> SDoc newtypeStrictError con = sep [ptext (sLit "A newtype constructor cannot have a strictness annotation,"), nest 2 $ ptext (sLit "but") <+> quotes (ppr con) <+> ptext (sLit "does")] newtypeFieldErr :: DataCon -> Int -> SDoc newtypeFieldErr con_name n_flds = sep [ptext (sLit "The constructor of a newtype must have exactly one field"), nest 2 $ ptext (sLit "but") <+> quotes (ppr con_name) <+> ptext (sLit "has") <+> speakN n_flds] badSigTyDecl :: Name -> SDoc badSigTyDecl tc_name = vcat [ ptext (sLit "Illegal kind signature") <+> quotes (ppr tc_name) , nest 2 (parens $ ptext (sLit "Use KindSignatures to allow kind signatures")) ] emptyConDeclsErr :: Name -> SDoc emptyConDeclsErr tycon = sep [quotes (ppr tycon) <+> ptext (sLit "has no constructors"), nest 2 $ ptext (sLit "(EmptyDataDecls permits this)")] wrongKindOfFamily :: TyCon -> SDoc wrongKindOfFamily family = ptext (sLit "Wrong category of family instance; declaration was for a") <+> kindOfFamily where kindOfFamily | isTypeFamilyTyCon family = text "type family" | isDataFamilyTyCon family = text "data family" | otherwise = pprPanic "wrongKindOfFamily" (ppr family) wrongNumberOfParmsErr :: Arity -> SDoc wrongNumberOfParmsErr max_args = ptext (sLit "Number of parameters must match family declaration; expected") <+> ppr max_args wrongTyFamName :: Name -> Name -> SDoc wrongTyFamName fam_tc_name eqn_tc_name = hang (ptext (sLit "Mismatched type name in type family instance.")) 2 (vcat [ ptext (sLit "Expected:") <+> ppr fam_tc_name , ptext (sLit " Actual:") <+> ppr eqn_tc_name ]) inaccessibleCoAxBranch :: TyCon -> CoAxBranch -> SDoc inaccessibleCoAxBranch tc fi = ptext (sLit "Overlapped type family instance equation:") $$ (pprCoAxBranch tc fi) badRoleAnnot :: Name -> Role -> Role -> SDoc badRoleAnnot var annot inferred = hang (ptext (sLit "Role mismatch on variable") <+> ppr var <> colon) 2 (sep [ ptext (sLit "Annotation says"), ppr annot , ptext (sLit "but role"), ppr inferred , ptext (sLit "is required") ]) wrongNumberOfRoles :: [a] -> LRoleAnnotDecl Name -> SDoc wrongNumberOfRoles tyvars d@(L _ (RoleAnnotDecl _ annots)) = hang (ptext (sLit "Wrong number of roles listed in role annotation;") $$ ptext (sLit "Expected") <+> (ppr $ length tyvars) <> comma <+> ptext 
(sLit "got") <+> (ppr $ length annots) <> colon) 2 (ppr d) illegalRoleAnnotDecl :: LRoleAnnotDecl Name -> TcM () illegalRoleAnnotDecl (L loc (RoleAnnotDecl tycon _)) = setErrCtxt [] $ setSrcSpan loc $ addErrTc (ptext (sLit "Illegal role annotation for") <+> ppr tycon <> char ';' $$ ptext (sLit "they are allowed only for datatypes and classes.")) needXRoleAnnotations :: TyCon -> SDoc needXRoleAnnotations tc = ptext (sLit "Illegal role annotation for") <+> ppr tc <> char ';' $$ ptext (sLit "did you intend to use RoleAnnotations?") incoherentRoles :: SDoc incoherentRoles = (text "Roles other than" <+> quotes (text "nominal") <+> text "for class parameters can lead to incoherence.") $$ (text "Use IncoherentInstances to allow this; bad role found") addTyThingCtxt :: TyThing -> TcM a -> TcM a addTyThingCtxt thing = addErrCtxt ctxt where name = getName thing flav = case thing of ATyCon tc | isClassTyCon tc -> ptext (sLit "class") | isTypeFamilyTyCon tc -> ptext (sLit "type family") | isDataFamilyTyCon tc -> ptext (sLit "data family") | isTypeSynonymTyCon tc -> ptext (sLit "type") | isNewTyCon tc -> ptext (sLit "newtype") | isDataTyCon tc -> ptext (sLit "data") _ -> pprTrace "addTyThingCtxt strange" (ppr thing) Outputable.empty ctxt = hsep [ ptext (sLit "In the"), flav , ptext (sLit "declaration for"), quotes (ppr name) ] addRoleAnnotCtxt :: Name -> TcM a -> TcM a addRoleAnnotCtxt name = addErrCtxt $ text "while checking a role annotation for" <+> quotes (ppr name)
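-- ---------------------------------------------------------------------------
-- [Editor's sketch, not part of the GHC/ETA module above.]
-- A small source-level illustration of Note [Naughty record selectors] and
-- Note [GADT record selectors]; the names (Naughty, G, gfld, useSel) are
-- invented for this example and do not occur in the module itself.
{-# LANGUAGE ExistentialQuantification, GADTs #-}
module RecSelSketch where

-- 'payload' mentions the existential 'a', which does not appear in the
-- result type of MkNaughty, so no real selector can be generated for it;
-- the typechecker marks it as "naughty".  'tag' is an ordinary field and
-- gets a normal selector  tag :: Naughty -> Int.
data Naughty = forall a. MkNaughty { payload :: a, tag :: Int }

-- Both constructors share the field 'gfld' and have the same result type
-- (modulo alpha-conversion), so a selector is generated whose argument
-- type is that common result type:  gfld :: G [b] -> Maybe b
data G a where
  G1 :: { gfld :: Maybe b }               -> G [b]
  G2 :: { gfld :: Maybe b, extra :: Int } -> G [b]

useSel :: G [b] -> Maybe b
useSel = gfld
-- ---------------------------------------------------------------------------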
pparkkin/eta
compiler/ETA/TypeCheck/TcTyClsDecls.hs
bsd-3-clause
100,831
877
25
29,643
17,851
9,843
8,008
-1
-1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- Module : Network.AWS.Redshift.DeleteHsmConfiguration -- Copyright : (c) 2013-2014 Brendan Hay <[email protected]> -- License : This Source Code Form is subject to the terms of -- the Mozilla Public License, v. 2.0. -- A copy of the MPL can be found in the LICENSE file or -- you can obtain it at http://mozilla.org/MPL/2.0/. -- Maintainer : Brendan Hay <[email protected]> -- Stability : experimental -- Portability : non-portable (GHC extensions) -- -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | Deletes the specified Amazon Redshift HSM configuration. -- -- <http://docs.aws.amazon.com/redshift/latest/APIReference/API_DeleteHsmConfiguration.html> module Network.AWS.Redshift.DeleteHsmConfiguration ( -- * Request DeleteHsmConfiguration -- ** Request constructor , deleteHsmConfiguration -- ** Request lenses , dhcHsmConfigurationIdentifier -- * Response , DeleteHsmConfigurationResponse -- ** Response constructor , deleteHsmConfigurationResponse ) where import Network.AWS.Prelude import Network.AWS.Request.Query import Network.AWS.Redshift.Types import qualified GHC.Exts newtype DeleteHsmConfiguration = DeleteHsmConfiguration { _dhcHsmConfigurationIdentifier :: Text } deriving (Eq, Ord, Read, Show, Monoid, IsString) -- | 'DeleteHsmConfiguration' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'dhcHsmConfigurationIdentifier' @::@ 'Text' -- deleteHsmConfiguration :: Text -- ^ 'dhcHsmConfigurationIdentifier' -> DeleteHsmConfiguration deleteHsmConfiguration p1 = DeleteHsmConfiguration { _dhcHsmConfigurationIdentifier = p1 } -- | The identifier of the Amazon Redshift HSM configuration to be deleted. dhcHsmConfigurationIdentifier :: Lens' DeleteHsmConfiguration Text dhcHsmConfigurationIdentifier = lens _dhcHsmConfigurationIdentifier (\s a -> s { _dhcHsmConfigurationIdentifier = a }) data DeleteHsmConfigurationResponse = DeleteHsmConfigurationResponse deriving (Eq, Ord, Read, Show, Generic) -- | 'DeleteHsmConfigurationResponse' constructor. deleteHsmConfigurationResponse :: DeleteHsmConfigurationResponse deleteHsmConfigurationResponse = DeleteHsmConfigurationResponse instance ToPath DeleteHsmConfiguration where toPath = const "/" instance ToQuery DeleteHsmConfiguration where toQuery DeleteHsmConfiguration{..} = mconcat [ "HsmConfigurationIdentifier" =? _dhcHsmConfigurationIdentifier ] instance ToHeaders DeleteHsmConfiguration instance AWSRequest DeleteHsmConfiguration where type Sv DeleteHsmConfiguration = Redshift type Rs DeleteHsmConfiguration = DeleteHsmConfigurationResponse request = post "DeleteHsmConfiguration" response = nullResponse DeleteHsmConfigurationResponse
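-- ---------------------------------------------------------------------------
-- [Editor's sketch, not part of the generated module above.]
-- Constructing the request only needs the smart constructor and lens that
-- this module exports.  The identifier string below is a made-up example
-- value, and it is assumed here that the Lens' in this SDK generation is the
-- one from the lens package, so Control.Lens.view applies to it.
{-# LANGUAGE OverloadedStrings #-}
module DeleteHsmConfigurationSketch where

import Control.Lens (view)
import Data.Text (Text)
import Network.AWS.Redshift.DeleteHsmConfiguration

exampleReq :: DeleteHsmConfiguration
exampleReq = deleteHsmConfiguration "example-hsm-configuration"

-- Reading the field back through the generated lens:
exampleId :: Text
exampleId = view dhcHsmConfigurationIdentifier exampleReq

-- Actually sending the request would go through this SDK's usual runner
-- machinery; that part is not shown because its API differs across versions.
-- ---------------------------------------------------------------------------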
romanb/amazonka
amazonka-redshift/gen/Network/AWS/Redshift/DeleteHsmConfiguration.hs
mpl-2.0
3,346
0
9
647
329
202
127
47
1
{-# OPTIONS -fno-warn-tabs #-}

-- The above warning suppression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
--     http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details

-- | The assignment of virtual registers to stack slots

--      We have lots of stack slots. Memory-to-memory moves are a pain on most
--      architectures. Therefore, we avoid having to generate memory-to-memory moves
--      by simply giving every virtual register its own stack slot.

--      The StackMap stack map keeps track of virtual register - stack slot
--      associations and of which stack slots are still free. Once it has been
--      associated, a stack slot is never "freed" or removed from the StackMap again,
--      it remains associated until we are done with the current CmmProc.
--
module RegAlloc.Linear.StackMap (
        StackSlot,
        StackMap(..),
        emptyStackMap,
        getStackSlotFor
)

where

import RegAlloc.Linear.FreeRegs

import Outputable
import Platform
import UniqFM
import Unique


-- | Identifier for a stack slot.
type StackSlot = Int

data StackMap
        = StackMap
        { -- | The slots that are still available to be allocated.
          stackMapFreeSlots     :: [StackSlot]

          -- | Assignment of vregs to stack slots.
        , stackMapAssignment    :: UniqFM StackSlot }


-- | An empty stack map, with all slots available.
emptyStackMap :: Platform -> StackMap
emptyStackMap platform = StackMap [0 .. maxSpillSlots platform] emptyUFM


-- | If this vreg unique already has a stack assignment then return the slot number,
--      otherwise allocate a new slot, and update the map.
--
getStackSlotFor :: StackMap -> Unique -> (StackMap, Int)

getStackSlotFor (StackMap [] _) _
        -- This happens all the time when trying to compile darcs' SHA1.hs, see Trac #1993
        --      SHA1.lhs has also been added to the Crypto library on Hackage,
        --      so we see this all the time.
        --
        --      It would be better to automatically invoke the graph allocator, or do something
        --      else besides panicking, but that's a job for a different day.  -- BL 2009/02
        --
        = panic $       "RegAllocLinear.getStackSlotFor: out of stack slots\n"
                ++      "   If you are trying to compile SHA1.hs from the crypto library then this\n"
                ++      "   is a known limitation in the linear allocator.\n"
                ++      "\n"
                ++      "   Try enabling the graph colouring allocator with -fregs-graph instead."
                ++      "   You can still file a bug report if you like.\n"

getStackSlotFor fs@(StackMap (freeSlot:stack') reserved) reg =
    case lookupUFM reserved reg of
        Just slot       -> (fs, slot)
        Nothing         -> (StackMap stack' (addToUFM reserved reg freeSlot), freeSlot)
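-- ---------------------------------------------------------------------------
-- [Editor's sketch, not part of the original module above.]
-- A simplified model of the same idea using Data.Map instead of UniqFM, just
-- to illustrate the behaviour described in the header comment: once a virtual
-- register has been given a slot, every later lookup returns the same slot,
-- and slots are never handed back.  All names here are invented.
module StackMapSketch where

import qualified Data.Map.Strict as M

type Slot = Int

data SimpleStackMap reg = SimpleStackMap
        { simpleFree     :: [Slot]          -- slots not yet handed out
        , simpleAssigned :: M.Map reg Slot  -- permanent reg -> slot assignment
        }

emptySimple :: Int -> SimpleStackMap reg
emptySimple nSlots = SimpleStackMap [0 .. nSlots - 1] M.empty

slotFor :: Ord reg => SimpleStackMap reg -> reg -> (SimpleStackMap reg, Slot)
slotFor sm@(SimpleStackMap free assigned) reg
  = case M.lookup reg assigned of
      Just slot -> (sm, slot)               -- already assigned: reuse it
      Nothing   -> case free of
        []          -> error "StackMapSketch.slotFor: out of stack slots"
        (slot:rest) -> ( SimpleStackMap rest (M.insert reg slot assigned)
                       , slot )

-- For example, asking twice for the same register yields the same slot:
--   let (sm1, s1) = slotFor (emptySimple 4) "v1"
--       (_,   s2) = slotFor sm1 "v1"
--   in (s1, s2)        ==>  (0, 0)
-- ---------------------------------------------------------------------------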
nomeata/ghc
compiler/nativeGen/RegAlloc/Linear/StackMap.hs
bsd-3-clause
2,719
26
11
525
304
181
123
30
2
-- | Basic operations on graphs. -- module GraphOps ( addNode, delNode, getNode, lookupNode, modNode, size, union, addConflict, delConflict, addConflicts, addCoalesce, delCoalesce, addExclusion, addExclusions, addPreference, coalesceNodes, coalesceGraph, freezeNode, freezeOneInGraph, freezeAllInGraph, scanGraph, setColor, validateGraph, slurpNodeConflictCount ) where import GraphBase import Outputable import Unique import UniqSet import UniqFM import Data.List hiding (union) import Data.Maybe -- | Lookup a node from the graph. lookupNode :: Uniquable k => Graph k cls color -> k -> Maybe (Node k cls color) lookupNode graph k = lookupUFM (graphMap graph) k -- | Get a node from the graph, throwing an error if it's not there getNode :: Uniquable k => Graph k cls color -> k -> Node k cls color getNode graph k = case lookupUFM (graphMap graph) k of Just node -> node Nothing -> panic "ColorOps.getNode: not found" -- | Add a node to the graph, linking up its edges addNode :: Uniquable k => k -> Node k cls color -> Graph k cls color -> Graph k cls color addNode k node graph = let -- add back conflict edges from other nodes to this one map_conflict = nonDetFoldUFM -- It's OK to use nonDetFoldUFM here because the -- operation is commutative (adjustUFM_C (\n -> n { nodeConflicts = addOneToUniqSet (nodeConflicts n) k})) (graphMap graph) (nodeConflicts node) -- add back coalesce edges from other nodes to this one map_coalesce = nonDetFoldUFM -- It's OK to use nonDetFoldUFM here because the -- operation is commutative (adjustUFM_C (\n -> n { nodeCoalesce = addOneToUniqSet (nodeCoalesce n) k})) map_conflict (nodeCoalesce node) in graph { graphMap = addToUFM map_coalesce k node} -- | Delete a node and all its edges from the graph. delNode :: (Uniquable k) => k -> Graph k cls color -> Maybe (Graph k cls color) delNode k graph | Just node <- lookupNode graph k = let -- delete conflict edges from other nodes to this one. graph1 = foldl' (\g k1 -> let Just g' = delConflict k1 k g in g') graph $ nonDetEltsUFM (nodeConflicts node) -- delete coalesce edge from other nodes to this one. graph2 = foldl' (\g k1 -> let Just g' = delCoalesce k1 k g in g') graph1 $ nonDetEltsUFM (nodeCoalesce node) -- See Note [Unique Determinism and code generation] -- delete the node graph3 = graphMapModify (\fm -> delFromUFM fm k) graph2 in Just graph3 | otherwise = Nothing -- | Modify a node in the graph. -- returns Nothing if the node isn't present. -- modNode :: Uniquable k => (Node k cls color -> Node k cls color) -> k -> Graph k cls color -> Maybe (Graph k cls color) modNode f k graph = case lookupNode graph k of Just Node{} -> Just $ graphMapModify (\fm -> let Just node = lookupUFM fm k node' = f node in addToUFM fm k node') graph Nothing -> Nothing -- | Get the size of the graph, O(n) size :: Graph k cls color -> Int size graph = sizeUFM $ graphMap graph -- | Union two graphs together. union :: Graph k cls color -> Graph k cls color -> Graph k cls color union graph1 graph2 = Graph { graphMap = plusUFM (graphMap graph1) (graphMap graph2) } -- | Add a conflict between nodes to the graph, creating the nodes required. -- Conflicts are virtual regs which need to be colored differently. addConflict :: Uniquable k => (k, cls) -> (k, cls) -> Graph k cls color -> Graph k cls color addConflict (u1, c1) (u2, c2) = let addNeighbor u c u' = adjustWithDefaultUFM (\node -> node { nodeConflicts = addOneToUniqSet (nodeConflicts node) u' }) (newNode u c) { nodeConflicts = unitUniqSet u' } u in graphMapModify ( addNeighbor u1 c1 u2 . 
          addNeighbor u2 c2 u1)


-- | Delete a conflict edge. k1 -> k2
--      returns Nothing if the node isn't in the graph
delConflict
        :: Uniquable k
        => k -> k
        -> Graph k cls color -> Maybe (Graph k cls color)

delConflict k1 k2
        = modNode
                (\node -> node { nodeConflicts = delOneFromUniqSet (nodeConflicts node) k2 })
                k1


-- | Add some conflicts to the graph, creating nodes if required.
--      All the nodes in the set are taken to conflict with each other.
addConflicts
        :: Uniquable k
        => UniqSet k -> (k -> cls)
        -> Graph k cls color -> Graph k cls color

addConflicts conflicts getClass

        -- just a single node, but no conflicts, create the node anyway.
        | (u : []) <- nonDetEltsUFM conflicts
        = graphMapModify
        $ adjustWithDefaultUFM
                id
                (newNode u (getClass u))
                u

        | otherwise
        = graphMapModify
        $ (\fm -> foldl' (\g u -> addConflictSet1 u getClass conflicts g) fm
                $ nonDetEltsUFM conflicts)
                -- See Note [Unique Determinism and code generation]


addConflictSet1 :: Uniquable k
        => k -> (k -> cls) -> UniqSet k
        -> UniqFM (Node k cls color)
        -> UniqFM (Node k cls color)
addConflictSet1 u getClass set
 = case delOneFromUniqSet set u of
    set' -> adjustWithDefaultUFM
                (\node -> node  { nodeConflicts = unionUniqSets set' (nodeConflicts node) } )
                (newNode u (getClass u)) { nodeConflicts = set' }
                u


-- | Add an exclusion to the graph, creating nodes if required.
--      These are extra colors that the node cannot use.
addExclusion
        :: (Uniquable k, Uniquable color)
        => k -> (k -> cls) -> color
        -> Graph k cls color -> Graph k cls color

addExclusion u getClass color
        = graphMapModify
        $ adjustWithDefaultUFM
                (\node -> node { nodeExclusions = addOneToUniqSet (nodeExclusions node) color })
                (newNode u (getClass u)) { nodeExclusions = unitUniqSet color }
                u

addExclusions
        :: (Uniquable k, Uniquable color)
        => k -> (k -> cls) -> [color]
        -> Graph k cls color -> Graph k cls color

addExclusions u getClass colors graph
        = foldr (addExclusion u getClass) graph colors


-- | Add a coalescence edge to the graph, creating nodes if required.
--      It is considered advantageous to assign the same color to nodes in a coalescence.
addCoalesce
        :: Uniquable k
        => (k, cls) -> (k, cls)
        -> Graph k cls color -> Graph k cls color

addCoalesce (u1, c1) (u2, c2)
 = let  addCoalesce u c u'
         =      adjustWithDefaultUFM
                        (\node -> node { nodeCoalesce = addOneToUniqSet (nodeCoalesce node) u' })
                        (newNode u c)  { nodeCoalesce = unitUniqSet u' }
                        u

   in   graphMapModify
        ( addCoalesce u1 c1 u2
        . addCoalesce u2 c2 u1)


-- | Delete a coalescence edge (k1 -> k2) from the graph.
delCoalesce
        :: Uniquable k
        => k -> k
        -> Graph k cls color -> Maybe (Graph k cls color)

delCoalesce k1 k2
        = modNode (\node -> node { nodeCoalesce = delOneFromUniqSet (nodeCoalesce node) k2 })
                k1


-- | Add a color preference to the graph, creating nodes if required.
--      The most recently added preference is the most preferred.
--      The algorithm tries to assign a node its preferred color if possible.
--
addPreference
        :: Uniquable k
        => (k, cls) -> color
        -> Graph k cls color -> Graph k cls color

addPreference (u, c) color
        = graphMapModify
        $ adjustWithDefaultUFM
                (\node -> node { nodePreference = color : (nodePreference node) })
                (newNode u c)  { nodePreference = [color] }
                u


-- | Do aggressive coalescing on this graph.
--      returns the new graph and the list of pairs of nodes that got coalesced together.
--      for each pair, the resulting node will have the least key and be second in the pair.
-- coalesceGraph :: (Uniquable k, Ord k, Eq cls, Outputable k) => Bool -- ^ If True, coalesce nodes even if this might make the graph -- less colorable (aggressive coalescing) -> Triv k cls color -> Graph k cls color -> ( Graph k cls color , [(k, k)]) -- pairs of nodes that were coalesced, in the order that the -- coalescing was applied. coalesceGraph aggressive triv graph = coalesceGraph' aggressive triv graph [] coalesceGraph' :: (Uniquable k, Ord k, Eq cls, Outputable k) => Bool -> Triv k cls color -> Graph k cls color -> [(k, k)] -> ( Graph k cls color , [(k, k)]) coalesceGraph' aggressive triv graph kkPairsAcc = let -- find all the nodes that have coalescence edges cNodes = filter (\node -> not $ isEmptyUniqSet (nodeCoalesce node)) $ nonDetEltsUFM $ graphMap graph -- See Note [Unique Determinism and code generation] -- build a list of pairs of keys for node's we'll try and coalesce -- every pair of nodes will appear twice in this list -- ie [(k1, k2), (k2, k1) ... ] -- This is ok, GrapOps.coalesceNodes handles this and it's convenient for -- build a list of what nodes get coalesced together for later on. -- cList = [ (nodeId node1, k2) | node1 <- cNodes , k2 <- nonDetEltsUFM $ nodeCoalesce node1 ] -- See Note [Unique Determinism and code generation] -- do the coalescing, returning the new graph and a list of pairs of keys -- that got coalesced together. (graph', mPairs) = mapAccumL (coalesceNodes aggressive triv) graph cList -- keep running until there are no more coalesces can be found in case catMaybes mPairs of [] -> (graph', reverse kkPairsAcc) pairs -> coalesceGraph' aggressive triv graph' (reverse pairs ++ kkPairsAcc) -- | Coalesce this pair of nodes unconditionally \/ aggressively. -- The resulting node is the one with the least key. -- -- returns: Just the pair of keys if the nodes were coalesced -- the second element of the pair being the least one -- -- Nothing if either of the nodes weren't in the graph coalesceNodes :: (Uniquable k, Ord k, Eq cls) => Bool -- ^ If True, coalesce nodes even if this might make the graph -- less colorable (aggressive coalescing) -> Triv k cls color -> Graph k cls color -> (k, k) -- ^ keys of the nodes to be coalesced -> (Graph k cls color, Maybe (k, k)) coalesceNodes aggressive triv graph (k1, k2) | (kMin, kMax) <- if k1 < k2 then (k1, k2) else (k2, k1) -- the nodes being coalesced must be in the graph , Just nMin <- lookupNode graph kMin , Just nMax <- lookupNode graph kMax -- can't coalesce conflicting modes , not $ elementOfUniqSet kMin (nodeConflicts nMax) , not $ elementOfUniqSet kMax (nodeConflicts nMin) -- can't coalesce the same node , nodeId nMin /= nodeId nMax = coalesceNodes_merge aggressive triv graph kMin kMax nMin nMax -- don't do the coalescing after all | otherwise = (graph, Nothing) coalesceNodes_merge :: (Uniquable k, Eq cls) => Bool -> Triv k cls color -> Graph k cls color -> k -> k -> Node k cls color -> Node k cls color -> (Graph k cls color, Maybe (k, k)) coalesceNodes_merge aggressive triv graph kMin kMax nMin nMax -- sanity checks | nodeClass nMin /= nodeClass nMax = error "GraphOps.coalesceNodes: can't coalesce nodes of different classes." | not (isNothing (nodeColor nMin) && isNothing (nodeColor nMax)) = error "GraphOps.coalesceNodes: can't coalesce colored nodes." --- | otherwise = let -- the new node gets all the edges from its two components node = Node { nodeId = kMin , nodeClass = nodeClass nMin , nodeColor = Nothing -- nodes don't conflict with themselves.. 
, nodeConflicts = (unionUniqSets (nodeConflicts nMin) (nodeConflicts nMax)) `delOneFromUniqSet` kMin `delOneFromUniqSet` kMax , nodeExclusions = unionUniqSets (nodeExclusions nMin) (nodeExclusions nMax) , nodePreference = nodePreference nMin ++ nodePreference nMax -- nodes don't coalesce with themselves.. , nodeCoalesce = (unionUniqSets (nodeCoalesce nMin) (nodeCoalesce nMax)) `delOneFromUniqSet` kMin `delOneFromUniqSet` kMax } in coalesceNodes_check aggressive triv graph kMin kMax node coalesceNodes_check :: Uniquable k => Bool -> Triv k cls color -> Graph k cls color -> k -> k -> Node k cls color -> (Graph k cls color, Maybe (k, k)) coalesceNodes_check aggressive triv graph kMin kMax node -- Unless we're coalescing aggressively, if the result node is not trivially -- colorable then don't do the coalescing. | not aggressive , not $ triv (nodeClass node) (nodeConflicts node) (nodeExclusions node) = (graph, Nothing) | otherwise = let -- delete the old nodes from the graph and add the new one Just graph1 = delNode kMax graph Just graph2 = delNode kMin graph1 graph3 = addNode kMin node graph2 in (graph3, Just (kMax, kMin)) -- | Freeze a node -- This is for the iterative coalescer. -- By freezing a node we give up on ever coalescing it. -- Move all its coalesce edges into the frozen set - and update -- back edges from other nodes. -- freezeNode :: Uniquable k => k -- ^ key of the node to freeze -> Graph k cls color -- ^ the graph -> Graph k cls color -- ^ graph with that node frozen freezeNode k = graphMapModify $ \fm -> let -- freeze all the edges in the node to be frozen Just node = lookupUFM fm k node' = node { nodeCoalesce = emptyUniqSet } fm1 = addToUFM fm k node' -- update back edges pointing to this node freezeEdge k node = if elementOfUniqSet k (nodeCoalesce node) then node { nodeCoalesce = delOneFromUniqSet (nodeCoalesce node) k } else node -- panic "GraphOps.freezeNode: edge to freeze wasn't in the coalesce set" -- If the edge isn't actually in the coelesce set then just ignore it. fm2 = nonDetFoldUFM (adjustUFM_C (freezeEdge k)) fm1 -- It's OK to use nonDetFoldUFM here because the operation -- is commutative $ nodeCoalesce node in fm2 -- | Freeze one node in the graph -- This if for the iterative coalescer. -- Look for a move related node of low degree and freeze it. -- -- We probably don't need to scan the whole graph looking for the node of absolute -- lowest degree. Just sample the first few and choose the one with the lowest -- degree out of those. Also, we don't make any distinction between conflicts of different -- classes.. this is just a heuristic, after all. -- -- IDEA: freezing a node might free it up for Simplify.. would be good to check for triv -- right here, and add it to a worklist if known triv\/non-move nodes. -- freezeOneInGraph :: (Uniquable k) => Graph k cls color -> ( Graph k cls color -- the new graph , Bool ) -- whether we found a node to freeze freezeOneInGraph graph = let compareNodeDegree n1 n2 = compare (sizeUniqSet $ nodeConflicts n1) (sizeUniqSet $ nodeConflicts n2) candidates = sortBy compareNodeDegree $ take 5 -- 5 isn't special, it's just a small number. $ scanGraph (\node -> not $ isEmptyUniqSet (nodeCoalesce node)) graph in case candidates of -- there wasn't anything available to freeze [] -> (graph, False) -- we found something to freeze (n : _) -> ( freezeNode (nodeId n) graph , True) -- | Freeze all the nodes in the graph -- for debugging the iterative allocator. 
-- freezeAllInGraph :: (Uniquable k) => Graph k cls color -> Graph k cls color freezeAllInGraph graph = foldr freezeNode graph $ map nodeId $ nonDetEltsUFM $ graphMap graph -- See Note [Unique Determinism and code generation] -- | Find all the nodes in the graph that meet some criteria -- scanGraph :: (Node k cls color -> Bool) -> Graph k cls color -> [Node k cls color] scanGraph match graph = filter match $ nonDetEltsUFM $ graphMap graph -- See Note [Unique Determinism and code generation] -- | validate the internal structure of a graph -- all its edges should point to valid nodes -- If they don't then throw an error -- validateGraph :: (Uniquable k, Outputable k, Eq color) => SDoc -- ^ extra debugging info to display on error -> Bool -- ^ whether this graph is supposed to be colored. -> Graph k cls color -- ^ graph to validate -> Graph k cls color -- ^ validated graph validateGraph doc isColored graph -- Check that all edges point to valid nodes. | edges <- unionManyUniqSets ( (map nodeConflicts $ nonDetEltsUFM $ graphMap graph) ++ (map nodeCoalesce $ nonDetEltsUFM $ graphMap graph)) , nodes <- mkUniqSet $ map nodeId $ nonDetEltsUFM $ graphMap graph , badEdges <- minusUniqSet edges nodes , not $ isEmptyUniqSet badEdges = pprPanic "GraphOps.validateGraph" ( text "Graph has edges that point to non-existent nodes" $$ text " bad edges: " <> pprUFM badEdges (vcat . map ppr) $$ doc ) -- Check that no conflicting nodes have the same color | badNodes <- filter (not . (checkNode graph)) $ nonDetEltsUFM $ graphMap graph -- See Note [Unique Determinism and code generation] , not $ null badNodes = pprPanic "GraphOps.validateGraph" ( text "Node has same color as one of its conflicts" $$ text " bad nodes: " <> hcat (map (ppr . nodeId) badNodes) $$ doc) -- If this is supposed to be a colored graph, -- check that all nodes have a color. | isColored , badNodes <- filter (\n -> isNothing $ nodeColor n) $ nonDetEltsUFM $ graphMap graph , not $ null badNodes = pprPanic "GraphOps.validateGraph" ( text "Supposedly colored graph has uncolored nodes." $$ text " uncolored nodes: " <> hcat (map (ppr . nodeId) badNodes) $$ doc ) -- graph looks ok | otherwise = graph -- | If this node is colored, check that all the nodes which -- conflict with it have different colors. 
checkNode :: (Uniquable k, Eq color) => Graph k cls color -> Node k cls color -> Bool -- ^ True if this node is ok checkNode graph node | Just color <- nodeColor node , Just neighbors <- sequence $ map (lookupNode graph) $ nonDetEltsUFM $ nodeConflicts node -- See Note [Unique Determinism and code generation] , neighbourColors <- catMaybes $ map nodeColor neighbors , elem color neighbourColors = False | otherwise = True -- | Slurp out a map of how many nodes had a certain number of conflict neighbours slurpNodeConflictCount :: Graph k cls color -> UniqFM (Int, Int) -- ^ (conflict neighbours, num nodes with that many conflicts) slurpNodeConflictCount graph = addListToUFM_C (\(c1, n1) (_, n2) -> (c1, n1 + n2)) emptyUFM $ map (\node -> let count = sizeUniqSet $ nodeConflicts node in (count, (count, 1))) $ nonDetEltsUFM -- See Note [Unique Determinism and code generation] $ graphMap graph -- | Set the color of a certain node setColor :: Uniquable k => k -> color -> Graph k cls color -> Graph k cls color setColor u color = graphMapModify $ adjustUFM_C (\n -> n { nodeColor = Just color }) u {-# INLINE adjustWithDefaultUFM #-} adjustWithDefaultUFM :: Uniquable k => (a -> a) -> a -> k -> UniqFM a -> UniqFM a adjustWithDefaultUFM f def k map = addToUFM_C (\old _ -> f old) map k def -- Argument order different from UniqFM's adjustUFM {-# INLINE adjustUFM_C #-} adjustUFM_C :: Uniquable k => (a -> a) -> k -> UniqFM a -> UniqFM a adjustUFM_C f k map = case lookupUFM map k of Nothing -> map Just a -> addToUFM map k (f a)
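-- A minimal usage sketch of the API above (illustrative only, not from the
-- original source). It assumes some Uniquable register type with placeholder
-- values r1, r2, r3, a register class cls, colors colorA/colorB, and that
-- GraphBase provides an empty-graph constructor, written here as newGraph.
--
-- exampleGraph :: Graph MyReg MyCls MyColor
-- exampleGraph
--   = setColor r1 colorA                  -- pin r1 to a color
--   $ addPreference (r2, cls) colorB      -- prefer colorB for r2
--   $ addCoalesce (r2, cls) (r3, cls)     -- r2 and r3 would like the same color
--   $ addConflict (r1, cls) (r2, cls)     -- r1 and r2 interfere
--   $ newGraph                            -- assumed empty graph from GraphBase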
olsner/ghc
compiler/utils/GraphOps.hs
bsd-3-clause
23,303
0
18
8,751
4,796
2,473
2,323
415
2
{-# LANGUAGE RankNTypes, PolyKinds #-} -- NB: -fprint-explicit-runtime-reps enabled in all.T module TypeSkolEscape where import GHC.Types import GHC.Exts type Bad = forall (v :: RuntimeRep) (a :: TYPE v). a
sdiehl/ghc
testsuite/tests/dependent/should_fail/TypeSkolEscape.hs
bsd-3-clause
210
0
8
33
43
28
15
-1
-1
module Time {-( ClockTime, Month(January,February,March,April,May,June, July,August,September,October,November,December), Day(Sunday,Monday,Tuesday,Wednesday,Thursday,Friday,Saturday), CalendarTime(CalendarTime, ctYear, ctMonth, ctDay, ctHour, ctMin, ctPicosec, ctWDay, ctYDay, ctTZName, ctTZ, ctIsDST), TimeDiff(TimeDiff, tdYear, tdMonth, tdDay, tdHour, tdMin, tdSec, tdPicosec), getClockTime, addToClockTime, diffClockTimes, toCalendarTime, toUTCTime, toClockTime, calendarTimeToString, formatCalendarTime )-} where import Prelude import Ix(Ix) import Locale --(TimeLocale(..),defaultTimeLocale) import Char ( intToDigit ) newtype ClockTime = ClockTime Int deriving (Eq,Ord,Show) -- The Show instance is a deviation from Haskell 98, provided for -- compatibility with Hugs and GHC. data Month = January | February | March | April | May | June | July | August | September | October | November | December deriving (Eq, Ord, Enum, Bounded, Ix, Read, Show) --instance Enum Month data Day = Sunday | Monday | Tuesday | Wednesday | Thursday | Friday | Saturday deriving (Eq, Ord, Enum, Bounded, Ix, Read, Show) --instance Enum Day data CalendarTime = CalendarTime { ctYear :: Int, ctMonth :: Month, ctDay, ctHour, ctMin, ctSec :: Int, ctPicosec :: Integer, ctWDay :: Day, ctYDay :: Int, ctTZName :: String, ctTZ :: Int, ctIsDST :: Bool } deriving (Eq, Ord, Read, Show) data TimeDiff = TimeDiff { tdYear, tdMonth, tdDay, tdHour, tdMin, tdSec :: Int, tdPicosec :: Integer } deriving (Eq, Ord, Read, Show) getClockTime :: IO ClockTime getClockTime = undefined -- Implementation-dependent addToClockTime :: TimeDiff -> ClockTime -> ClockTime addToClockTime td ct = undefined -- Implementation-dependent diffClockTimes :: ClockTime -> ClockTime -> TimeDiff diffClockTimes ct1 ct2 = undefined -- Implementation-dependent toCalendarTime :: ClockTime -> IO CalendarTime toCalendarTime ct = undefined -- Implementation-dependent toUTCTime :: ClockTime -> CalendarTime toUTCTime ct = undefined -- Implementation-dependent toClockTime :: CalendarTime -> ClockTime toClockTime cal = undefined -- Implementation-dependent calendarTimeToString :: CalendarTime -> String calendarTimeToString = formatCalendarTime defaultTimeLocale "%c" formatCalendarTime :: TimeLocale -> String -> CalendarTime -> String formatCalendarTime l fmt ct@(CalendarTime year mon day hour min sec sdec wday yday tzname _ _) = doFmt fmt where doFmt ('%':c:cs) = decode c ++ doFmt cs doFmt (c:cs) = c : doFmt cs doFmt "" = "" to12 :: Int -> Int to12 h = let h' = h `mod` 12 in if h' == 0 then 12 else h' decode 'A' = fst (wDays l !! fromEnum wday) decode 'a' = snd (wDays l !! fromEnum wday) decode 'B' = fst (months l !! fromEnum mon) decode 'b' = snd (months l !! fromEnum mon) decode 'h' = snd (months l !! 
fromEnum mon) decode 'C' = show2 (year `quot` 100) decode 'c' = doFmt (dateTimeFmt l) decode 'D' = doFmt "%m/%d/%y" decode 'd' = show2 day decode 'e' = show2' day decode 'H' = show2 hour decode 'I' = show2 (to12 hour) decode 'j' = show3 yday decode 'k' = show2' hour decode 'l' = show2' (to12 hour) decode 'M' = show2 min decode 'm' = show2 (fromEnum mon+1) decode 'n' = "\n" decode 'p' = (if hour < 12 then fst else snd) (amPm l) decode 'R' = doFmt "%H:%M" decode 'r' = doFmt (time12Fmt l) decode 'T' = doFmt "%H:%M:%S" decode 't' = "\t" decode 'S' = show2 sec --decode 's' = undefined -- Implementation-dependent decode 'U' = show2 ((yday + 7 - fromEnum wday) `div` 7) decode 'u' = show (let n = fromEnum wday in if n == 0 then 7 else n) decode 'V' = let (week, days) = (yday + 7 - if fromEnum wday > 0 then fromEnum wday - 1 else 6) `divMod` 7 in show2 (if days >= 4 then week+1 else if week == 0 then 53 else week) decode 'W' = show2 ((yday + 7 - if fromEnum wday > 0 then fromEnum wday - 1 else 6) `div` 7) decode 'w' = show (fromEnum wday) decode 'X' = doFmt (timeFmt l) decode 'x' = doFmt (dateFmt l) decode 'Y' = show year decode 'y' = show2 (year `rem` 100) decode 'Z' = tzname decode '%' = "%" decode c = [c] show2, show2', show3 :: Int -> String show2 x = [intToDigit (x `quot` 10), intToDigit (x `rem` 10)] show2' x = if x < 10 then [ ' ', intToDigit x] else show2 x show3 x = intToDigit (x `quot` 100) : show2 (x `rem` 100)
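-- An illustrative use of the formatter defined above (not part of the original
-- module); it relies only on format codes handled by 'decode', and assumes a
-- CalendarTime value 'ct' is obtained elsewhere.
--
-- isoish :: CalendarTime -> String
-- isoish ct = formatCalendarTime defaultTimeLocale "%Y-%m-%d %H:%M:%S %Z" ct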
forste/haReFork
tools/base/tests/HaskellLibraries/Time.hs
bsd-3-clause
5,602
0
16
2,094
1,470
787
683
102
45
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-} {-# LANGUAGE RankNTypes, ExistentialQuantification #-} -- An interesting interaction of universals and existentials, prompted by -- http://www.haskell.org/pipermail/haskell-cafe/2004-October/007160.html -- -- Note the nested pattern-match in runProg; tc183 checks the -- non-nested version -- 3 Sept 2010: with the new typechecker, this one succeeds module Foo where import Control.Monad.Trans data Bar m = forall t. (MonadTrans t, Monad (t m)) => Bar (t m () -> m ()) (t m Int) data Foo = Foo (forall m. Monad m => Bar m) runProg :: Foo -> IO () runProg (Foo (Bar run op)) = run (prog op) -- This nested match "ought" to work; because -- runProg (Foo b) = case b of -- Bar run op -> run (prog op) -- does work. But the interactions with GADTs and -- desugaring defeated me, and I removed (in GHC 6.4) the ability -- to instantiate functions on the left prog :: (MonadTrans t, Monad (t IO)) => a -> t IO () prog x = error "urk"
urbanslug/ghc
testsuite/tests/typecheck/should_fail/tcfail126.hs
bsd-3-clause
1,007
2
10
196
211
116
95
12
1
-- | Check universe constraints. module Idris.Core.Constraints ( ucheck ) where import Idris.Core.TT ( TC(..), UExp(..), UConstraint(..), FC(..), ConstraintFC(..), Err'(..) ) import Control.Applicative import Control.Monad.State.Strict import Data.List ( partition ) import qualified Data.Map.Strict as M import qualified Data.Set as S -- | Check that a list of universe constraints can be satisfied. ucheck :: S.Set ConstraintFC -> TC () ucheck = void . solve 10 . S.filter (not . ignore) where -- TODO: remove the first ignore clause once Idris.Core.Binary:598 is dealt with ignore (ConstraintFC c _) | any (== Var (-1)) (varsIn c) = True ignore (ConstraintFC (ULE a b) _) = a == b ignore _ = False newtype Var = Var Int deriving (Eq, Ord, Show) data Domain = Domain Int Int deriving (Eq, Ord, Show) data SolverState = SolverState { queue :: Queue , domainStore :: M.Map Var ( Domain , S.Set ConstraintFC -- constraints that effected this variable ) , cons_lhs :: M.Map Var (S.Set ConstraintFC) , cons_rhs :: M.Map Var (S.Set ConstraintFC) } data Queue = Queue [ConstraintFC] (S.Set UConstraint) solve :: Int -> S.Set ConstraintFC -> TC (M.Map Var Int) solve maxUniverseLevel ucs = evalStateT (propagate >> extractSolution) initSolverState where inpConstraints = S.toAscList ucs -- | initial solver state. -- the queue contains all constraints, the domain store contains the initial domains. initSolverState :: SolverState initSolverState = let (initUnaryQueue, initQueue) = partition (\ c -> length (varsIn (uconstraint c)) == 1) inpConstraints in SolverState { queue = Queue (initUnaryQueue ++ initQueue) (S.fromList (map uconstraint (initUnaryQueue ++ initQueue))) , domainStore = M.fromList [ (v, (Domain 0 maxUniverseLevel, S.empty)) | v <- ordNub [ v | ConstraintFC c _ <- inpConstraints , v <- varsIn c ] ] , cons_lhs = constraintsLHS , cons_rhs = constraintsRHS } lhs (ULT (UVar x) _) = Just (Var x) lhs (ULE (UVar x) _) = Just (Var x) lhs _ = Nothing rhs (ULT _ (UVar x)) = Just (Var x) rhs (ULE _ (UVar x)) = Just (Var x) rhs _ = Nothing -- | a map from variables to the list of constraints the variable occurs in. (in the LHS of a constraint) constraintsLHS :: M.Map Var (S.Set ConstraintFC) constraintsLHS = M.fromListWith S.union [ (v, S.singleton (ConstraintFC c fc)) | (ConstraintFC c fc) <- inpConstraints , let vars = varsIn c , length vars > 1 -- do not register unary constraints , v <- vars , lhs c == Just v ] -- | a map from variables to the list of constraints the variable occurs in. (in the RHS of a constraint) constraintsRHS :: M.Map Var (S.Set ConstraintFC) constraintsRHS = M.fromListWith S.union [ (v, S.singleton (ConstraintFC c fc)) | (ConstraintFC c fc) <- inpConstraints , let vars = varsIn c , length vars > 1 -- do not register unary constraints , v <- vars , rhs c == Just v ] -- | this is where the actual work is done. -- dequeue the first constraint, -- filter domains, -- update domains (possibly resulting in a domain wipe out), -- until the queue is empty. 
propagate :: StateT SolverState TC () propagate = do mcons <- nextConstraint case mcons of Nothing -> return () Just (ConstraintFC cons fc) -> do case cons of ULE a b -> do Domain lowerA upperA <- domainOf a Domain lowerB upperB <- domainOf b when (upperB < upperA) $ updateUpperBoundOf (ConstraintFC cons fc) a upperB when (lowerA > lowerB) $ updateLowerBoundOf (ConstraintFC cons fc) b lowerA ULT a b -> do Domain lowerA upperA <- domainOf a Domain lowerB upperB <- domainOf b let upperB_pred = pred upperB let lowerA_succ = succ lowerA when (upperB_pred < upperA) $ updateUpperBoundOf (ConstraintFC cons fc) a upperB_pred when (lowerA_succ > lowerB) $ updateLowerBoundOf (ConstraintFC cons fc) b lowerA_succ propagate -- | extract a solution from the state. extractSolution :: (MonadState SolverState m, Functor m) => m (M.Map Var Int) extractSolution = M.map (extractValue . fst) <$> gets domainStore extractValue :: Domain -> Int extractValue (Domain x _) = x -- | dequeue the first constraint. nextConstraint :: MonadState SolverState m => m (Maybe ConstraintFC) nextConstraint = do Queue list set <- gets queue case list of [] -> return Nothing (q:qs) -> do modify $ \ st -> st { queue = Queue qs (S.delete (uconstraint q) set) } return (Just q) -- | look up the domain of a variable from the state. -- for convenience, this function also accepts UVal's and returns a singleton domain for them. domainOf :: MonadState SolverState m => UExp -> m Domain domainOf (UVar var) = gets (fst . (M.! Var var) . domainStore) domainOf (UVal val) = return (Domain val val) asPair :: Domain -> (Int, Int) asPair (Domain x y) = (x, y) updateUpperBoundOf :: ConstraintFC -> UExp -> Int -> StateT SolverState TC () updateUpperBoundOf suspect (UVar var) upper = do doms <- gets domainStore let (oldDom@(Domain lower _), suspects) = doms M.! Var var let newDom = Domain lower upper when (wipeOut newDom) $ lift $ Error $ UniverseError (ufc suspect) (UVar var) (asPair oldDom) (asPair newDom) (suspect : S.toList suspects) modify $ \ st -> st { domainStore = M.insert (Var var) (newDom, S.insert suspect suspects) doms } addToQueueRHS (uconstraint suspect) (Var var) updateUpperBoundOf _ UVal{} _ = return () updateLowerBoundOf :: ConstraintFC -> UExp -> Int -> StateT SolverState TC () updateLowerBoundOf suspect (UVar var) lower = do doms <- gets domainStore let (oldDom@(Domain _ upper), suspects) = doms M.! Var var let newDom = Domain lower upper when (wipeOut newDom) $ lift $ Error $ At (ufc suspect) $ Msg $ unlines $ "Universe inconsistency." : ("Working on: " ++ show (UVar var)) : ("Old domain: " ++ show oldDom) : ("New domain: " ++ show newDom) : "Involved constraints: " : map (("\t"++) . 
show) (suspect : S.toList suspects) modify $ \ st -> st { domainStore = M.insert (Var var) (newDom, S.insert suspect suspects) doms } addToQueueLHS (uconstraint suspect) (Var var) updateLowerBoundOf _ UVal{} _ = return () -- | add all constraints (with the given var on the lhs) to the queue addToQueueLHS :: MonadState SolverState m => UConstraint -> Var -> m () addToQueueLHS thisCons var = do clhs <- gets cons_lhs case M.lookup var clhs of Nothing -> return () Just cs -> do Queue list set <- gets queue let set' = S.insert thisCons set let newCons = [ c | c <- S.toList cs, uconstraint c `S.notMember` set' ] if null newCons then return () else modify $ \ st -> st { queue = Queue (list ++ newCons) (S.union set (S.fromList (map uconstraint newCons))) } -- | add all constraints (with the given var on the rhs) to the queue addToQueueRHS :: MonadState SolverState m => UConstraint -> Var -> m () addToQueueRHS thisCons var = do crhs <- gets cons_rhs case M.lookup var crhs of Nothing -> return () Just cs -> do Queue list set <- gets queue let set' = S.insert thisCons set let newCons = [ c | c <- S.toList cs, uconstraint c `S.notMember` set' ] if null newCons then return () else modify $ \ st -> st { queue = Queue (list ++ newCons) (insertAll (map uconstraint newCons) set) } insertAll [] s = s insertAll (x : xs) s = insertAll xs (S.insert x s) -- | check if a domain is wiped out. wipeOut :: Domain -> Bool wipeOut (Domain l u) = l > u ordNub :: Ord a => [a] -> [a] ordNub = S.toList . S.fromList -- | variables in a constraint varsIn :: UConstraint -> [Var] varsIn (ULT a b) = [ Var v | UVar v <- [a,b] ] varsIn (ULE a b) = [ Var v | UVar v <- [a,b] ]
osa1/Idris-dev
src/Idris/Core/Constraints.hs
bsd-3-clause
9,942
0
25
3,884
2,895
1,458
1,437
-1
-1
module Rhodium.Context where import Control.Exception.Base (assert) type Label = String data UpTerm var = UpVar var | UpPred Label [DnTerm var] -- ^ variables and atoms | UpPi (UpTerm var) (UpTerm var) -- ^ dependant product | UpSigma (UpTerm var) (UpTerm var) -- ^ dependant sum | UpWType (UpTerm var) (UpTerm var) -- ^ W-types | UpType -- ^ the type of Types deriving (Eq,Show) data DnTerm var = DnVar var | DnPred Label [DnTerm var] -- ^ variables and atoms | DnType (UpTerm var) -- ^ type reflected as a term | DnLambda (DnTerm var) | DnApp (DnTerm var) (DnTerm var) -- ^ dependant product | DnPair (DnTerm var) (DnTerm var) | DnSplit (DnTerm var) -- ^ dependant sum | DnSup (DnTerm var) (DnTerm var) | DnWRec (UpTerm var) (DnTerm var) deriving (Eq,Show) liftTerm :: DnTerm var -> UpTerm var liftTerm (DnVar n) = UpVar n liftTerm (DnPred s ts) = UpPred s ts liftTerm (DnType typ) = typ -- other terms are not liftable -- | An Object of the Contextual Category: a context in type theory. -- A context is a list of Terms of type Type, with variables -- represented by deBruijn indices indicating an *offset* in the list, relative -- to the current term. data ObC = ObC [UpTerm Int] deriving (Eq) -- | A Morphism of the Contextual Category: a term in type theory. data HomC = HomC { source :: [UpTerm Int], target :: [UpTerm Int], morph :: [DnTerm Int] } deriving (Eq) -- | A Pair of *composable* morphisms of the Contextual Category. -- Such a constructed pair should always respect the condition -- source lpart == target rpart data Hom2C = Hom2C { lpart :: HomC, rpart :: HomC } deriving (Eq) (<.>) :: HomC -> HomC -> HomC g <.> f = comp $ Hom2C g f infixr 9 <.> ----- -- Structural rules ----- -- | Identity morphism. unit :: ObC -> HomC unit (ObC obs) = HomC { source = obs, target = obs, morph = zipWith (\_ i -> DnVar i) obs (iterate (+ 1) 0) } -- | Composition of morphisms. comp :: Hom2C -> HomC comp (Hom2C g f) = -- pre-condition assert (target f == source g) $ -- code HomC { source = source f, target = target g, morph = map (substDn $ morph f) (morph g) } ---- Dependent projection ft :: ObC -> ObC ft (ObC ob) = ObC $ tail ob -- | Build a canonical projection morphism out of this object. -- ## TODO: proj has a special case on 'DnApp' proj :: ObC -> HomC proj (ObC obs) = -- pre-condition assert (not $ null obs) $ -- code HomC { source = obs, target = tail obs, morph = zipWith (\_ i -> DnVar i) (tail obs) (iterate (+ 1) 1) } -- | True if the given morphism is a section of the canonical projection. isSection :: HomC -> Bool isSection f = source f == tail (target f) && tail (morph f) == morph (unit (ObC (source f))) -- | Pullback the canonical projection 'proj' from object 'x' along 'f' pullback :: HomC -> ObC -> ObC pullback f (ObC obs) = -- pre-condition assert (tail obs == target f) $ -- code ObC $ (substUp (morph f) (head obs)) : (source f) q :: HomC -> ObC -> HomC q f ob@(ObC obs) = assert (tail obs == target f) $ let ObC fstar = pullback f ob in HomC { source = fstar, target = obs, morph = (DnVar 0) : (offset 1 $ morph f) } -- helpers substUp :: [DnTerm Int] -> UpTerm Int -> UpTerm Int substUp s (UpVar i) = liftTerm $ s !! i -- ^ ## WRONG: 'DnApp' counts for two objects substUp s (UpPred p vs) = UpPred p (map (substUp s) vs) substUp s (UpPi a b) = substDn :: [DnTerm Int] -> DnTerm Int -> DnTerm Int substDn s (DnVar i) = s !! i -- ## WRONG: 'DnApp' counts for two objects substDn s (DnPred p vs) = DnPred p (map (substDn s) vs) substDn s (DnLambda f) = DnLambda (substDn s f) -- ## WRONG !!! 
incr :: [DnTerm Int] -> [DnTerm Int] incr = offset 1 offset :: Int -> [DnTerm Int] -> [DnTerm Int] offset n = map $ fmap (+ n) {-# INLINE[2] offset #-} {-# RULES "offset/offset" forall n m ts. offset n (offset m ts) = offset (n + m) ts #-} ----- -- Dependent Functions ----- -- ∏-FORM pi :: ObC -> ObC pi (ObC (b:a:os)) = ObC $ (UpPi a b) : os -- ∏-INTRO -- b : [G,A] -> [G,A,B] -- lambda b : [G] -> [G,Pi(A,B)] lambda :: HomC -> HomC lambda b = let upB:upA:gamma = target b f:_:bs = morph b in HomC { source = tail (source b), target = (UpPi upA upB) : gamma, morph = (DnLambda f) : bs } -- ∏-ELIM app :: HomC -> HomC app g = -- pre-condition: a == upA let a:(UpPi upA upB):gamma = target g x:f:morphs = morph g in HomC { source = source g, target = upB : upA : gamma, morph = (DnApp f x) : morphs } -- k : [Г] -> [Г,∏(A,B)] , c : [Г] -> [Г,A] app2 :: HomC -> HomC -> HomC app2 k a = -- pre-conditions assert (source k == source a) $ assert (let (UpPi _ _):g = target k in source k == g) $ assert (let _:g = target a in source a == g) $ assert (let (UpPi upA _):_ = target k upA':_ = target a in upA == upA') $ -- code let (UpPi upA upB):gamma = target k f:_ = morph k x:morphs = morph a in HomC { source = gamma, target = upB : upA : gamma, morph = (DnApp f x) : morphs } ----- -- W-Types ----- -- W-FORM w :: ObC -> ObC w (ObC (b:a:os)) = ObC $ (UpWType a b) : os -- W-INTRO -- for each object [Г,A,B] -- a map [Г,A,∏(B,p^*_B p^*_A W(A,B))] -> [Г,W(A,B)] sup :: ObC -> HomC sup (ObC (b:a:gamma)) = let ppw = UpWType a b -- ## TODO: should be a pullback of p^*_A, P^*_B in HomC { source = (UpPi b ppw):a:gamma, target = (UpWType a b):gamma, morph = (DnSup (DnVar 1) (DnVar 0)):(offset 2 $ morph $ unit (ObC gamma)) } -- W-ELIM -- for each map 'd', a map 'wrec' such that -- wrec . sup = d . λ(wrec . app(...)) wrec :: HomC -> HomC wrec d = let in HomC { source = tail $ target d, target = target d, morph = [] } ----- -- Validation ----- -- | Check that the given object is valid in the given environment containing -- bindings for named values. -- validOb :: Env -> ObC -> Bool -- | Check that the given morphism is valid in the given environment containing -- bindings for named values. -- validHom :: Env -> HomC -> Bool ----- -- Instances ----- instance Functor DnTerm where fmap f (DnVar x) = DnVar (f x) fmap f (DnPred a xs) = DnPred a (map (fmap f) xs) fmap f (DnLambda k) = DnLambda (fmap f k) fmap f (DnPair a b) = DnPair (fmap f a) (fmap f b) instance Show ObC where show (ObC []) = "[obQ||]" show (ObC (o:[])) = "[obQ|" ++ showUpTerm o ++ "|]" show (ObC (o:os)) = "[obQ|" ++ showListWith showUpTerm os ++ ", " ++ showUpTerm o ++ "|]" instance Show HomC where show f = "[homQ|" ++ showListWith showUpTerm (source f) ++ " :- " ++ showJudgList (zip (morph f) (target f)) ++ "|]" showUpTerm :: Show var => UpTerm var -> String showUpTerm (UpVar v) = '$' : (show v) showUpTerm (UpPred a vs) = case vs of [] -> a vs -> a ++ "(" ++ showListWith showUpTerm vs ++ ")" showUpTerm (UpPi a b) = "∏[" ++ showUpTerm a ++ "]" ++ showUpTerm b showUpTerm (UpSigma a b) = "∑[" ++ showUpTerm a ++ "]" ++ showUpTerm b showDnTerm :: Show var => DnTerm var -> String showDnTerm (DnVar v) = '$' : (show v) showDnTerm (DnPred a vs) = case vs of [] -> a vs -> a ++ "(" ++ showListWith showDnTerm vs ++ ")" showDnTerm (DnLambda k) = "λ." 
++ showDnTerm k showDnTerm (DnPair a b) = "(" ++ showDnTerm a ++ "," ++ showDnTerm b ++ ")" showListWith :: (v -> String) -> [v] -> String showListWith s [] = "" showListWith s (v:[]) = s v showListWith s (v:vs) = showListWith s vs ++ ", " ++ s v showJudgList :: (Show var) => [(DnTerm var,UpTerm var)] -> String showJudgList [] = "" showJudgList ((trm,typ):[]) = showDnTerm trm ++ ":" ++ showUpTerm typ showJudgList ((trm,typ):js) = showJudgList js ++ ", " ++ showDnTerm trm ++ ":" ++ showUpTerm typ
DrNico/rhodium
tools/rhc-strap/Rhodium/Context.hs
mit
8,149
7
18
2,272
3,023
1,572
1,451
-1
-1
{- {- Nested comment -} -- Note: still commented fibs :: [Int] fibs = 1 : 1 : zipWith (+) fibs (tail fibs) -} main :: IO () main = print [1..]
cgag/loc
tests/data/nested-comments.hs
mit
147
0
6
36
23
12
11
2
1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TemplateHaskell #-} module Ringo.ArgParser (ProgArgs(..), parseArgs) where import qualified Data.Text as Text import qualified Distribution.Package as P import qualified Distribution.PackageDescription as P import qualified Distribution.CurrentPackageDescription as P import qualified Distribution.Text as DText import Data.List (intercalate) import Options.Applicative import Ringo.Types data ProgArgs = ProgArgs { progSettings :: Settings , progInputFile :: FilePath , progOutputDir :: FilePath } deriving (Eq, Show) settingsParser :: Parser Settings settingsParser = let Settings {..} = defSettings in Settings <$> (Text.pack <$> strOption (long "dim-prefix" <> short 'd' <> value (Text.unpack settingDimPrefix) <> showDefault <> help "Prefix for dimension tables")) <*> (Text.pack <$> strOption (long "fact-prefix" <> short 'f' <> value (Text.unpack settingFactPrefix) <> showDefault <> help "Prefix for fact tables")) <*> option auto (let timeunits = map show [Second ..] in long "timeunit" <> short 't' <> value settingTimeUnit <> showDefault <> completeWith timeunits <> help ("Time unit granularity for fact tables. Possible values: " ++ intercalate ", " timeunits)) <*> minorOption "avg-count-col-suffix" settingAvgCountColumSuffix "Suffix for average count columns" <*> minorOption "avg-sum-col-suffix" settingAvgSumColumnSuffix "Suffix for average sum columns" <*> minorOption "dim-id-col-name" settingDimTableIdColumnName "Name of dimension table id columns" <*> minorOption "dim-id-col-type" settingDimTableIdColumnType "Type of dimension table id columns" <*> minorOption "fact-count-col-type" settingFactCountColumnType "Type of fact table count columns" <*> option auto (long "fact-count-distinct-error-rate" <> hidden <> value settingFactCountDistinctErrorRate <> showDefault <> help "Error rate for count distinct calculations") <*> minorOption "fact-infix" settingFactInfix "Infix for fact tables" <*> minorOption "dependencies-json-file" settingDependenciesJSONFileName "Name of the output dependencies json file" <*> minorOption "facts-json-file" settingFactsJSONFileName "Name of the output facts json file" <*> minorOption "dimensions-json-file" settingDimensionJSONFileName "Name of the output dimensions json file" <*> option auto (long "foreign-key-id-coalesce-val" <> hidden <> value settingForeignKeyIdCoalesceValue <> showDefault <> help "Value to coalesce missing foreign key ids to, in fact tables") <*> minorOption "tablename-suffix-template" settingTableNameSuffixTemplate "Suffix template for table names in SQL" where minorOption longDesc defValue helpTxt = Text.pack <$> strOption (long longDesc <> hidden <> value (Text.unpack defValue) <> showDefault <> help helpTxt) progArgsParser :: Parser ProgArgs progArgsParser = ProgArgs <$> settingsParser <*> argument str (metavar "INPUT" <> action "file" <> help "Input file") <*> argument str (metavar "OUTPUT" <> action "directory" <> help "Output directory") progName :: String progName = $(P.getField (DText.display . P.pkgName . P.package)) versionParser :: Parser (a -> a) versionParser = infoOption (progName ++ " " ++ version) (long "version" <> help "Print version information") where version = $(P.getField (DText.display . P.pkgVersion . 
P.package)) parseArgs :: IO ProgArgs parseArgs = execParser $ info (helper <*> versionParser <*> progArgsParser) (fullDesc <> progDesc $(P.getField P.description) <> header (progName ++ " - " ++ $(P.getField P.synopsis)) <> footer ("© " ++ $(P.getField P.copyright) ++ ". " ++ $(P.getField P.homepage)))
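-- A sketch of a typical entry point built on 'parseArgs' (illustrative only);
-- the body is hypothetical and just shows how the parsed fields would be used.
--
-- main :: IO ()
-- main = do
--   ProgArgs {..} <- parseArgs
--   putStrLn ("input: " ++ progInputFile ++ ", output dir: " ++ progOutputDir)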
quintype/ringo
app/Ringo/ArgParser.hs
mit
5,122
0
32
1,955
902
454
448
108
1
{-# LANGUAGE PatternSynonyms #-} -- For HasCallStack compatibility {-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} module JSDOM.Generated.MediaKeySystemAccess (getConfiguration, getConfiguration_, createMediaKeys, createMediaKeys_, getKeySystem, MediaKeySystemAccess(..), gTypeMediaKeySystemAccess) where import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..)) import qualified Prelude (error) import Data.Typeable (Typeable) import Data.Traversable (mapM) import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!)) import Data.Int (Int64) import Data.Word (Word, Word64) import JSDOM.Types import Control.Applicative ((<$>)) import Control.Monad (void) import Control.Lens.Operators ((^.)) import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync) import JSDOM.Enums -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess.getConfiguration Mozilla MediaKeySystemAccess.getConfiguration documentation> getConfiguration :: (MonadDOM m) => MediaKeySystemAccess -> m MediaKeySystemConfiguration getConfiguration self = liftDOM ((self ^. jsf "getConfiguration" ()) >>= fromJSValUnchecked) -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess.getConfiguration Mozilla MediaKeySystemAccess.getConfiguration documentation> getConfiguration_ :: (MonadDOM m) => MediaKeySystemAccess -> m () getConfiguration_ self = liftDOM (void (self ^. jsf "getConfiguration" ())) -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess.createMediaKeys Mozilla MediaKeySystemAccess.createMediaKeys documentation> createMediaKeys :: (MonadDOM m) => MediaKeySystemAccess -> m MediaKeys createMediaKeys self = liftDOM (((self ^. jsf "createMediaKeys" ()) >>= readPromise) >>= fromJSValUnchecked) -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess.createMediaKeys Mozilla MediaKeySystemAccess.createMediaKeys documentation> createMediaKeys_ :: (MonadDOM m) => MediaKeySystemAccess -> m () createMediaKeys_ self = liftDOM (void (self ^. jsf "createMediaKeys" ())) -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess.keySystem Mozilla MediaKeySystemAccess.keySystem documentation> getKeySystem :: (MonadDOM m, FromJSString result) => MediaKeySystemAccess -> m result getKeySystem self = liftDOM ((self ^. js "keySystem") >>= fromJSValUnchecked)
ghcjs/jsaddle-dom
src/JSDOM/Generated/MediaKeySystemAccess.hs
mit
2,815
0
13
388
588
351
237
43
1
{-# LANGUAGE CPP #-} module GHCJS.DOM.RTCDataChannel ( #if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT) module GHCJS.DOM.JSFFI.Generated.RTCDataChannel #else #endif ) where #if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT) import GHCJS.DOM.JSFFI.Generated.RTCDataChannel #else #endif
plow-technologies/ghcjs-dom
src/GHCJS/DOM/RTCDataChannel.hs
mit
355
0
5
33
33
26
7
4
0
module Str where import qualified Data.Text as DT -- type Str = BS.ByteString type Str = DT.Text
dancor/melang
src/Str.hs
mit
99
0
5
19
21
15
6
3
0
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE MultiWayIf #-} module RWPAS.Level.Type ( Level() -- * Level construction , generateLevel , generateLevelM , emptyLevel , roomLevel , portalOnRightSideLevel , addPortal , terrainFeature , TerrainFeature(..) , levelName , levelSize , impassable -- * Items , itemByCoordinates -- * Decorations , Decoration(..) , decorationByCoordinate -- * Actor handling -- -- Some of these functions are in RWPAS.World instead that's a bit higher -- level than these. , eachActor , getMemoryAt , insertActor , tryMoveActor , removeActor , actorById , actorByCoordinates , updateActorMemories -- * Types, coordinates, sizes , LevelCoordinates , Size , LevelID , diagonalDistance -- * Decorations , removeDecorations -- * Computing field of view , levelFieldOfView -- * Stepping , step , StepResult(..) ) where import Control.Lens hiding ( Level ) import Control.Monad.State.Strict import Data.Data import Data.Foldable import Data.IntMap ( IntMap ) import qualified Data.IntMap as IM import Data.IntSet ( IntSet ) import qualified Data.IntSet as IS import Data.Map.Strict ( Map ) import qualified Data.Map.Strict as M import Data.Maybe import Data.SafeCopy import Data.Text ( Text ) import GHC.Generics import Linear.V2 import RWPAS.Actor import RWPAS.Direction import RWPAS.FieldOfView import RWPAS.Item import RWPAS.SafeCopyOrphanInstances() import RWPAS.TwoDimensionalVector import RWPAS.WorldCoordinates data Decoration = Spikes !Direction8 | BloodySpikes !Direction8 | NotDecorated deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic ) instance Enum Decoration where toEnum x | x >= 1 && x <= 8 = let dir = toEnum (x-1) :: Direction8 in Spikes dir toEnum x | x >= 9 && x <= 16 = let dir = toEnum (x-9) :: Direction8 in BloodySpikes dir toEnum 0 = NotDecorated toEnum _ = error "toEnum (Decoration): invalid value" {-# INLINE toEnum #-} fromEnum (Spikes dir) = fromEnum dir + 1 fromEnum (BloodySpikes dir) = fromEnum dir + 9 fromEnum NotDecorated = 0 {-# INLINE fromEnum #-} data Level = Level { _terrain :: !Vector2D , _decorations :: !(Map LevelCoordinates Decoration) , _items :: !(Map LevelCoordinates Item) , _portals :: !(IntMap Portal) , _portalKeys :: !(Map LevelCoordinates IntSet) , _actorKeys :: !(Map LevelCoordinates ActorID) , _actors :: !(IntMap Actor) , _actorMemories :: !(Map ActorID (Map LevelCoordinates TerrainFeature)) , _levelName :: !Text } deriving ( Eq, Ord, Show, Typeable, Generic ) -- | Coordinates relative to some `Level`. type LevelCoordinates = V2 Int type LevelID = Int data Portal = Portal { _axis :: !Direction4 , _targetLevel :: !LevelID , _targetLevelAxisTopPosition :: !Int , _targetLevelAxisPosition :: !Int , _portalLength :: !Int , _axisTopPosition :: !Int , _axisPosition :: !Int } deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic ) -- | Describes the size of something. type Size = V2 Int data TerrainFeature = Floor | Wall | Planks | PlanksFloor | Tree1 | Tree2 | Dirt | Grass | Rock -- ^ Same as `Wall` but completely black. 
deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic, Enum ) type PortalID = Int -- Derive lenses here makeLenses ''Level makeLenses ''Portal deriveSafeCopy 0 'base ''Level deriveSafeCopy 0 'base ''Portal deriveSafeCopy 0 'base ''TerrainFeature deriveSafeCopy 0 'base ''Decoration -- | If there's no feature at some coordinate, what we should assume it is? defaultTerrainFeature :: TerrainFeature defaultTerrainFeature = Rock getMemoryAt :: ActorID -> Level -> LevelCoordinates -> Maybe TerrainFeature getMemoryAt aid level coords = do memory <- level^.actorMemories.at aid memory^.at coords {- A portal example: Portal (axis = DRight, portalLength = 3, axisPosition = 3, axisTopPosition = 2) x is marked to portal map, y is marked if axis = DLeft. The portal is the line between xs and ys. 1234 .... 1 .xy. 2 .xy. 3 .xy. 4 .... 5 -} addPortal :: Portal -> PortalID -> Level -> Level addPortal portal portal_id = execState $ do case portal^.axis of DRight -> for_ [0..portal^.portalLength-1] $ \offset -> set_key (portal^.axisPosition-1) (offset + portal^.axisTopPosition) DLeft -> for_ [0..portal^.portalLength-1] $ \offset -> set_key (portal^.axisPosition) (offset + portal^.axisTopPosition) DDown -> for_ [0..portal^.portalLength-1] $ \offset -> set_key (offset + portal^.axisTopPosition) (portal^.axisPosition-1) DUp -> for_ [0..portal^.portalLength-1] $ \offset -> set_key (offset + portal^.axisTopPosition) (portal^.axisPosition) portals.at portal_id .= Just portal where set_key x y = let pos = V2 x y in portalKeys.at pos %= Just . \case Nothing -> IS.singleton portal_id Just set -> IS.insert portal_id set decorationByCoordinate :: LevelCoordinates -> Lens' Level Decoration decorationByCoordinate coords = lens get_it set_it where get_it lvl = fromMaybe NotDecorated (lvl^.decorations.at coords) set_it lvl NotDecorated = lvl & decorations.at coords .~ Nothing set_it lvl x = lvl & decorations.at coords .~ Just x {-# INLINE decorationByCoordinate #-} -- | Generate a level with a generator function. generateLevel :: Text -> Int -> Int -> (Int -> Int -> TerrainFeature) -> Level generateLevel name w h generator = (emptyLevel name) { _terrain = generate w h $ \x y -> fromIntegral $ fromEnum $ generator x y } generateLevelM :: Monad m => Text -> Int -> Int -> (Int -> Int -> m TerrainFeature) -> m Level generateLevelM name w h generator = do generated <- generateM w h $ \x y -> fromIntegral . fromEnum <$> generator x y return (emptyLevel name) { _terrain = generated } -- | A completely empty level. emptyLevel :: Text -> Level emptyLevel name = Level { _terrain = generate 1 1 $ \_ _ -> fromIntegral $ fromEnum defaultTerrainFeature , _actors = mempty , _actorMemories = mempty , _actorKeys = mempty , _items = mempty , _portals = mempty , _portalKeys = mempty , _decorations = mempty , _levelName = name } updateActorMemories :: ActorID -> M.Map LevelCoordinates TerrainFeature -> Level -> Level updateActorMemories aid memories levels = case levels^.actorMemories.at aid of Nothing -> levels & actorMemories.at aid .~ Just memories Just m -> levels & actorMemories.at aid .~ Just (M.union memories m) -- | Same as `roomLevel` but adds a portal to the right side of the room. -- -- The portal leads to the left side of the room. Pass the same `LevelID` as -- the level itself. 
portalOnRightSideLevel :: Size -> PortalID -> PortalID -> LevelID -> Level portalOnRightSideLevel sz@(V2 w h) pid pid2 lid = let initial_level = roomLevel sz in addPortal Portal { _axis = DLeft , _targetLevel = lid , _targetLevelAxisTopPosition = 1 , _targetLevelAxisPosition = w-1 , _portalLength = h-1 , _axisTopPosition = 1 , _axisPosition = 1 } pid2 $ addPortal Portal { _axis = DRight , _targetLevel = lid , _targetLevelAxisTopPosition = 1 , _targetLevelAxisPosition = 1 , _portalLength = h-1 , _axisTopPosition = 1 , _axisPosition = w-1 } pid initial_level -- | A level that just has a single rectangular room. The walkable area is -- sized according to the given coordinates, with (1, 1) being the top-left -- corner of the room and (0, 0) is top-left wall. roomLevel :: Size -> Level roomLevel (V2 w h) = Level { _terrain = makeOneRoom w h , _actors = mempty , _actorKeys = mempty , _actorMemories = mempty , _items = mempty , _portals = mempty , _portalKeys = mempty , _decorations = mempty , _levelName = "Rectangular Room" } where makeOneRoom w h = generate (w+1) (h+1) $ \x y -> if | x == 0 || y == 0 || x == w || y == h -> fromIntegral $ fromEnum Wall | x == w `div` 2 && y > 5 && y < h-6 -> fromIntegral $ fromEnum Wall | otherwise -> fromIntegral $ fromEnum Floor -- | Lens to a terrain feature at some location. terrainFeature :: LevelCoordinates -> Level -> TerrainFeature terrainFeature coords level = toEnum $ fromIntegral $ getAt coords (level^.terrain) (fromIntegral $ fromEnum Rock) {-# INLINE terrainFeature #-} impassable :: TerrainFeature -> Bool impassable Floor = False impassable Dirt = False impassable Grass = False impassable PlanksFloor = False impassable _ = True seeThrough :: TerrainFeature -> Int seeThrough Floor = 0 seeThrough Dirt = 0 seeThrough Grass = 0 seeThrough PlanksFloor = 0 seeThrough Tree1 = 1 seeThrough Tree2 = 1 seeThrough _ = 10000 tryMoveActor :: ActorID -> Direction8 -> LevelID -> Level -> (LevelID -> Maybe Level) -> Maybe (Level, Maybe (LevelID, Level)) tryMoveActor aid dir source_level_id level get_level = do actor <- IM.lookup aid (level^.actors) let actor_pos = actor^.position case step dir actor_pos level of SameLevel new_actor_pos -> if impassable (terrainFeature new_actor_pos level) || isJust (actorByCoordinates new_actor_pos level) then Nothing else Just (level & (actorKeys.at new_actor_pos .~ Just aid) . (actors.at aid .~ Just (actor & position .~ new_actor_pos)) . (actorKeys.at actor_pos .~ Nothing), Nothing) EnterLevel (WorldCoordinates new_actor_pos new_level_id) -> -- TODO: check any complications if new level is the same as old one -- (that is, portal goes to level itself) -- -- Right now it should be safe because RWPAS.Control sets the latter -- level last, overwriting the operation of removing the actor from the -- level. case get_level new_level_id of Nothing -> Nothing Just new_level -> if impassable (terrainFeature new_actor_pos new_level) || isJust (actorByCoordinates new_actor_pos new_level) then Nothing else Just (level & (actors.at aid .~ Nothing) . (actorKeys.at actor_pos .~ Nothing) ,Just (new_level_id ,new_level & (actors.at aid .~ Just (actor & position .~ new_actor_pos)) . (actorKeys.at new_actor_pos .~ Just aid) . 
(if source_level_id == new_level_id then actorKeys.at actor_pos .~ Nothing else id))) data StepResult = SameLevel !LevelCoordinates | EnterLevel !WorldCoordinates deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic ) couldPotentiallyGoThroughPortal :: Direction8 -> Direction4 -> Bool couldPotentiallyGoThroughPortal D8Up DUp = True couldPotentiallyGoThroughPortal D8UpLeft DUp = True couldPotentiallyGoThroughPortal D8UpRight DUp = True couldPotentiallyGoThroughPortal _ DUp = False couldPotentiallyGoThroughPortal D8Down DDown = True couldPotentiallyGoThroughPortal D8DownLeft DDown = True couldPotentiallyGoThroughPortal D8DownRight DDown = True couldPotentiallyGoThroughPortal _ DDown = False couldPotentiallyGoThroughPortal D8Left DLeft = True couldPotentiallyGoThroughPortal D8DownLeft DLeft = True couldPotentiallyGoThroughPortal D8UpLeft DLeft = True couldPotentiallyGoThroughPortal _ DLeft = False couldPotentiallyGoThroughPortal D8Right DRight = True couldPotentiallyGoThroughPortal D8DownRight DRight = True couldPotentiallyGoThroughPortal D8UpRight DRight = True couldPotentiallyGoThroughPortal _ DRight = False swapV2 :: V2 a -> V2 a swapV2 (V2 x y) = V2 y x {-# INLINE swapV2 #-} -- | Steps one unit to some direction in a level. Returns a `StepResult` that -- tells if the step moved through a portal or stayed on the same level. step :: Direction8 -> LevelCoordinates -> Level -> StepResult step dir coords@(V2 x y) level = case level^.portalKeys.at coords of Nothing -> SameLevel local_target Just set | IS.null set -> SameLevel local_target Just set -> case findInSet doesItGoThrough set of Nothing -> SameLevel local_target Just portal -> let initial_position_on_the_other_side = V2 (negate (portal^.axisPosition) + portal^.targetLevelAxisPosition) (negate (portal^.axisTopPosition) + portal^.targetLevelAxisTopPosition) final_position_on_the_other_side = initial_position_on_the_other_side + direction8ToDelta dir fixed_position_on_the_other_side = case portal^.axis of DLeft -> final_position_on_the_other_side + V2 x y DRight -> final_position_on_the_other_side + V2 x y DUp -> swapV2 final_position_on_the_other_side + V2 y x DDown -> swapV2 final_position_on_the_other_side + V2 y x in EnterLevel (WorldCoordinates fixed_position_on_the_other_side (portal^.targetLevel)) where local_target = direction8ToDelta dir + coords findInSet fun set = let lst = IS.toList set in case find (\portal_id -> case level^.portals.at portal_id of Nothing -> False Just ok -> fun ok) lst of Nothing -> Nothing Just pid -> level^.portals.at pid doesItGoThrough portal = couldPotentiallyGoThroughPortal dir (portal^.axis) data AugmentedCoords = AugmentedCoords !LevelCoordinates !(V2 Int) levelFieldOfView :: Monad m => Int -> Int -> LevelCoordinates -> Level -> LevelID -> (LevelID -> Maybe Level) -> (LevelCoordinates -> V2 Int -> Level -> LevelID -> m ()) -> m () levelFieldOfView x_extent y_extent coords level level_id get_level i_see = void $ flip execStateT (level, level_id) $ computeFieldOfView (\(AugmentedCoords coords offset_coords) -> do (lvl, lvl_id) <- get lift $ i_see coords offset_coords lvl lvl_id) (\(AugmentedCoords coords _) -> do (lvl, _) <- get return $ seeThrough (terrainFeature coords lvl)) ByDirection { _leftD = goThrough D8Left , _rightD = goThrough D8Right , _upD = goThrough D8Up , _downD = goThrough D8Down , _leftupD = goThrough D8UpLeft , _leftdownD = goThrough D8DownLeft , _uprightD = goThrough D8UpRight , _downrightD = goThrough D8DownRight } (AugmentedCoords coords coords) 2 x_extent y_extent where 
goThrough dir8 (AugmentedCoords coords offset_coords) = do result <- goThrough' dir8 coords case result of Nothing -> return Nothing Just ok -> let new_offset_coords = offset_coords + direction8ToDelta dir8 in return $ Just $ AugmentedCoords ok new_offset_coords goThrough' dir8 coords = do (lvl, _) <- get case step dir8 coords lvl of SameLevel new_coords -> return $ Just new_coords EnterLevel (WorldCoordinates new_coords new_level_id) -> case get_level new_level_id of Nothing -> return Nothing Just new_level -> do put (new_level, new_level_id) return $ Just new_coords {-# INLINE levelFieldOfView #-} -- | Removes all decorations from a level. removeDecorations :: Level -> Level removeDecorations lvl = lvl & decorations .~ mempty -- | Lens to an actor using some actor ID. actorById :: ActorID -> Lens' Level (Maybe Actor) actorById aid = actors.at aid actorByCoordinates :: LevelCoordinates -> Level -> Maybe ActorID actorByCoordinates coords level = level^.actorKeys.at coords levelSize :: Level -> V2 Int levelSize lvl = V2 (viewWidth (lvl^.terrain)) (viewHeight (lvl^.terrain)) {-# INLINE levelSize #-} -- | Returns the diagonal distance between two coordinates. diagonalDistance :: V2 Int -> V2 Int -> Int diagonalDistance (V2 x1 y1) (V2 x2 y2) = max (abs $ x1-x2) (abs $ y1-y2) {-# INLINE diagonalDistance #-} -- | Inserts an actor somewhere on the level. -- -- Actor already at the target position is overwritten, if there was anything -- there. insertActor :: ActorID -> Actor -> Level -> Level insertActor aid actor = (actors.at aid .~ Just actor) . (actorKeys.at (actor^.position) .~ Just aid) -- | Removes an actor from the level. -- -- Does nothing if the actor is not in the level. removeActor :: ActorID -> Level -> Level removeActor aid level = case level^.actors.at aid of Nothing -> level Just actor -> level & (actors.at aid .~ Nothing) . (actorKeys.at (actor^.position) .~ Nothing) -- | A fold all actors in a level. eachActor :: IndexedFold ActorID Level Actor eachActor = actors.ifolded itemByCoordinates :: LevelCoordinates -> Lens' Level (Maybe Item) itemByCoordinates x = items.at x
Noeda/rwpas
src/RWPAS/Level/Type.hs
mit
18,371
0
26
5,273
4,554
2,369
2,185
-1
-1
{-# LANGUAGE FlexibleContexts #-} module Ch27.SyslogUDPClient where import Data.Bits import Network.Socket hiding (sendTo) import Network.Socket.ByteString (sendTo) import Ch27.SyslogTypes import qualified Data.ByteString.Char8 as Strict import qualified Data.ByteString.Lazy.Char8 as Lazy (toStrict) import Data.ByteString.Builder (Builder, toLazyByteString, stringUtf8, charUtf8, byteString) import Data.Monoid ((<>)) import Control.Monad.Except (MonadError, throwError, catchError, runExceptT) import Control.Monad.IO.Class (MonadIO, liftIO) import Control.Exception -- (IOException) import Data.Typeable (Typeable) -- import Control.Monad.Catch -- import Control.Monad (join) data SyslogHandle = SyslogHandle { slSocket :: Socket, slProgram :: Strict.ByteString, slAddress :: SockAddr } -- port number or name type Port = Strict.ByteString type ProgName = Strict.ByteString openlog :: MonadIO m => HostName -> Port -> ProgName -> m SyslogHandle openlog hostname port progname = do addrinfos <- liftIO $ getAddrInfo Nothing (Just hostname) (Just (Strict.unpack port)) let serveraddr = head addrinfos sock <- liftIO $ socket (addrFamily serveraddr) Datagram defaultProtocol return $ SyslogHandle sock progname (addrAddress serveraddr) data SyslogException = InvalidCode | Generic String deriving (Show, Typeable) instance Exception SyslogException -- Goal: catch both SyslogExceptions and IOExceptions {- 1st try: use the MonadError type class. Issue: the context can never be satisfied in the IO monad! That's because there is a fundep in the MonadError class declaration: class MonadError e m | m -> e and the instance for IO is: instance MonadError IOException IO so the following won't compile: foo :: SyslogHandle -> IO () foo h = syslog h fac prio "foo" -} syslog :: (MonadError SomeException m, MonadIO m) => SyslogHandle -> Facility -> Priority -> Strict.ByteString -> m () syslog syslogh fac pri msg = case toSyslogCode fac pri of Nothing -> throwError (toException InvalidCode) Just code -> do eres <- runExceptT $ sendstr $ Lazy.toStrict (toLazyByteString sendmsgBuilder) either (throwError . toException) return eres where sendmsgBuilder :: Builder sendmsgBuilder = charUtf8 '<' <> stringUtf8 (show code) <> charUtf8 '>' <> byteString (slProgram syslogh) <> stringUtf8 ": " <> byteString msg sendstr :: (MonadError IOException m, MonadIO m) => Strict.ByteString -> m () sendstr omsg | Strict.null omsg = return () | otherwise = do liftIO $ catchError (() <$ sendTo (slSocket syslogh) omsg (slAddress syslogh)) throwError {- 2hd try: do without the MonadError type class and manually handle failure Issue: it's re-doing what MonadError already does under the hood -} syslog' :: (MonadIO m) => Facility -> Priority -> Strict.ByteString -> SyslogHandle -> m (Either SomeException ()) syslog' fac pri msg h = case toSyslogCode fac pri of Nothing -> return . Left . toException $ InvalidCode Just code -> do liftIO $ catch (Right <$> sendstr (Lazy.toStrict (toLazyByteString sendmsgBuilder))) (return . Left . 
toException :: MonadIO m => IOException -> m (Either SomeException ())) where sendmsgBuilder :: Builder sendmsgBuilder = charUtf8 '<' <> stringUtf8 (show code) <> charUtf8 '>' <> byteString (slProgram h) <> stringUtf8 ": " <> byteString msg sendstr :: (MonadIO m) => Strict.ByteString -> m () sendstr omsg | Strict.null omsg = return () | otherwise = do sent <- liftIO $ sendTo (slSocket h) omsg (slAddress h) sendstr (Strict.drop sent omsg) -- liftIO $ throwIO (userError "Boom") -- caught -- liftIO $ throwIO InvalidCode -- uncaught and the type system doesn't help closelog :: MonadIO m => SyslogHandle -> m () closelog syslogh = liftIO $ close (slSocket syslogh) toSyslogCode :: Facility -> Priority -> Maybe Int toSyslogCode fac prio = mkCode <$> codeOfFac fac <*> Just (fromEnum prio) where mkCode :: Int -> Int -> Int mkCode facCode prioCode = (facCode `shiftL` 3) .|. prioCode oneshotlog :: HostName -> Port -> ProgName -> Facility -> Priority -> Strict.ByteString -> IO (Either SomeException ()) oneshotlog hn p pn fac prio msg = bracket (openlog hn p pn) closelog (syslog' fac prio msg) -- this doesn't compile: how can I solve it? Maybe ExceptT isn't the right tool for the job -- bracket (openlog hn p pn) closelog (\h -> syslog h fac prio msg)
futtetennista/IntroductionToFunctionalProgramming
RWH/src/Ch27/SyslogUDPClient.hs
mit
4,975
0
19
1,337
1,220
626
594
103
2
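A minimal, hypothetical driver for the module above. It assumes the accompanying Ch27.SyslogTypes module (not shown here) exports USER and INFO constructors; the host, port, and program name are placeholders.

{-# LANGUAGE OverloadedStrings #-}
module Main (main) where

import Ch27.SyslogUDPClient (oneshotlog)
import Ch27.SyslogTypes (Facility (USER), Priority (INFO))  -- assumed constructors

main :: IO ()
main = do
  -- Fire one UDP syslog message and report the combined Either result.
  result <- oneshotlog "localhost" "514" "demo-prog" USER INFO "hello from Haskell"
  either print (\_ -> putStrLn "logged") result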
{-# LANGUAGE DataKinds #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE TypeFamilies #-} module Apollo.Reflection ( Demote' , Demote , ReflectS(..) , Proxiable(..) , Proxy(..) , KProxy(..) ) where import Data.Proxy -- | Type level function that selects a canonical type constructor for a given -- kind. Generally, the selected type constructor consist of singletons, taking -- each type in the kind @k@ to a distinct type. type family Demote' (p :: KProxy k) :: k -> * -- | The proxy argument can be inferred if a concrete type of kind @k@ is -- available. type Demote (a :: k) = Demote' ('KProxy :: KProxy k) -- | Types in some kind @k@ that can be reflected into values. class ReflectS (a :: k) where reflectS :: Proxy a -> Demote a a -- | Class of type constructors that can be demoted to proxies. -- -- Useful for converting various things into proxies without needing to write -- explicit type synonyms. class Proxiable (s :: k -> *) where proxy :: s a -> Proxy a instance Proxiable [] where proxy _ = Proxy instance Proxiable Maybe where proxy _ = Proxy
tsani/apollo
src/Apollo/Reflection.hs
mit
1,071
1
8
209
210
124
86
21
0
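A sketch of how the Demote'/ReflectS machinery above can be instantiated at the promoted Bool kind; the SBool singleton is hypothetical and not part of the module.

{-# LANGUAGE DataKinds, GADTs, KindSignatures, TypeFamilies, FlexibleInstances #-}
module Apollo.Reflection.BoolExample where

import Apollo.Reflection

-- A hypothetical singleton indexed by the promoted Bool kind.
data SBool (b :: Bool) where
  STrue  :: SBool 'True
  SFalse :: SBool 'False

-- Choose SBool as the canonical type constructor for kind Bool.
type instance Demote' ('KProxy :: KProxy Bool) = SBool

instance ReflectS 'True  where reflectS _ = STrue
instance ReflectS 'False where reflectS _ = SFalse

-- reflectS (Proxy :: Proxy 'True) now evaluates to STrue.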
module Util.GL where

import qualified Graphics.Rendering.OpenGL as GL

-- | Convert a 'Float' into OpenGL's 'GL.GLfloat'.
float2gl :: Float -> GL.GLfloat
float2gl = realToFrac :: Float -> GL.GLfloat
kaisellgren/ankka
src/Util/GL.hs
mit
149
0
6
22
40
25
15
4
1
{-# LANGUAGE DeriveDataTypeable #-} {- Copyright (C) 2012-2017 Jimmy Liang, Michal Antkiewicz <http://gsd.uwaterloo.ca> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -} module Main where import Language.Clafer.IG.ClaferIG import Language.Clafer.IG.ClaferModel import Language.Clafer.IG.CommandLine import Language.Clafer.IG.Solution import Language.Clafer.IG.Sugarer import Language.Clafer.ClaferArgs import Language.Clafer.JSONMetaData import Language.ClaferT import Language.Clafer.Common import Control.Monad import Control.Monad.IO.Class import Data.Either import qualified Data.Map as Map import Data.List (partition) import Data.Maybe import Data.IORef import Prelude hiding (all) import System.Console.CmdArgs import System.Directory import System.FilePath claferIGArgsDef :: IGArgs claferIGArgsDef = IGArgs { all = def &= opt "1" &= help "Saves all instances or a counterexample. Reads scopes from a `.cfr.scope` file or uses the provided global scope." &= typ "INTEGER", saveDir = def &= help "Specify the directory for storing saved files." &= typ "FILE", bitwidth = 4 &= help "Set the bitwidth for integers." &= typ "INTEGER", -- Default bitwidth is 4. maxInt = 7 &= help "Set the bitwidth for integers based on the largest required number. Overrides --bitwidth argument." &= typ "INTEGER", alloySolution = def &= help "Convert Alloy solution to a Clafer solution." &= typ "FILE", claferModelFile = def &= argPos 0 &= typ "FILE", useUids = False &= help "Use unique clafer names in the Clafer solution.", addTypes = False &= help "Add colon/reference types to the Clafer solution.", json = False &= help "Render solution as JSON (forces 'addUids').", flatten_inheritance_comp = def &= help "Flatten inheritance during compiling ('alloy' mode only)" &= name "i", no_layout_comp = def &= help "Don't resolve off-side rule layout during compiling" &= name "l", check_duplicates_comp = def &= help "Check duplicated clafer names during compiling" &= name "c", skip_resolver_comp = def &= help "Skip name resolution during compiling" &= name "f", scope_strategy_comp = Simple &= help "Use scope computation strategy during compiling: none, simple (default), or full." 
&= name "ss" } &= summary claferIGVersion &= program "claferig" main :: IO () main = do args' <- cmdArgs claferIGArgsDef let bw = bitwidth args' mi = maxInt args' -- maxInt overrides the bitwidth setting args'' = if (mi > allowedMaxInt bw) then args' {bitwidth = requiredBitwidth mi} else args' if (not $ null $ alloySolution args'') then do _ <- runAlloySolution args'' return () else if (json args'') then tryClaferIG (args'' { useUids = True }) else tryClaferIG args'' where tryClaferIG args3 = do try <- runClaferIG args3 case try of Right r -> return r Left l -> do mapM_ putStrLn $ printError l putStrLn "Press enter to retry." void getLine tryClaferIG args3 runClaferIG :: IGArgs -> IO (Either ClaferErrs ()) runClaferIG args' = runClaferIGT args' $ do let claferModelFileName = claferModelFile args' cModel <- liftIO $ strictReadFile claferModelFileName if null cModel then error "Cannot instantiate an empty model." else liftIO $ putStrLn "Compiling the Clafer model..." oldBw <- getBitwidth env <- getClaferEnv let ir = fst3 $ fromJust $ cIr env scopes <- getScopes setBitwidth $ findNecessaryBitwidth ir oldBw $ map snd scopes solve case all args' of Just scope -> do -- copied from CommandLine LoadScopes command qNameMaps' <- getQNameMaps maybeUidScopes <- liftIO $ readCfrScopeFile qNameMaps' claferModelFileName case maybeUidScopes of Nothing -> do liftIO $ putStrLn "Using the provided global scope as a `.cfr-scope` file does not exist. Use the command `saveScopes` to create one." setGlobalScope scope Just uidScopes -> do let (globalScopes, normalScopes) = partition (\(uid, _) -> null uid) uidScopes -- from the globalScopes, take the maximum globalScopeVals = map snd globalScopes globalScope = maximum globalScopeVals -- add the "this/" prefix normalScopesAlloy = map (\(uid, scope2) -> ("this/"++uid, scope2)) normalScopes setGlobalScope globalScope mapM_ (\(uid, val) -> setAlloyScope val uid) normalScopesAlloy -- end copied solve counterRef <- liftIO $ newIORef 1 let saveDirectory = fromMaybe return $ underDirectory `liftM` saveDir args' saveAll (savePath claferModelFileName counterRef >>= saveDirectory) quit Nothing -> do liftIO $ putStrLn "Type 'h' for the list of available REPL commands\n" runCommandLine -- | Convert an Alloy XML file into an instance in Clafer runAlloySolution :: IGArgs -> IO (Either ClaferErrs ()) runAlloySolution args' = runClaferIGT args' $ do let claferModelFileName = claferModelFile args' cModel <- liftIO $ strictReadFile claferModelFileName when (cModel == "") $ error $ "Cannot convert Alloy solution without the Clafer model from which the instance was created.\n" ++ "Usage: claferIG [OPTIONS] <model.cfr> --alloy-solution=<instance.xml>\n" alloyInstance <- liftIO $ strictReadFile $ alloySolution args' -- It's an Alloy XML file in this case when (null alloyInstance) $ error $ "Provide an Alloy solution Alloy file name.\n" ++ "Usage: claferIG [OPTIONS] <model.cfr> --alloy-solution=<instance.xml>\n" env <- getClaferEnv let (_, genv', _) = fromJust $ cIr env let sMap = Map.empty uidIClaferMap' = uidClaferMap genv' liftIO $ putStrLn $ show $ (sugarClaferModel (useUids args') (addTypes args') uidIClaferMap' $ buildClaferModel $ parseSolution alloyInstance) $ sMap savePath :: FilePath -> IORef Int -> IO FilePath savePath file' counterRef = do counter <- readIORef counterRef writeIORef counterRef (counter + 1) return $ file' ++ "." 
++ (show counter) ++ ".data" underDirectory :: FilePath -> FilePath -> IO FilePath underDirectory dir file' = do createDirectoryIfMissing True dir return $ joinPath [dir, file'] saveAll :: IO FilePath -> ClaferIGT IO () saveAll nextFile = do file' <- liftIO nextFile liftIO $ createDirectoryIfMissing True $ takeDirectory file' solution <- next case solution of Instance{modelInstance = modelInstance'} -> do liftIO $ writeFile file' (show modelInstance') saveAll nextFile _ -> return ()
gsdlab/claferIG
src-cmd/Main.hs
mit
8,717
0
25
2,704
1,584
781
803
142
5
module Y2016.M07.D21.Solution where import Data.Graph import Data.Tree (subForest, rootLabel) import Data.Maybe (maybeToList, mapMaybe) import Y2016.M07.D20.Solution import Y2016.M07.D19.Exercise (figure2) {-- you can get a Graph-Figure of figure from: *Y2016.M07.D21.Solution> let (gr,fnPt,fnVertM) = graphit figure2 lineSegments --} pathing :: FigureC -> Char -> Char -> String pathing fig@(gr,_,vf) start end = -- no path is "", so: let dest = head (maybeToList (vf end)) branches = subForest (head (dfs gr (maybeToList (vf start)))) nextnodes = map rootLabel branches in start : p' fig nextnodes dest p' :: FigureC -> [Vertex] -> Vertex -> String p' fig@(gr,toNodef,toVertf) roots dest = let branch = head (filter (flip (path gr) dest) roots) (_,label,branches) = toNodef branch in label : if branch == dest then "" else p' fig (mapMaybe toVertf branches) dest {-- What is _a_ pathing from 'a' to 'c'? *Y2016.M07.D21.Solution> let fig = graphit figure2 lineSegments *Y2016.M07.D21.Solution> pathing fig 'a' 'c' ~> "abgc" -- What is _a_ path from 'b' to 't'? *Y2016.M07.D21.Solution> pathing fig 'b' 't' ~> "bgcdt" Not the shortest path, but it is a path. However: *Y2016.M07.D21.Solution> pathing fig 'b' 'h' ~> "b*** Exception: Prelude.head: empty list Shows us that the above does not treat all edges as bidirectional, as it says the network does not support a b -> h path, when the figure shows there is such a path. There are several ways to address this issue. We'll tackle this tomorrow. --}
geophf/1HaskellADay
exercises/HAD/Y2016/M07/D21/Solution.hs
mit
1,574
0
17
306
312
171
141
18
2
import Data.List.Split

-- Read /etc/passwd-style records from stdin and print the last
-- colon-separated field (the login shell) of every line.
main = getContents >>= putStr . main'

main' :: String -> String
main' cs = unlines $ map getShell (lines cs)

-- Extract the final ":"-separated field of one record.
getShell :: String -> String
getShell ln = last $ splitOn ":" ln
ryuichiueda/UspMagazineHaskell
Study1_Q2/q1_2_1.hs
mit
200
0
8
39
78
40
38
6
1
{- | == Getting Started To get started with golden testing and this library, see <https://ro-che.info/articles/2017-12-04-golden-tests Introduction to golden testing>. This module provides a simplified interface. If you want more, see "Test.Tasty.Golden.Advanced". == Filenames Filenames are looked up in the usual way, Thus relative names are relative to the processes current working directory. It is common to run tests from the package's root directory (via @cabal test@ or @cabal install --enable-tests@), so if your test files are under the @tests\/@ subdirectory, your relative file names should start with @tests\/@ (even if your @test.hs@ is itself under @tests\/@, too). == Line endings The best way to avoid headaches with line endings (when running tests both on UNIX and Windows) is to treat your golden files as binary, even when they are actually textual. This means: * When writing output files from Haskell code, open them in binary mode (see 'openBinaryFile', 'withBinaryFile' and 'hSetBinaryMode'). This will disable automatic @\\n -> \\r\\n@ conversion on Windows. For convenience, this module exports 'writeBinaryFile' which is just like `writeFile` but opens the file in binary mode. When using 'ByteString's note that "Data.ByteString" and "Data.ByteString.Lazy" use binary mode for @writeFile@, while "Data.ByteString.Char8" and "Data.ByteString.Lazy.Char8" use text mode. * Tell your VCS not to do any newline conversion for golden files. For git check in a @.gitattributes@ file with the following contents (assuming your golden files have @.golden@ extension): >*.golden -text On its side, tasty-golden reads and writes files in binary mode, too. Why not let Haskell/git do automatic conversion on Windows? Well, for instance, @tar@ will not do the conversion for you when unpacking a release tarball, so when you run @cabal install your-package --enable-tests@, the tests will be broken. As a last resort, you can strip all @\\r@s from both arguments in your comparison function when necessary. But most of the time treating the files as binary does the job. == Linking The test suite should be compiled with @-threaded@ if you want to avoid blocking any other threads while 'goldenVsFileDiff' and similar functions wait for the result of the diff command. == Windows limitations When using 'goldenVsFileDiff' or 'goldenVsStringDiff' under Windows the exit code from the diff program that you specify will not be captured correctly if that program uses @exec@. More specifically, you will get the exit code of the /original child/ (which always exits with code 0, since it called @exec@), not the exit code of the process which carried on with execution after @exec@. This is different from the behavior prescribed by POSIX but is the best approximation that can be realised under the restrictions of the Windows process model. See 'System.Process' for further details or <https://github.com/haskell/process/pull/168> for even more. -} {-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} module Test.Tasty.Golden ( -- * Functions to create a golden test goldenVsFile , goldenVsString , goldenVsFileDiff , goldenVsStringDiff -- * Options , SizeCutoff(..) , DeleteOutputFile(..) 
-- * Various utilities , writeBinaryFile , findByExtension , createDirectoriesAndWriteFile ) where import Test.Tasty import Test.Tasty.Golden.Advanced import Test.Tasty.Golden.Internal import Text.Printf import qualified Data.ByteString.Lazy as LBS import qualified Data.Text.Lazy as LT import qualified Data.Text.Lazy.Encoding as LT import System.IO import System.IO.Temp import qualified System.Process.Typed as PT import System.Exit import System.FilePath import System.Directory import System.PosixCompat.Files import Control.Exception import Control.Monad import qualified Data.Set as Set #if !MIN_VERSION_base(4,11,0) import Data.Monoid #endif -- | Compare the output file's contents against the golden file's contents -- after the given action has created the output file. goldenVsFile :: TestName -- ^ test name -> FilePath -- ^ path to the «golden» file (the file that contains correct output) -> FilePath -- ^ path to the output file -> IO () -- ^ action that creates the output file -> TestTree -- ^ the test verifies that the output file contents is the same as the golden file contents goldenVsFile name ref new act = goldenTest2 name (readFileStrict ref) (act >> readFileStrict new) cmp upd del where cmp = simpleCmp $ printf "Files '%s' and '%s' differ" ref new upd = createDirectoriesAndWriteFile ref del = removeFile new -- | Compare a given string against the golden file's contents. goldenVsString :: TestName -- ^ test name -> FilePath -- ^ path to the «golden» file (the file that contains correct output) -> IO LBS.ByteString -- ^ action that returns a string -> TestTree -- ^ the test verifies that the returned string is the same as the golden file contents goldenVsString name ref act = askOption $ \sizeCutoff -> goldenTest name (readFileStrict ref) act (cmp sizeCutoff) upd where cmp sizeCutoff x y = simpleCmp msg x y where msg = printf "Test output was different from '%s'. It was:\n" ref <> unpackUtf8 (truncateLargeOutput sizeCutoff y) upd = createDirectoriesAndWriteFile ref simpleCmp :: Eq a => String -> a -> a -> IO (Maybe String) simpleCmp e x y = return $ if x == y then Nothing else Just e -- | Same as 'goldenVsFile', but invokes an external diff command. -- -- See the notes at the top of this module regarding linking with -- @-threaded@ and Windows-specific issues. goldenVsFileDiff :: TestName -- ^ test name -> (FilePath -> FilePath -> [String]) -- ^ function that constructs the command line to invoke the diff -- command. -- -- E.g. -- -- >\ref new -> ["diff", "-u", ref, new] -> FilePath -- ^ path to the golden file -> FilePath -- ^ path to the output file -> IO () -- ^ action that produces the output file -> TestTree goldenVsFileDiff name cmdf ref new act = askOption $ \sizeCutoff -> goldenTest2 name (getFileStatus ref >> return ()) -- Use getFileStatus to check if the golden file exists. If the file -- doesn't exist, getFileStatus will throw an isDoesNotExistError that -- runGolden will handle by creating the golden file before proceeding. -- See #32. act (\_ _ -> runDiff (cmdf ref new) sizeCutoff) upd del where upd _ = readFileStrict new >>= createDirectoriesAndWriteFile ref del = removeFile new -- | Same as 'goldenVsString', but invokes an external diff command. -- -- See the notes at the top of this module regarding linking with -- @-threaded@ and Windows-specific issues. goldenVsStringDiff :: TestName -- ^ test name -> (FilePath -> FilePath -> [String]) -- ^ function that constructs the command line to invoke the diff -- command. -- -- E.g. 
-- -- >\ref new -> ["diff", "-u", ref, new] -> FilePath -- ^ path to the golden file -> IO LBS.ByteString -- ^ action that returns a string -> TestTree goldenVsStringDiff name cmdf ref act = askOption $ \sizeCutoff -> goldenTest name (readFileStrict ref) (act) (cmp sizeCutoff) upd where template = takeBaseName ref <.> "actual" cmp sizeCutoff _ actBS = withSystemTempFile template $ \tmpFile tmpHandle -> do -- Write act output to temporary ("new") file LBS.hPut tmpHandle actBS >> hFlush tmpHandle let cmd = cmdf ref tmpFile diff_result :: Maybe String <- runDiff cmd sizeCutoff return $ flip fmap diff_result $ \diff -> printf "Test output was different from '%s'. Output of %s:\n" ref (show cmd) <> diff upd = createDirectoriesAndWriteFile ref truncateLargeOutput :: SizeCutoff -> LBS.ByteString -> LBS.ByteString truncateLargeOutput (SizeCutoff n) str = if LBS.length str <= n then str else LBS.take n str <> "<truncated>" <> "\nUse --accept or increase --size-cutoff to see full output." -- | Like 'writeFile', but uses binary mode. (Needed only when you work -- with 'String'.) writeBinaryFile :: FilePath -> String -> IO () writeBinaryFile f txt = withBinaryFile f WriteMode (\hdl -> hPutStr hdl txt) -- | Find all files in the given directory and its subdirectories that have -- the given extensions. -- -- It is typically used to find all test files and produce a golden test -- per test file. -- -- The returned paths use forward slashes to separate path components, -- even on Windows. Thus if the file name ends up in a golden file, it -- will not differ when run on another platform. -- -- The semantics of extensions is the same as in 'takeExtension'. In -- particular, non-empty extensions should have the form @".ext"@. -- -- This function may throw any exception that 'getDirectoryContents' may -- throw. -- -- It doesn't do anything special to handle symlinks (in particular, it -- probably won't work on symlink loops). -- -- Nor is it optimized to work with huge directory trees (you'd probably -- want to use some form of coroutines for that). findByExtension :: [FilePath] -- ^ extensions -> FilePath -- ^ directory -> IO [FilePath] -- ^ paths findByExtension extsList = go where exts = Set.fromList extsList go dir = do allEntries <- getDirectoryContents dir let entries = filter (not . (`elem` [".", ".."])) allEntries liftM concat $ forM entries $ \e -> do let path = dir ++ "/" ++ e isDir <- doesDirectoryExist path if isDir then go path else return $ if takeExtension path `Set.member` exts then [path] else [] -- | Like 'LBS.writeFile', but also create parent directories if they are -- missing. createDirectoriesAndWriteFile :: FilePath -> LBS.ByteString -> IO () createDirectoriesAndWriteFile path bs = do let dir = takeDirectory path createDirectoryIfMissing True -- create parents too dir LBS.writeFile path bs -- | Force the evaluation of a lazily-produced bytestring. -- -- This is important to close the file handles. -- -- See <https://ro-che.info/articles/2015-05-28-force-list>. forceLbs :: LBS.ByteString -> () forceLbs = LBS.foldr seq () readFileStrict :: FilePath -> IO LBS.ByteString readFileStrict path = do s <- LBS.readFile path evaluate $ forceLbs s return s unpackUtf8 :: LBS.ByteString -> String unpackUtf8 = LT.unpack . LT.decodeUtf8 runDiff :: [String] -- ^ the diff command -> SizeCutoff -> IO (Maybe String) runDiff cmd sizeCutoff = case cmd of [] -> throwIO $ ErrorCall "tasty-golden: empty diff command" prog : args -> do let procConf = PT.setStdin PT.closed . 
PT.setStderr PT.inherit $ PT.proc prog args (exitCode, out) <- PT.readProcessStdout procConf return $ case exitCode of ExitSuccess -> Nothing _ -> Just . unpackUtf8 . truncateLargeOutput sizeCutoff $ out
feuerbach/tasty-golden
Test/Tasty/Golden.hs
mit
11,066
0
17
2,273
1,525
826
699
-1
-1
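A minimal sketch of wiring goldenVsString from the module above into a test tree; the test name and golden-file path are placeholders.

module Main (main) where

import Test.Tasty (defaultMain, testGroup)
import Test.Tasty.Golden (goldenVsString)
import qualified Data.ByteString.Lazy.Char8 as LBS

main :: IO ()
main = defaultMain $ testGroup "golden"
  [ -- Compares the produced bytes against the golden file, creating it
    -- automatically if it does not exist yet (or refresh it with --accept).
    goldenVsString "hello output" "tests/golden/hello.golden"
      (pure (LBS.pack "hello, golden\n"))
  ]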
{-# htermination sin :: Float -> Float #-}
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/Prelude_sin_1.hs
mit
43
0
2
8
3
2
1
1
0
-- Benchmark.hs -- A set of (micro-) benchmarks for the Haskell -- programming language -- -- vim: ft=haskell sw=2 ts=2 et -- {-# LANGUAGE OverloadedStrings #-} module Main where import System.CPUTime import Fibonacci as Fib import PerfectNumber as Pn import qualified Mandelbrot as M -- a helper fun for timing measurement --timeIt :: (Fractional c) => (a -> b) -> a -> IO (b, c) --timeIt action arg = -- do startTime <- getCPUTime -- res <- action arg -- finishTime <- getCPUTime -- return $ (res, fromIntegral (finishTime - startTime) / 1000000000000) toMSec :: (Fractional a) => Integer -> Integer -> a toMSec tic toc = fromIntegral (toc -tic) / 1000000000 toMSecStr :: Integer -> Integer -> String toMSecStr tic toc = show $ fromIntegral (toc -tic) / 1000000000 putElapsedSince :: Integer -> IO () putElapsedSince tic = do toc <- getCPUTime putStrLn $ "Elapsed: " ++ show (toMSec tic toc) ++ "msec." --timeIt' :: (Fractional c) => (a -> b) -> a -> IO (b, c) --timeIt' f arg = -- do tic <- getCPUTime -- res <- f arg -- toc <- getCPUTime -- return (res, fromIntegral $ (toc - tic) / 1000000000000) -- main entry point main :: IO () main = do putStrLn "Haskell Benchmarks" putStrLn "==================" putStrLn "" putStrLn "Fibonacci numbers:" putStrLn "------------------" tic0 <- getCPUTime putStrLn $ "fibNaive(35) = " ++ show (fibNaive 35) putElapsedSince tic0 tic1 <- getCPUTime putStrLn $ "fib(35) = " ++ show (fib 35) putElapsedSince tic1 tic2 <- getCPUTime putStrLn $ "fib(1000) = " ++ show (fib 1000) putElapsedSince tic2 putStrLn "" putStrLn "Perfect numbers:" putStrLn "----------------" tic3 <- getCPUTime putStrLn $ "perfectNumbers(10000) = " ++ show (perfectNumbers 10000) putElapsedSince tic3 putStrLn "" putStrLn "Perfect numbers:" putStrLn "----------------" tic4 <- getCPUTime putStrLn $ "mandelbrot(640x480): " ++ show (length (M.mandelbrot 640 480 (-0.5) 0.0 (4/640))) ++ " pixel calculated" putElapsedSince tic4 putStrLn "" putStrLn "Done!" putStrLn "Press <ENTER> to continue.." _ <- getLine return ()
kkirstein/proglang-playground
Haskell/src/Benchmark/Benchmark.hs
mit
2,251
0
15
548
499
240
259
51
1
{-# LANGUAGE NoImplicitPrelude, TypeSynonymInstances, FlexibleInstances #-} module IHaskell.Display.Diagrams (diagram) where import ClassyPrelude import System.Directory import qualified Data.ByteString.Char8 as Char import System.IO.Unsafe import Diagrams.Prelude import Diagrams.Backend.Cairo import IHaskell.Display instance IHaskellDisplay (Diagram Cairo R2) where display renderable = do png <- diagramData renderable PNG svg <- diagramData renderable SVG return $ Display [png, svg] diagramData :: Diagram Cairo R2 -> OutputType -> IO DisplayData diagramData renderable format = do switchToTmpDir -- Compute width and height. let w = width renderable h = height renderable aspect = w / h imgHeight = 300 imgWidth = aspect * imgHeight -- Write the image. let filename = ".ihaskell-diagram." ++ extension format renderCairo filename (Height imgHeight) renderable -- Convert to base64. imgData <- readFile $ fpFromString filename let value = case format of PNG -> png (floor imgWidth) (floor imgHeight) $ base64 imgData SVG -> svg $ Char.unpack imgData return value where extension SVG = "svg" extension PNG = "png" -- Rendering hint. diagram :: Diagram Cairo R2 -> Diagram Cairo R2 diagram = id
aostiles/LiveHaskell
ihaskell-display/ihaskell-diagrams/IHaskell/Display/Diagrams.hs
mit
1,295
0
16
265
349
177
172
33
3
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-} module GHCJS.DOM.JSFFI.Generated.SVGTransformList (js_clear, clear, js_initialize, initialize, js_getItem, getItem, js_insertItemBefore, insertItemBefore, js_replaceItem, replaceItem, js_removeItem, removeItem, js_appendItem, appendItem, js_createSVGTransformFromMatrix, createSVGTransformFromMatrix, js_consolidate, consolidate, js_getNumberOfItems, getNumberOfItems, SVGTransformList, castToSVGTransformList, gTypeSVGTransformList) where import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord) import Data.Typeable (Typeable) import GHCJS.Types (JSVal(..), JSString) import GHCJS.Foreign (jsNull) import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..)) import GHCJS.Marshal (ToJSVal(..), FromJSVal(..)) import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..)) import Control.Monad.IO.Class (MonadIO(..)) import Data.Int (Int64) import Data.Word (Word, Word64) import GHCJS.DOM.Types import Control.Applicative ((<$>)) import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName) import GHCJS.DOM.JSFFI.Generated.Enums foreign import javascript unsafe "$1[\"clear\"]()" js_clear :: SVGTransformList -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.clear Mozilla SVGTransformList.clear documentation> clear :: (MonadIO m) => SVGTransformList -> m () clear self = liftIO (js_clear (self)) foreign import javascript unsafe "$1[\"initialize\"]($2)" js_initialize :: SVGTransformList -> Nullable SVGTransform -> IO (Nullable SVGTransform) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.initialize Mozilla SVGTransformList.initialize documentation> initialize :: (MonadIO m) => SVGTransformList -> Maybe SVGTransform -> m (Maybe SVGTransform) initialize self item = liftIO (nullableToMaybe <$> (js_initialize (self) (maybeToNullable item))) foreign import javascript unsafe "$1[\"getItem\"]($2)" js_getItem :: SVGTransformList -> Word -> IO (Nullable SVGTransform) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.getItem Mozilla SVGTransformList.getItem documentation> getItem :: (MonadIO m) => SVGTransformList -> Word -> m (Maybe SVGTransform) getItem self index = liftIO (nullableToMaybe <$> (js_getItem (self) index)) foreign import javascript unsafe "$1[\"insertItemBefore\"]($2, $3)" js_insertItemBefore :: SVGTransformList -> Nullable SVGTransform -> Word -> IO (Nullable SVGTransform) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.insertItemBefore Mozilla SVGTransformList.insertItemBefore documentation> insertItemBefore :: (MonadIO m) => SVGTransformList -> Maybe SVGTransform -> Word -> m (Maybe SVGTransform) insertItemBefore self item index = liftIO (nullableToMaybe <$> (js_insertItemBefore (self) (maybeToNullable item) index)) foreign import javascript unsafe "$1[\"replaceItem\"]($2, $3)" js_replaceItem :: SVGTransformList -> Nullable SVGTransform -> Word -> IO (Nullable SVGTransform) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.replaceItem Mozilla SVGTransformList.replaceItem documentation> replaceItem :: (MonadIO m) => SVGTransformList -> Maybe SVGTransform -> Word -> m (Maybe SVGTransform) replaceItem self item index = liftIO (nullableToMaybe <$> (js_replaceItem (self) (maybeToNullable item) index)) foreign import javascript unsafe 
"$1[\"removeItem\"]($2)" js_removeItem :: SVGTransformList -> Word -> IO (Nullable SVGTransform) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.removeItem Mozilla SVGTransformList.removeItem documentation> removeItem :: (MonadIO m) => SVGTransformList -> Word -> m (Maybe SVGTransform) removeItem self index = liftIO (nullableToMaybe <$> (js_removeItem (self) index)) foreign import javascript unsafe "$1[\"appendItem\"]($2)" js_appendItem :: SVGTransformList -> Nullable SVGTransform -> IO (Nullable SVGTransform) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.appendItem Mozilla SVGTransformList.appendItem documentation> appendItem :: (MonadIO m) => SVGTransformList -> Maybe SVGTransform -> m (Maybe SVGTransform) appendItem self item = liftIO (nullableToMaybe <$> (js_appendItem (self) (maybeToNullable item))) foreign import javascript unsafe "$1[\"createSVGTransformFromMatrix\"]($2)" js_createSVGTransformFromMatrix :: SVGTransformList -> Nullable SVGMatrix -> IO (Nullable SVGTransform) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.createSVGTransformFromMatrix Mozilla SVGTransformList.createSVGTransformFromMatrix documentation> createSVGTransformFromMatrix :: (MonadIO m) => SVGTransformList -> Maybe SVGMatrix -> m (Maybe SVGTransform) createSVGTransformFromMatrix self matrix = liftIO (nullableToMaybe <$> (js_createSVGTransformFromMatrix (self) (maybeToNullable matrix))) foreign import javascript unsafe "$1[\"consolidate\"]()" js_consolidate :: SVGTransformList -> IO (Nullable SVGTransform) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.consolidate Mozilla SVGTransformList.consolidate documentation> consolidate :: (MonadIO m) => SVGTransformList -> m (Maybe SVGTransform) consolidate self = liftIO (nullableToMaybe <$> (js_consolidate (self))) foreign import javascript unsafe "$1[\"numberOfItems\"]" js_getNumberOfItems :: SVGTransformList -> IO Word -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.numberOfItems Mozilla SVGTransformList.numberOfItems documentation> getNumberOfItems :: (MonadIO m) => SVGTransformList -> m Word getNumberOfItems self = liftIO (js_getNumberOfItems (self))
manyoo/ghcjs-dom
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGTransformList.hs
mit
6,358
78
11
1,113
1,296
720
576
104
1
-- This is a module for constructing bounding volume hierarchies using an octree approach {-# LANGUAGE BangPatterns #-} module Octree(generateSceneGraphUsingOctree, splitBoxIntoOctreeChildren, octreeChildBox, Octree(OctreeNode, OctreeLeaf, OctreeDummy), create, Octree.insert, gather) where import Vector import {-# SOURCE #-} Primitive import BoundingBox import Misc data Octree a = OctreeDummy !AABB | OctreeNode !AABB [Octree a] | OctreeLeaf !AABB !(Vector, a) deriving (Eq) instance Show a => Show (Octree a) where show = display 0 tabs :: String tabs = '\t' : tabs display :: (Show a) => Int -> Octree a -> String display level (OctreeDummy box) = take level tabs ++ "[Dummy] box=" ++ show box ++ "\n" display level (OctreeNode box children) = take level tabs ++ "[Node] box=" ++ show box ++ "\n" ++ concatMap (display (level + 1)) children ++ "\n" display level (OctreeLeaf box (pos, value)) = take level tabs ++ "[Leaf] box=" ++ show box ++ " pos=" ++ show pos ++ " value=" ++ show value ++ "\n" create :: AABB -> Octree a create box = OctreeNode box $ map OctreeDummy (splitBoxIntoOctreeChildren box) -- Insert into an octree insert :: Vector -> a -> Octree a -> Octree a insert pos a oct = fst $ insert' pos oct (Just a) insert' :: Vector -> Octree a -> Maybe a -> (Octree a, Maybe a) insert' pos oct@(OctreeDummy box) state = case state of -- If we have been passed some state then attempt to consume it Just value -> if box `contains` pos then (OctreeLeaf box (pos, value), Nothing) else (oct, state) _ -> (oct, state) insert' pos oct@(OctreeNode box nodeChildren) state = if box `contains` pos then let (nodeChildren', state') = mapS (insert' pos) nodeChildren state in (OctreeNode box nodeChildren', state') else (oct, state) insert' pos oct@(OctreeLeaf box (pos', a')) state = if box `contains` pos then -- First up, we turn this leaf into a node with 8 children -- Discard result of mapS - we assume it returns Nothing -- Then, re-insert the original value into our nascent octree let (!newChildren, _) = mapS (insert' pos) (map OctreeDummy (splitBoxIntoOctreeChildren box)) state (!octTree', !state') = insert' pos' (OctreeNode box newChildren) (Just a') in (octTree', state') else (oct, state) -- Gather data within a sphere from an octree gather :: Position -> Double -> Octree a -> [(a, Double)] gather pos r (OctreeNode box nodeChildren) = if overlapsSphere box pos r then concatMap (gather pos r) nodeChildren else [] gather pos r (OctreeLeaf _ (pos', a)) | dSq <= r * r = [(a, dSq)] | otherwise = [] where dSq = pos `distanceSq` pos' gather _ _ (OctreeDummy _) = [] -- Generate a scene graph using an octree. Refactor this to just be an octree later splitBoxIntoOctreeChildren :: AABB -> [AABB] splitBoxIntoOctreeChildren (Vector xmin ymin zmin _, Vector xmax ymax zmax _) = [ (Vector xmin ymin zmin 1, Vector centreX centreY centreZ 1), (Vector centreX ymin zmin 1, Vector xmax centreY centreZ 1), (Vector xmin centreY zmin 1, Vector centreX ymax centreZ 1), (Vector centreX centreY zmin 1, Vector xmax ymax centreZ 1), (Vector xmin ymin centreZ 1, Vector centreX centreY zmax 1), (Vector centreX ymin centreZ 1, Vector xmax centreY zmax 1), (Vector xmin centreY centreZ 1, Vector centreX ymax zmax 1), (Vector centreX centreY centreZ 1, Vector xmax ymax zmax 1) ] where centreX = (xmin + xmax) * 0.5 centreY = (ymin + ymax) * 0.5 centreZ = (zmin + zmax) * 0.5 octreeChildBox :: AABB -> Int -> AABB octreeChildBox (Vector !xmin !ymin !zmin _, Vector !xmax !ymax ! 
zmax _) index = case index of 0 -> (Vector xmin ymin zmin 1, Vector centreX centreY centreZ 1) 1 -> (Vector centreX ymin zmin 1, Vector xmax centreY centreZ 1) 2 -> (Vector xmin centreY zmin 1, Vector centreX ymax centreZ 1) 3 -> (Vector centreX centreY zmin 1, Vector xmax ymax centreZ 1) 4 -> (Vector xmin ymin centreZ 1, Vector centreX centreY zmax 1) 5 -> (Vector centreX ymin centreZ 1, Vector xmax centreY zmax 1) 6 -> (Vector xmin centreY centreZ 1, Vector centreX ymax zmax 1) 7 -> (Vector centreX centreY centreZ 1, Vector xmax ymax zmax 1) _ -> error "Invalid index" where !centreX = (xmin + xmax) * 0.5 !centreY = (ymin + ymax) * 0.5 !centreZ = (zmin + zmax) * 0.5 -- Octree code that's spilt out from other modules... this is scene graph specific helper code rather than self-contained octree stuff -- Take a list of objects and split it into a list of objects that intersect a box, and those that don't objectsIntersectingBox :: [Object] -> AABB -> ([Object], [Object]) objectsIntersectingBox objects box = objectsIntersectingBox' objects box ([], []) objectsIntersectingBox' :: [Object] -> AABB -> ([Object], [Object]) -> ([Object], [Object]) objectsIntersectingBox' (obj:objs) box (currentHit, currentMiss) = if intersectsBox (primitive obj) (transform obj) box then objectsIntersectingBox' objs box (obj : currentHit, currentMiss) else objectsIntersectingBox' objs box (currentHit, obj : currentMiss) objectsIntersectingBox' [] _ (currentHit, currentMiss) = (currentHit, currentMiss) -- Iterator function. Match up objects to this box, and then iterate with the remainder assignObjectsToOctreeBoxes' :: [Object] -> [AABB] -> [[Object]] -> [[Object]] assignObjectsToOctreeBoxes' objs (box:boxes) (x:xs) = assignObjectsToOctreeBoxes' remainingObjects boxes (matchedObjects : x : xs) where (matchedObjects, remainingObjects) = objectsIntersectingBox objs box assignObjectsToOctreeBoxes' _ [] currentList = currentList assignObjectsToOctreeBoxes' objs (box:boxes) [] = assignObjectsToOctreeBoxes' remainingObjects boxes [matchedObjects] where (matchedObjects, remainingObjects) = objectsIntersectingBox objs box -- Generate the list of objects for each bounding box assignObjectsToOctreeBoxes :: [Object] -> [AABB] -> [[Object]] assignObjectsToOctreeBoxes objects boxes = assignObjectsToOctreeBoxes' objects boxes [] -- Make children using an octree algorithm generateSceneGraphUsingOctree :: [Object] -> [[Object]] generateSceneGraphUsingOctree (obj:objs) | not (boundingBoxValid nodeBox) = error "Invalid bounding box" | otherwise = onlyPopulatedBoxes where nodeBox = objectListBoundingBox (obj:objs) octreeBoxes = splitBoxIntoOctreeChildren nodeBox objsPerOctreeBox = assignObjectsToOctreeBoxes (obj:objs) octreeBoxes onlyPopulatedBoxes = filter (\x -> length x > 0) objsPerOctreeBox generateSceneGraphUsingOctree [] = []
TomHammersley/HaskellRenderer
app/src/Octree.hs
gpl-2.0
7,431
0
14
2,083
2,182
1,152
1,030
109
9
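A rough usage sketch for the octree above. The Vector constructor, the AABB pair representation, and the BoundingBox module name are all assumptions read off the pattern matches and imports in the record, so treat this as illustrative only.

module OctreeDemo where

import Vector (Vector (..))          -- assumed to export the 4-component constructor
import BoundingBox (AABB)            -- assumed: type AABB = (Vector, Vector)
import Octree (Octree, create, insert, gather)

-- An axis-aligned box from the origin to (10,10,10); the fourth component
-- follows the homogeneous-coordinate style used in the record above.
demoBox :: AABB
demoBox = (Vector 0 0 0 1, Vector 10 10 10 1)

-- Insert one payload and query a small sphere around it.
demoTree :: Octree Int
demoTree = insert (Vector 1 1 1 1) 42 (create demoBox)

nearby :: [(Int, Double)]
nearby = gather (Vector 1 1 1 1) 2 demoTree   -- should yield [(42, 0.0)]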
-- -- -- (C) 2011-16 Nicola Bonelli <[email protected]> -- -- This program is free software; you can redistribute it and/or modify -- it under the terms of the GNU General Public License as published by -- the Free Software Foundation; either version 2 of the License, or -- (at your option) any later version. -- -- This program is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -- GNU General Public License for more details. -- -- You should have received a copy of the GNU General Public License -- along with this program; if not, write to the Free Software Foundation, -- Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -- -- The full GNU General Public License is included in this distribution in -- the file called "COPYING". import Control.Monad (unless, forM_) import Distribution.Simple import Distribution.Simple.Setup(InstallFlags(..)) import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..)) import Distribution.PackageDescription (PackageDescription(..)) import System.Environment import System.Directory import System.FilePath.Posix haskellFiles = [ "Main.hs", "Daemon.hs", "Options.hs", "Config.hs", "PFQDaemon.hs" ] main = defaultMainWithHooks $ simpleUserHooks { postInst = pfqdInstall } pfqdInstall :: Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO () pfqdInstall args _ _ _ = do path <- getAppUserDataDirectory "pfqd" >>= mkDirectoryIfNotExist putStrLn $ "Installing haskell files in " ++ path forM_ haskellFiles $ \file -> copyFile ("src" </> file) (path </> file) putStrLn "Done." mkDirectoryIfNotExist :: FilePath -> IO FilePath mkDirectoryIfNotExist path = doesDirectoryExist path >>= \b -> unless b (createDirectory path) >> return path
pfq/PFQ
user/pfqd/Setup.hs
gpl-2.0
1,909
0
11
341
291
167
124
20
1
module Handler.Server where import Import import Service.Interface (get_task_types) getServerR :: ServerUrl -> Handler Html getServerR server = do aufgabenTypen <- lift $ liftM (map taskTreeToTextTree) $ get_task_types $ unpack server defaultLayout $ do addStylesheet $ StaticR css_tree_css $(widgetFile "server") unterbaum :: ServerUrl -> Tree Text -> Maybe Text -> Widget unterbaum server baum mName = do inputId <- newIdent $(widgetFile "baum")
marcellussiegburg/autotool
yesod/Handler/Server.hs
gpl-2.0
467
0
13
80
149
71
78
-1
-1
-- Haskell Practical 3 Lexer for fuller propositional calculus
-- By James Cowgill

module Prac3.Lexer where

import Data.Char

-- Lexer tokens
data Token =
    TokTrue   |
    TokFalse  |
    TokNegate |
    TokAnd    |
    TokOr     |
    TokImplies |
    TokEqual  |
    TokLeft   |
    TokRight  |
    TokVar String
    deriving (Eq, Show)

-- Converts single characters into a token
lexerSingle :: Char -> Token
lexerSingle 'T' = TokTrue
lexerSingle 'F' = TokFalse
lexerSingle '-' = TokNegate
lexerSingle '*' = TokAnd
lexerSingle '+' = TokOr
lexerSingle '>' = TokImplies
lexerSingle '=' = TokEqual
lexerSingle '(' = TokLeft
lexerSingle ')' = TokRight
lexerSingle c   = error ("lexical error: " ++ [c])

-- Simple lexer for propositional calculus
lexer :: String -> [Token]
lexer [] = []
lexer (x:xs)
    | isSpace x  = lexer xs
    | isVariable = TokVar vFst : lexer vSnd
    | otherwise  = lexerSingle x : lexer xs
    where
        (vFst, vSnd) = span (isAsciiLower) (x:xs)
        isVariable   = not (null vFst)
jcowgill/cs-work
syac/compilers/Prac3/Lexer.hs
gpl-3.0
1,172
0
9
408
297
157
140
33
1
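A small check of the lexer above on a formula that exercises most token kinds.

module Main (main) where

import Prac3.Lexer (Token (..), lexer)

main :: IO ()
main = print (lexer "-(p+q)>r")
-- expected: [TokNegate,TokLeft,TokVar "p",TokOr,TokVar "q",TokRight,TokImplies,TokVar "r"]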
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE TypeFamilies #-} module Utils where import Data.Aeson (ToJSON(..), Value(..), FromJSON) import Data.Aeson.Types (emptyObject) import Data.HashMap.Strict (union) import Database.Persist.Sqlite hiding (get, delete) import Network.HTTP.Types (notFound404) import Web.Spock.Safe import qualified Database.Persist.Sqlite as Sql (get, delete) import Models.Base import Types -- | Get a single Entity, sideload any related Entities, & wrap them in -- a JSON object. getAndWrap :: (Sideload r, ToJSON (Entity r), Named (Entity r), PersistEntity r, PersistEntityBackend r ~ SqlBackend) => Key r -> OMRoute ctx m b getAndWrap key = getOr404 key $ \value -> do sideloadedData <- sideloads [key] json $ mergeObjects (toJSON $ JSONList [Entity key value]) sideloadedData -- | Get all Entities, sideload any related Entities, & wrap them in a -- JSON object. listAndWrap :: (Sideload r, Named (Entity r), ToJSON (Entity r), PersistEntity r, PersistEntityBackend r ~ SqlBackend) => [SelectOpt r] -> OMRoute ctx m b listAndWrap ordering = do items <- runSQL $ selectList [] ordering let itemKeys = map (\(Entity k _) -> k) items sideloadedData <- sideloads itemKeys json $ mergeObjects (toJSON $ JSONList items) sideloadedData -- | Update an Entity using a Key & JSON request, wrapping it in an object. updateAndWrap :: (ToJSON (Entity r), Named r, Sideload r, FromJSON r, PersistEntity r, PersistEntityBackend r ~ SqlBackend) => Key r -> OMRoute ctx m b updateAndWrap key = getOr404 key $ \_ -> do JSONObject newItem <- jsonBody' runSQL $ replace key newItem sideloadedData <- sideloads [key] json $ mergeObjects (toJSON $ JSONList [Entity key newItem]) sideloadedData -- | Delete an Entity and return an empty JSON object. deleteAndReturn :: (PersistEntityBackend r ~ SqlBackend, PersistEntity r) => Key r -> OMRoute ctx m b deleteAndReturn key = runSQL (Sql.delete key) >> json emptyObject -- | Get an Entity and perform an action with it, return a 404 if it does -- not exist. getOr404 :: (PersistEntity r, PersistEntityBackend r ~ SqlBackend) => Key r -> (r -> OMRoute ctx m b) -> OMRoute ctx m b getOr404 key action = do maybeValue <- runSQL $ Sql.get key case maybeValue of Nothing -> setStatus notFound404 >> text "not found" Just v -> action v -- | Merge the keys of two objects, preferring the keys in the first -- argument. If Objects are not passed, the first argument is returned. mergeObjects :: (ToJSON a, ToJSON b) => a -> b -> Value mergeObjects a b = case (toJSON a, toJSON b) of (Object o1, Object o2) -> Object $ union o1 o2 (x,_) -> x
Southern-Exposure-Seed-Exchange/Order-Manager-Prototypes
spock/src/Utils.hs
gpl-3.0
3,040
0
15
851
842
432
410
49
2
------------------------------------------------------------------------------- -- AVL (Adelson-Velskii and Landis) Trees -- -- Data Structures. Grado en Informática. UMA. -- Pepe Gallardo, 2011 ------------------------------------------------------------------------------- module AVL ( AVL , empty , isEmpty , size , insert , search , isElem , delete , updateOrInsert , inOrder , preOrder , postOrder , foldInOrder , foldPreOrder , foldPostOrder , minim , maxim , deleteMinim , deleteMaxim , isAVL , mkAVL , height ) where import Data.Maybe(isJust) --import DataStructures.Graphics.DrawTrees --import Test.QuickCheck data AVL a = Empty | Node a Int (AVL a) (AVL a) deriving Show empty :: AVL a empty = Empty isEmpty :: AVL a -> Bool isEmpty Empty = True isEmpty _ = False ------------------------------------------------------------------------------- -- Size ------------------------------------------------------------------------------- size :: AVL a -> Int size Empty = 0 size (Node _ _ lt rt) = 1 + size lt + size rt ------------------------------------------------------------------------------- -- Search ------------------------------------------------------------------------------- search :: (Ord a) => a -> AVL a -> Maybe a search x' Empty = Nothing search x' (Node x h lt rt) | x'<x = search x' lt | x'>x = search x' rt | otherwise = Just x isElem :: (Ord a) => a -> AVL a -> Bool isElem x t = isJust (search x t) -- smart constructor to compute height node :: a -> AVL a -> AVL a -> AVL a node x lt rt = Node x h lt rt where h = 1 + max (height lt) (height rt) height :: AVL a -> Int height Empty = 0 height (Node x h lt rt) = h ------------------------------------------------------------------------------- -- Insertion ------------------------------------------------------------------------------- insert :: (Ord a) => a -> AVL a -> AVL a insert x' Empty = node x' Empty Empty insert x' (Node x h lt rt) | x'<x = balance x (insert x' lt) rt | x'>x = balance x lt (insert x' rt) | otherwise = Node x' h lt rt -- Should not modify key in node updateOrInsert :: (Ord a) => (a -> a) -> a -> AVL a -> AVL a updateOrInsert f x' Empty = node x' Empty Empty updateOrInsert f x' (Node x h lt rt) | x'<x = balance x (updateOrInsert f x' lt) rt | x'>x = balance x lt (updateOrInsert f x' rt) | otherwise = Node (f x) h lt rt -- insert could be written by using updateOrInsert: -- insert x' = updateOrInsert (const x') x' rotR :: AVL a -> AVL a rotR (Node x h (Node lk lh llt lrt) rt) = node lk llt (node x lrt rt) rotL :: AVL a -> AVL a rotL (Node x h lt (Node rk rh rlt rrt)) = node rk (node x lt rlt) rrt rightLeaning :: AVL a -> Bool rightLeaning (Node x h lt rt) = height lt < height rt leftLeaning :: AVL a -> Bool leftLeaning (Node x h lt rt) = height lt > height rt balance :: a -> AVL a -> AVL a -> AVL a balance k lt rt | (lh-rh > 1) && leftLeaning lt = rotR (node k lt rt) | (lh-rh > 1) = rotR (node k (rotL lt) rt) | (rh-lh > 1) && rightLeaning rt = rotL (node k lt rt) | (rh-lh > 1) = rotL (node k lt (rotR rt)) | otherwise = node k lt rt where lh = height lt rh = height rt mkAVL :: (Ord a) => [a] -> AVL a mkAVL xs = foldl (flip insert) empty xs ------------------------------------------------------------------------------- -- Deletion ------------------------------------------------------------------------------- delete :: (Ord a) => a -> AVL a -> AVL a delete x' Empty = Empty delete x' (Node x h lt rt) | x'<x = balance x (delete x' lt) rt | x'>x = balance x lt (delete x' rt) | otherwise = combine lt rt combine :: AVL a -> 
AVL a -> AVL a combine Empty rt = rt combine lt Empty = lt combine lt rt = balance x' lt rt' where (x',rt') = split rt -- removes and returns minimum element from non-empty tree split :: AVL a -> (a,AVL a) split (Node x h Empty rt) = (x,rt) split (Node x h lt rt) = (x',balance x lt' rt) where (x',lt') = split lt deleteMinim :: AVL a -> AVL a deleteMinim Empty = error "deleteMinim on empty tree" deleteMinim (Node x h Empty rt) = rt deleteMinim (Node x h lt rt) = balance x (deleteMinim lt) rt deleteMaxim :: AVL a -> AVL a deleteMaxim Empty = error "deleteMaxim on empty tree" deleteMaxim (Node x h lt Empty) = lt deleteMaxim (Node x h lt rt) = balance x lt (deleteMaxim rt) ------------------------------------------------------------------------------- -- Order ------------------------------------------------------------------------------- minim :: AVL a -> a minim Empty = error "minim on empty tree" minim (Node x h Empty rt) = x minim (Node x h lt rt) = minim lt maxim :: AVL a -> a maxim Empty = error "maxim on empty tree" maxim (Node x h lt Empty) = x maxim (Node x h lt rt) = maxim rt ------------------------------------------------------------------------------- -- Invariants ------------------------------------------------------------------------------- isAVL :: (Ord a) => AVL a -> Bool isAVL Empty = True isAVL (Node x h lt rt) = forAll (<x) lt && forAll (>x) rt && abs (height lt - height rt) < 2 && isAVL lt && isAVL rt where forAll :: (a -> Bool) -> AVL a -> Bool forAll p Empty = True forAll p (Node x h lt rt) = forAll p lt && p x && forAll p rt ------------------------------------------------------------------------------- -- Traversals ------------------------------------------------------------------------------- inOrder :: AVL a -> [a] inOrder t = aux t [] where aux Empty xs = xs aux (Node x h lt rt) xs = aux lt (x : aux rt xs) preOrder :: AVL a -> [a] preOrder t = aux t [] where aux Empty xs = xs aux (Node x h lt rt) xs = x : aux lt (aux rt xs) postOrder :: AVL a -> [a] postOrder t = aux t [] where aux Empty xs = xs aux (Node x h lt rt) xs = aux lt (aux rt (x:xs)) traversal :: ((b -> b) -> (b -> b) -> (b -> b) -> (b -> b)) -> (a -> b -> b) -> b -> AVL a -> b traversal order f z t = aux t z where aux Empty = id aux (Node x h lt rt) = order (f x) (aux lt) (aux rt) foldInOrder :: (a -> b -> b) -> b -> AVL a -> b foldInOrder = traversal (\xf lf rf -> lf . xf . rf) foldPreOrder :: (a -> b -> b) -> b -> AVL a -> b foldPreOrder = traversal (\xf lf rf -> xf . lf . rf) foldPostOrder :: (a -> b -> b) -> b -> AVL a -> b foldPostOrder = traversal (\xf lf rf -> lf . rf . xf)
danipozodg/dependency
AVL.hs
gpl-3.0
7,088
0
12
2,080
2,648
1,331
1,317
142
2
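A short usage sketch for the AVL module above.

module Main (main) where

import AVL

main :: IO ()
main = do
  let t = mkAVL [5, 3, 8, 1, 4 :: Int]
  print (inOrder (delete 3 t))   -- [1,4,5,8]
  print (isAVL (insert 7 t))     -- True
  print (height t)               -- 3 for this five-element tree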
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.DialogFlow.Projects.GetAgent -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Retrieves the specified agent. -- -- /See:/ <https://cloud.google.com/dialogflow-enterprise/ Dialogflow API Reference> for @dialogflow.projects.getAgent@. module Network.Google.Resource.DialogFlow.Projects.GetAgent ( -- * REST Resource ProjectsGetAgentResource -- * Creating a Request , projectsGetAgent , ProjectsGetAgent -- * Request Lenses , pgaParent , pgaXgafv , pgaUploadProtocol , pgaAccessToken , pgaUploadType , pgaCallback ) where import Network.Google.DialogFlow.Types import Network.Google.Prelude -- | A resource alias for @dialogflow.projects.getAgent@ method which the -- 'ProjectsGetAgent' request conforms to. type ProjectsGetAgentResource = "v2" :> Capture "parent" Text :> "agent" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Get '[JSON] GoogleCloudDialogflowV2Agent -- | Retrieves the specified agent. -- -- /See:/ 'projectsGetAgent' smart constructor. data ProjectsGetAgent = ProjectsGetAgent' { _pgaParent :: !Text , _pgaXgafv :: !(Maybe Xgafv) , _pgaUploadProtocol :: !(Maybe Text) , _pgaAccessToken :: !(Maybe Text) , _pgaUploadType :: !(Maybe Text) , _pgaCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ProjectsGetAgent' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pgaParent' -- -- * 'pgaXgafv' -- -- * 'pgaUploadProtocol' -- -- * 'pgaAccessToken' -- -- * 'pgaUploadType' -- -- * 'pgaCallback' projectsGetAgent :: Text -- ^ 'pgaParent' -> ProjectsGetAgent projectsGetAgent pPgaParent_ = ProjectsGetAgent' { _pgaParent = pPgaParent_ , _pgaXgafv = Nothing , _pgaUploadProtocol = Nothing , _pgaAccessToken = Nothing , _pgaUploadType = Nothing , _pgaCallback = Nothing } -- | Required. The project that the agent to fetch is associated with. -- Format: \`projects\/\`. pgaParent :: Lens' ProjectsGetAgent Text pgaParent = lens _pgaParent (\ s a -> s{_pgaParent = a}) -- | V1 error format. pgaXgafv :: Lens' ProjectsGetAgent (Maybe Xgafv) pgaXgafv = lens _pgaXgafv (\ s a -> s{_pgaXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). pgaUploadProtocol :: Lens' ProjectsGetAgent (Maybe Text) pgaUploadProtocol = lens _pgaUploadProtocol (\ s a -> s{_pgaUploadProtocol = a}) -- | OAuth access token. pgaAccessToken :: Lens' ProjectsGetAgent (Maybe Text) pgaAccessToken = lens _pgaAccessToken (\ s a -> s{_pgaAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). 
pgaUploadType :: Lens' ProjectsGetAgent (Maybe Text) pgaUploadType = lens _pgaUploadType (\ s a -> s{_pgaUploadType = a}) -- | JSONP pgaCallback :: Lens' ProjectsGetAgent (Maybe Text) pgaCallback = lens _pgaCallback (\ s a -> s{_pgaCallback = a}) instance GoogleRequest ProjectsGetAgent where type Rs ProjectsGetAgent = GoogleCloudDialogflowV2Agent type Scopes ProjectsGetAgent = '["https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/dialogflow"] requestClient ProjectsGetAgent'{..} = go _pgaParent _pgaXgafv _pgaUploadProtocol _pgaAccessToken _pgaUploadType _pgaCallback (Just AltJSON) dialogFlowService where go = buildClient (Proxy :: Proxy ProjectsGetAgentResource) mempty
brendanhay/gogol
gogol-dialogflow/gen/Network/Google/Resource/DialogFlow/Projects/GetAgent.hs
mpl-2.0
4,687
0
16
1,154
702
410
292
105
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.BigQuery.DataSets.Patch -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Updates information in an existing dataset. The update method replaces -- the entire dataset resource, whereas the patch method only replaces -- fields that are provided in the submitted dataset resource. This method -- supports patch semantics. -- -- /See:/ <https://cloud.google.com/bigquery/ BigQuery API Reference> for @bigquery.datasets.patch@. module Network.Google.Resource.BigQuery.DataSets.Patch ( -- * REST Resource DataSetsPatchResource -- * Creating a Request , dataSetsPatch , DataSetsPatch -- * Request Lenses , dspPayload , dspDataSetId , dspProjectId ) where import Network.Google.BigQuery.Types import Network.Google.Prelude -- | A resource alias for @bigquery.datasets.patch@ method which the -- 'DataSetsPatch' request conforms to. type DataSetsPatchResource = "bigquery" :> "v2" :> "projects" :> Capture "projectId" Text :> "datasets" :> Capture "datasetId" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] DataSet :> Patch '[JSON] DataSet -- | Updates information in an existing dataset. The update method replaces -- the entire dataset resource, whereas the patch method only replaces -- fields that are provided in the submitted dataset resource. This method -- supports patch semantics. -- -- /See:/ 'dataSetsPatch' smart constructor. data DataSetsPatch = DataSetsPatch' { _dspPayload :: !DataSet , _dspDataSetId :: !Text , _dspProjectId :: !Text } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'DataSetsPatch' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'dspPayload' -- -- * 'dspDataSetId' -- -- * 'dspProjectId' dataSetsPatch :: DataSet -- ^ 'dspPayload' -> Text -- ^ 'dspDataSetId' -> Text -- ^ 'dspProjectId' -> DataSetsPatch dataSetsPatch pDspPayload_ pDspDataSetId_ pDspProjectId_ = DataSetsPatch' { _dspPayload = pDspPayload_ , _dspDataSetId = pDspDataSetId_ , _dspProjectId = pDspProjectId_ } -- | Multipart request metadata. dspPayload :: Lens' DataSetsPatch DataSet dspPayload = lens _dspPayload (\ s a -> s{_dspPayload = a}) -- | Dataset ID of the dataset being updated dspDataSetId :: Lens' DataSetsPatch Text dspDataSetId = lens _dspDataSetId (\ s a -> s{_dspDataSetId = a}) -- | Project ID of the dataset being updated dspProjectId :: Lens' DataSetsPatch Text dspProjectId = lens _dspProjectId (\ s a -> s{_dspProjectId = a}) instance GoogleRequest DataSetsPatch where type Rs DataSetsPatch = DataSet type Scopes DataSetsPatch = '["https://www.googleapis.com/auth/bigquery", "https://www.googleapis.com/auth/cloud-platform"] requestClient DataSetsPatch'{..} = go _dspProjectId _dspDataSetId (Just AltJSON) _dspPayload bigQueryService where go = buildClient (Proxy :: Proxy DataSetsPatchResource) mempty
brendanhay/gogol
gogol-bigquery/gen/Network/Google/Resource/BigQuery/DataSets/Patch.hs
mpl-2.0
3,882
0
15
872
470
283
187
74
1
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}

{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}

-- |
-- Module      : Network.Google.Resource.Compute.DiskTypes.Get
-- Copyright   : (c) 2015-2016 Brendan Hay
-- License     : Mozilla Public License, v. 2.0.
-- Maintainer  : Brendan Hay <[email protected]>
-- Stability   : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the specified disk type. Get a list of available disk types by
-- making a list() request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.diskTypes.get@.
module Network.Google.Resource.Compute.DiskTypes.Get
    (
    -- * REST Resource
      DiskTypesGetResource

    -- * Creating a Request
    , diskTypesGet
    , DiskTypesGet

    -- * Request Lenses
    , dtgProject
    , dtgZone
    , dtgDiskType
    ) where

import Network.Google.Compute.Types
import Network.Google.Prelude

-- | A resource alias for @compute.diskTypes.get@ method which the
-- 'DiskTypesGet' request conforms to.
type DiskTypesGetResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "zones" :>
               Capture "zone" Text :>
                 "diskTypes" :>
                   Capture "diskType" Text :>
                     QueryParam "alt" AltJSON :>
                       Get '[JSON] DiskType

-- | Returns the specified disk type. Get a list of available disk types by
-- making a list() request.
--
-- /See:/ 'diskTypesGet' smart constructor.
data DiskTypesGet = DiskTypesGet'
    { _dtgProject :: !Text
    , _dtgZone :: !Text
    , _dtgDiskType :: !Text
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'DiskTypesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dtgProject'
--
-- * 'dtgZone'
--
-- * 'dtgDiskType'
diskTypesGet
    :: Text -- ^ 'dtgProject'
    -> Text -- ^ 'dtgZone'
    -> Text -- ^ 'dtgDiskType'
    -> DiskTypesGet
diskTypesGet pDtgProject_ pDtgZone_ pDtgDiskType_ =
    DiskTypesGet'
    { _dtgProject = pDtgProject_
    , _dtgZone = pDtgZone_
    , _dtgDiskType = pDtgDiskType_
    }

-- | Project ID for this request.
dtgProject :: Lens' DiskTypesGet Text
dtgProject = lens _dtgProject (\ s a -> s{_dtgProject = a})

-- | The name of the zone for this request.
dtgZone :: Lens' DiskTypesGet Text
dtgZone = lens _dtgZone (\ s a -> s{_dtgZone = a})

-- | Name of the disk type to return.
dtgDiskType :: Lens' DiskTypesGet Text
dtgDiskType = lens _dtgDiskType (\ s a -> s{_dtgDiskType = a})

instance GoogleRequest DiskTypesGet where
    type Rs DiskTypesGet = DiskType
    type Scopes DiskTypesGet =
        '["https://www.googleapis.com/auth/cloud-platform",
          "https://www.googleapis.com/auth/compute",
          "https://www.googleapis.com/auth/compute.readonly"]
    requestClient DiskTypesGet'{..} =
        go _dtgProject _dtgZone _dtgDiskType (Just AltJSON) computeService
      where
        go = buildClient (Proxy :: Proxy DiskTypesGetResource) mempty
rueshyna/gogol
gogol-compute/gen/Network/Google/Resource/Compute/DiskTypes/Get.hs
mpl-2.0
3,593
0
16
878
468
280
188
73
1
-----------------------------------------------------------------------------------------
{-| Module      :  ParseEiffel
    Copyright   :  (c) Daan Leijen 2003
    License     :  BSD-style

    Maintainer  :  [email protected]
    Stability   :  provisional
    Portability :  portable

    Parse the wxc Eiffel definition file.
-}
-----------------------------------------------------------------------------------------
module ParseEiffel( parseEiffel ) where

import Data.Char( digitToInt )
import Text.ParserCombinators.Parsec
import qualified Text.ParserCombinators.Parsec.Token as P
import Text.ParserCombinators.Parsec.Language

import Types
import System.Environment ( getEnv )
import System.IO.Error ( catchIOError )

{-----------------------------------------------------------------------------------------
   Testing
-----------------------------------------------------------------------------------------}
test
  = do files <- getDefaultEiffelFiles
       defss <- mapM parseEiffel files
       let defs        = concat defss
           haskellDefs = map show defs
       writeFile "../../wxh/Graphics/UI/WXH/WxcDefs.hs" (unlines haskellDefs)

getDefaultEiffelFiles :: IO [FilePath]
getDefaultEiffelFiles
  = do wxwin <- getEnv "WXWIN" `catchIOError` \err -> return ""
       return [wxwin ++ "/wxc/include/wxc_defs.e"
              ,wxwin ++ "/wxc/ewxw/eiffel/spec/r_2_4/wx_defs.e"]

{-----------------------------------------------------------------------------------------
   Parse Eiffel
-----------------------------------------------------------------------------------------}
parseEiffel :: FilePath -> IO [Def]
parseEiffel fname
  = do putStrLn ("parsing: " ++ fname)
       input <- readFile fname
       defss <- mapM (parseDef fname) (lines input)
       -- putStrLn ("ok.")
       return (concat defss)

parseDef :: FilePath -> String -> IO [Def]
parseDef fname line
  = case parse pdef fname line of
      Left err  -> do putStrLn ("ignore: parse error : " ++ line)
                      return []
      Right mbd -> case mbd of
                     Just d  -> return [d]
                     Nothing -> return []    -- empty line

{-----------------------------------------------------------------------------------------
   Parse a constant definition
-----------------------------------------------------------------------------------------}
-- parse a definition: return Nothing on an empty definition
pdef :: Parser (Maybe Def)
pdef
  = do whiteSpace
       x <- option Nothing (pconstDef <|> pignore)
       eof
       return x

pconstDef :: Parser (Maybe Def)
pconstDef
  = do name <- identifier
       symbol ":"
       tp <- pdefType
       reserved "is"
       (do x <- pdefValue
           return (Just (Def name x tp))
        <|> return Nothing)                 -- external definition
  <?> "constant definition"

pignore
  =   do{ reserved "external"; stringLiteral; return Nothing }
  <|> do{ reserved "alias"; stringLiteral; return Nothing }
  <|> do{ reserved "end"; return Nothing }
  <|> do{ reserved "class"; identifier; return Nothing }
  <|> do{ reserved "feature"; symbol "{"; reserved "NONE"; symbol "}"; return Nothing }
  <?> ""

pdefType :: Parser DefType
pdefType
  =   do reserved "BIT"
         bits <- natural
         return DefMask
  <|> do reserved "INTEGER"
         return DefInt
  <?> "integer type"

pdefValue :: Parser Int
pdefValue
  = lexeme $
    do sign <- option id (do{ symbol "-"; return negate })
       ds <- many1 digit
       base <- option 10 (do{char 'B'; return 2})
       return (sign (convertNum base ds))
  where
    convertNum :: Int -> String -> Int
    convertNum base digits
      = foldl convert 0 digits
      where
        convert x c = base*x + digitToInt c

{-----------------------------------------------------------------------------------------
   The lexer
-----------------------------------------------------------------------------------------}
lexer :: P.TokenParser ()
lexer
  = P.makeTokenParser $
    emptyDef
    { commentStart   = "/*"
    , commentEnd     = "*/"
    , commentLine    = "--"    -- ignore pre-processor stuff, but fail to recognise "//"
    , nestedComments = True
    , identStart     = letter <|> char '_'
    , identLetter    = alphaNum <|> oneOf "_'"
    , caseSensitive  = True
    , reservedNames  = ["is","feature","class","end","NONE","BIT","INTEGER","external","alias"]
    }

whiteSpace    = P.whiteSpace lexer
lexeme        = P.lexeme lexer
symbol        = P.symbol lexer
parens        = P.parens lexer
semi          = P.semi lexer
comma         = P.comma lexer
commaSep      = P.commaSep lexer
identifier    = P.identifier lexer
natural       = P.natural lexer
reserved      = P.reserved lexer

stringLiteral
  = lexeme $
    do char '"'
       many stringChar
       char '"'
       return ()

stringChar
  =   noneOf "\"%\n\v"
  <|> do{ char '%'; anyChar }
thielema/wxhaskell
wxdirect/src/ParseEiffel.hs
lgpl-2.1
4,915
0
16
1,128
1,150
574
576
107
3
{-#LANGUAGE OverloadedStrings#-}

module Data.P440.XML.Instances.ZSO where

import qualified Data.P440.Domain.ZSO as ZSO
import Data.P440.Domain.SimpleTypes
import Data.P440.Domain.ComplexTypes
import Data.P440.XML.XML
import qualified Data.P440.XML.Instances.SimpleTypes
import qualified Data.P440.XML.Instances.ComplexTypes as C
import qualified Data.P440.XML.Instances.ComplexTypesZS as C

instance ToNode ZSO.Файл where
    toNode (ZSO.Файл идЭС версПрог телОтпр должнОтпр фамОтпр запросОст) =
        complex "Файл"
                ["ИдЭС" =: идЭС
                ,"ВерсПрог" =: версПрог
                ,"ТелОтпр" =: телОтпр
                ,"ДолжнОтпр" =: должнОтпр
                ,"ФамОтпр" =: фамОтпр]
                [Single запросОст]

instance ToNode ZSO.ЗапросОст where
    toNode (ZSO.ЗапросОст номЗапр стНКРФ видЗапр основЗапр типЗапр признЗапр
                          датаПоСост датаПодп свНО свПл банкИлиУБР счетИлиКЭСП
                          руководитель) =
        complex "ЗапросОст"
                ["НомЗапр" =: номЗапр
                ,"СтНКРФ" =: стНКРФ
                ,"ВидЗапр" =: видЗапр
                ,"ОсновЗапр" =: основЗапр
                ,"ТипЗапр" =: типЗапр
                ,"ПризнЗапр" =: признЗапр
                ,"ДатаПоСост" =: датаПоСост
                ,"ДатаПодп" =: датаПодп]
                [Single $ C.свНО "СвНО" свНО
                ,Single свПл
                ,Single банкИлиУБР
                ,Sequence счетИлиКЭСП
                ,Single $ C.рукНО "Руководитель" руководитель]

instance ToSequence ZSO.СчетИлиКЭСП where
    toSequence (ZSO.Счет счет) =
        map (\(НомСч номСч) -> complex_ "Счет" ["НомСч" =: номСч]) счет
    toSequence (ZSO.КЭСП кэсп) =
        map (\идКЭСП -> complex_ "КЭСП" ["ИдКЭСП" =: идКЭСП]) кэсп
Macil-dev/440P-old
src/Data/P440/XML/Instances/ZSO.hs
unlicense
2,347
0
12
584
1,102
570
532
42
0
import Control.Applicative

main :: IO ()
main = show . sum . map read . words <$> getLine >>= putStrLn
fabianm/olympiad
2015-2016/round-1/a0.hs
apache-2.0
104
0
9
21
44
22
22
3
1
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}

module Kubernetes.V1.NamespaceList where

import GHC.Generics
import Data.Text
import Kubernetes.Unversioned.ListMeta
import Kubernetes.V1.Namespace
import qualified Data.Aeson

-- | NamespaceList is a list of Namespaces.
data NamespaceList = NamespaceList
  { kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
  , apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
  , metadata :: Maybe ListMeta -- ^ Standard list metadata. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
  , items :: [Namespace] -- ^ Items is the list of Namespace objects in the list. More info: http://releases.k8s.io/HEAD/docs/user-guide/namespaces.md
  } deriving (Show, Eq, Generic)

instance Data.Aeson.FromJSON NamespaceList
instance Data.Aeson.ToJSON NamespaceList
minhdoboi/deprecated-openshift-haskell-api
kubernetes/lib/Kubernetes/V1/NamespaceList.hs
apache-2.0
1,441
0
9
192
125
77
48
19
0
module HelperSequences.A000005Spec (main, spec) where

import Test.Hspec
import HelperSequences.A000005 (a000005)

main :: IO ()
main = hspec spec

spec :: Spec
spec = describe "A000005" $
  it "correctly computes the first 20 elements" $
    take 20 (map a000005 [1..]) `shouldBe` expectedValue
  where
    expectedValue = [1,2,2,3,2,4,2,4,3,4,2,6,2,4,4,5,2,6,2,6]
peterokagey/haskellOEIS
test/HelperSequences/A000005Spec.hs
apache-2.0
365
0
10
59
160
95
65
10
1
-- |This module exports functions for testing the compiler on Flapjax source
-- files.
module Test.FileTests(compileFlapjaxFile,compileFlapjaxFilesIn) where

import System.IO
import System.Directory
import Text.ParserCombinators.Parsec(ParseError,parseFromFile)
import Html.Parser(parse)
import Flapjax.Compiler(compilePage,defaults)
import Computation(Result(..),runComputation)

suffixOf:: Eq a => [a] -> [a] -> Bool
suffixOf suffix string =
  if suffixLen > stringLen then False else suffix' == suffix
  where stringLen = length string
        suffixLen = length suffix
        suffix' = drop (stringLen-suffixLen) string

withFlapjaxFile:: (FilePath -> IO ()) -> FilePath -> IO ()
withFlapjaxFile action path = do
  exists <- doesFileExist path
  (if exists && (".fj" `suffixOf` path)
    then action path
    else putStr ("Ignoring " ++ path ++ "\n") >> return ())

compileFlapjaxFile:: FilePath -> IO ()
compileFlapjaxFile path = do
  htmlOrError <- parseFromFile parse path
  (case htmlOrError of
    (Left err) -> putStr ("Parse error in " ++ path ++ ":\n" ++ (show err) ++ "\n")
    (Right html) -> do
      (Success _ html) <- runComputation (compilePage defaults html)
      writeFile (path ++ ".html") (show html))

compileFlapjaxFilesIn:: FilePath -> IO ()
compileFlapjaxFilesIn path = do
  files <- getDirectoryContents path
  putStr $ show (length files) ++ " items in " ++ path ++ "...\n"
  mapM_ (withFlapjaxFile compileFlapjaxFile) (map ((path ++ "/") ++) files)
brownplt/ovid
src/Test/FileTests.hs
bsd-2-clause
1,531
0
16
310
504
262
242
32
2
-- 1258
import Data.Function(on)
import Data.List(group, permutations, sort, sortBy)
import Data.Ratio(denominator, numerator)

invnum = -101010101

myadd a b
    | a == invnum || b == invnum = invnum
    | otherwise = a + b
mysub a b
    | a == invnum || b == invnum = invnum
    | otherwise = a - b
mymul a b
    | a == invnum || b == invnum = invnum
    | otherwise = a * b
mydiv a b
    | a == invnum || b == invnum || b == 0 = invnum
    | otherwise = a / b

genDigits = [(a,b,c,d) | a <- [1..9], b <- [a+1..9], c <- [b+1..9], d <- [c+1..9]]

genOps = [(x,y,z) | x <- ops, y <- ops, z <- ops]
    where ops = [myadd, mysub, mymul, mydiv]

evalTree1 (x,y,z) (a,b,c,d) = x a (y b (z c d))
evalTree2 (x,y,z) (a,b,c,d) = x a (y (z b c) d)
evalTree3 (x,y,z) (a,b,c,d) = x (y (z a b) c) d
evalTree4 (x,y,z) (a,b,c,d) = x (y a (z b c)) d
evalTree5 (x,y,z) (a,b,c,d) = x (y a b) (z c d)

tuplePerms (a,b,c,d) = [(w,x,y,z) | [w,x,y,z] <- permutations [a,b,c,d]]

evalAllTrees xs = [v | ops <- genOps, ys <- tuplePerms xs, f <- fs, let r = f ops ys, denominator r == 1, let v = numerator r, v >= 1]
    where fs = [evalTree1, evalTree2, evalTree3, evalTree4, evalTree5]

runLength xs = (runLen, xs)
    where getRun = map head $ group $ sort $ evalAllTrees xs
          runLen = length $ takeWhile id $ zipWith (==) [1..] getRun

bestRunLength = numerator $ a*1000 + b*100 + c*10 + d
    where getBest = last $ sortBy (compare `on` fst) $ map runLength genDigits
          (a,b,c,d) = snd getBest

main = putStrLn $ show $ bestRunLength
higgsd/euler
hs/93.hs
bsd-2-clause
1,601
0
12
446
970
524
446
37
1
{-# OPTIONS_GHC -fwarn-unused-imports #-}
module Instances () where

import Control.Monad.IO.Control
import Data.Enumerator
import Control.Monad.IO.Class
import Control.Exception.Control

instance MonadIO m => MonadControlIO (Iteratee a m) where
  liftControlIO f = liftIO $ f run'
    where
      run' iter = return $ Iteratee $ do
        stp <- runIteratee iter
        case stp of
          Error exc -> throwIO exc
          s         -> return s
konn/konnfav
Instances.hs
bsd-2-clause
451
0
14
114
127
66
61
13
0
-- vim: sw=2: ts=2: set expandtab:
{-# LANGUAGE TemplateHaskell, ScopedTypeVariables, FlexibleInstances,
             MultiParamTypeClasses, FlexibleContexts, UndecidableInstances,
             OverloadedStrings, CPP #-}
-----------------------------------------------------------------------------
--
-- Module      : Syntax
-- Copyright   : BSD
-- License     : AllRightsReserved
--
-- Maintainer  : Ki Yung Ahn
-- Stability   :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------

module Syntax ( KiName, Ki(..)
              , TyName, Ty(..)
              , TmName, Tm(..)
              , IxMap
              ) where

import Unbound.LocallyNameless hiding (Con)

import GHC.Exts( IsString(..) )

type KiName = Name Ki
type TyName = Name Ty
type TmName = Name Tm

data Ki = KVar KiName
        | Star
        | KArr Ki Ki

data Ty = TVar TyName
        | TCon TyName
        | TArr Ty Ty
        | TApp Ty Ty
        | TFix Ty -- Ty must be TCon or applications headed TCon

data Tm = Var TmName
        | Con TmName
        | In Integer Tm
        | MPr (Bind (TmName,TmName) Tm) -- Tm must be Alt
        | Lam (Bind TmName Tm)
        | App Tm Tm
        | Let (Bind (TmName, Embed Tm) Tm)
        | Alt (Maybe IxMap) [(TmName, Bind [TmName] Tm)]

type IxMap = Bind [TyName] Ty

$(derive [''Ki, ''Ty, ''Tm])

instance Rep a => IsString (Name a) where
  fromString = string2Name

-- Alpha and Sbust instances are in Parser module
-- in order to avoid mutually recursive module imports
-- since Show class instantces for Ki, Ty, Tm depends on LBNF functions
kyagrd/micronax
src/Syntax.hs
bsd-2-clause
1,567
0
10
388
340
205
135
41
0
-- | This module implements various functions that return a probabilistic result,
-- defined as unitary operators, and quantum computations.
module QIO.QIORandom where

import Data.Monoid as Monoid
import QIO.QioSyn
import QIO.Qdata
import QIO.Qio
import Data.Complex

-- | The exponentiated Pauli-X rotation
rX :: RR -> Rotation
rX r (x,y) = if x==y then (cos (r/2):+0) else (0:+ (-(sin (r/2))))

-- | The exponentiated Pauli-Y rotation
rY :: RR -> Rotation
rY r (x,y) = if x==y then (cos (r/2):+0) else (s * sin (r/2):+0)
    where s = if x then 1 else -1

-- | Applies a Hadamard rotation to each qubit in the given list of qubits
hadamards :: [Qbit] -> U
hadamards [] = mempty
hadamards (q:qs) = uhad q `mappend` hadamards qs

-- | returns the highest integer power of 2 that is less than or equal to \x\
pow2 :: Int -> Int
pow2 x = pow2' 0
    where pow2' y | 2^(y+1) > x = 2^y
                  | otherwise = pow2' (y+1)

-- | A rotation that, given a qubit in state 0, leaves it in a super-position of
-- 0 and 1, such that the probability of measuring as state 0 is \ps\.
weightedU :: RR -> Qbit -> U
weightedU ps q
    | sqrt ps <= 1 = rot q (rX (2*(acos (sqrt ps))))
    | otherwise = error ("weightedU: Invalid Probability: " ++ show ps)

-- | A QIO computation that uses the "weightedU" unitary, to return a Bool that
-- has a probablity of \pf\ of being False.
weightedBool :: RR -> QIO Bool
weightedBool pf = do
    q <- mkQbit False
    applyU (weightedU pf q)
    measQ q

-- | removes any leading Falses from a list of booleans
rlf :: [Bool] -> [Bool]
rlf (False:bs) = rlf bs
rlf bs = bs

-- | removes any leading Falses from the (big-endian) bit-wise representation
-- of the given Int.
rlf_l :: Int -> [Bool]
rlf_l x = rlf (reverse (int2bits x))

-- | returns the number of bits left after calling the "flf_l" function
rlf_n :: Int -> Int
rlf_n x = length (rlf_l x)

-- | Given an Int \max\ that is the largest number required to be represented in
-- a quantum register, this function trims the front off the given register, to
-- leave the number of qubits required to represent \max\.
trim :: Int -> [Qbit] -> [Qbit]
trim max qbs = drop ((length qbs)-(rlf_n max)) qbs

-- | Given an Int \max\, and a quantum register in the state \max\, this function
-- defines a unitary operation that will leave the quantum register in state that
-- has equal probability of being measured in any of the states 0 to \max\.
randomU :: Int -> [Qbit] -> U
randomU max qbs = randomU' max (trim max qbs)
    where randomU' _ [] = mempty
          randomU' 0 _ = mempty
          randomU' max (q:qbs) =
              weightedU (fromIntegral ((max+1)-p)/fromIntegral (max+1)) q
              `mappend`
              condQ q (\x -> if x then (randomU (max-p) qbs) else (hadamards qbs))
              where p = pow2 max

-- | A quantum computation that will return a quantum integer in a state that
-- has equal probabilities of being measured in any of the state 0 to \max\.
randomQInt :: Int -> QIO QInt
randomQInt max = do
    qbs <- mkQ (reverse (int2bits max))
    applyU (randomU max qbs)
    return (QInt (reverse qbs))

-- | A quantum computation that will return a quantum integer in a state that
-- has equal probabilities of being measured in any of the state \min\ to \max\.
randomQIO :: (Int,Int) -> QIO Int
randomQIO (min,max) = do
    q <- randomInt (max-min)
    return (q + min)

-- | A quantum computation that measures the outcome of "randomQInt"
randomInt :: Int -> QIO Int
randomInt max = do
    q <- randomQInt max
    measQ q

-- | A quantum computation that returns an integer that is equally likely to be
-- any number in the range 0 to \x\-1
random :: Int -> QIO Int
random x = randomInt (x-1)

-- | This function uses a Quantum computation to simulate the roll of a dice
dice :: IO Int
dice = do
    x <- run (randomInt 5)
    return (x+1)

-- | This function simulates the given number of repitions of dice rolls
dice_rolls :: Int -> IO [Int]
dice_rolls 0 = return []
dice_rolls y = do
    x <- dice
    xs <- dice_rolls (y-1)
    return (x:xs)

-- | Returns the number of occurences of 1 through 6 in the given list of Ints
occs :: [Int] -> (Int,Int,Int,Int,Int,Int)
occs rs = (rs' 1,rs' 2,rs' 3,rs' 4,rs' 5,rs' 6)
    where rs' x = length ([y|y<-rs,y==x])

-- | Returns the number of occurences of 1 through 6 in the given number of
-- rolls of the dice.
probs' :: Int -> IO (Int,Int,Int,Int,Int,Int)
probs' x = do
    xs <- dice_rolls x
    return (occs xs)

-- | Returns the percentage of occurences of 1 through 6, after the given number
-- of rolls of the dice.
probs :: Int -> IO (RR,RR,RR,RR,RR,RR)
probs x = do
    (a,b,c,d,e,f) <- probs' x
    return (fromIntegral a/x',fromIntegral b/x',fromIntegral c/x',fromIntegral d/x',fromIntegral e/x',fromIntegral f/x')
  where x' = fromIntegral x
alexandersgreen/qio-haskell
QIO/QIORandom.hs
bsd-2-clause
4,890
2
15
1,143
1,448
764
684
78
4
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# OPTIONS -Wall #-} module Main ( main ) where import Control.Monad (foldM) import Control.Monad.Trans (liftIO) import qualified Data.ByteString.Char8 as C import Data.Default import qualified Data.IntMap as IM import Data.Time.Clock (getCurrentTime) import System.Console.GetOpt import UI.Command import Data.ZoomCache import Data.ZoomCache.Dump import Data.ZoomCache.Multichannel() ------------------------------------------------------------ data Config = Config { noRaw :: Bool , channels :: Int , wmLevel :: Int , track :: TrackNo , intData :: Bool , variable :: Bool , spec :: TrackSpec } instance Default Config where def = defConfig defConfig :: Config defConfig = Config { noRaw = False , channels = 1 , wmLevel = 1024 , track = 1 , intData = False , variable = False , spec = def { specDeltaEncode = False , specZlibCompress = False , specName = "gen" } } data Option = NoRaw | Channels String | Watermark String | Track String | Delta | ZLib | Variable | IntData | Rate String | Label String deriving (Eq) options :: [OptDescr Option] options = genOptions genOptions :: [OptDescr Option] genOptions = [ Option ['z'] ["no-raw"] (NoArg NoRaw) "Do NOT include raw data in the output" , Option ['c'] ["channels"] (ReqArg Channels "channels") "Set number of channels" , Option ['w'] ["watermark"] (ReqArg Watermark "watermark") "Set high-watermark level" , Option ['t'] ["track"] (ReqArg Track "trackNo") "Set or select track number" , Option ['d'] ["delta"] (NoArg Delta) "Delta-encode data" , Option ['Z'] ["zlib"] (NoArg ZLib) "Zlib-compress data" , Option ['b'] ["variable"] (NoArg Variable) "Generate variable-rate data" , Option ['i'] ["integer"] (NoArg IntData) "Generate integer data" , Option ['r'] ["rate"] (ReqArg Rate "data-rate") "Set track rate" , Option ['l'] ["label"] (ReqArg Label "label") "Set track label" ] processArgs :: [String] -> IO (Config, [String]) processArgs args = do case getOpt RequireOrder options args of (opts, args', [] ) -> do config <- processConfig def opts return (config, args') (_, _, _:_) -> return (def, args) processConfig :: Config -> [Option] -> IO Config processConfig = foldM processOneOption where processOneOption config NoRaw = do return $ config {noRaw = True} processOneOption config (Channels s) = do return $ config {channels = read s} processOneOption config (Watermark s) = do return $ config {wmLevel = read s} processOneOption config (Track s) = do return $ config {track = read s} processOneOption config Delta = do return $ config { spec = (spec config){specDeltaEncode = True} } processOneOption config ZLib = do return $ config { spec = (spec config){specZlibCompress = True} } processOneOption config Variable = do return $ config { variable = True , spec = (spec config){specSRType = VariableSR} } processOneOption config IntData = do return $ config { intData = True , spec = setCodec (undefined :: Int) (spec config) } processOneOption config (Rate s) = do return $ config { spec = (spec config){specRate = fromInteger $ read s} } processOneOption config (Label s) = do return $ config { spec = (spec config){specName = C.pack s} } ------------------------------------------------------------ zoomGen :: Command () zoomGen = defCmd { cmdName = "gen" , cmdHandler = zoomGenHandler , cmdCategory = "Writing" , cmdShortDesc = "Generate zoom-cache data" , cmdExamples = [("Generate a file called foo.zoom", "foo.zoom")] } zoomGenHandler :: App () () zoomGenHandler = do (config, 
filenames) <- liftIO . processArgs =<< appArgs liftIO $ mapM_ (zoomWriteFile config) filenames zoomWriteFile :: Config -> FilePath -> IO () zoomWriteFile Config{..} path | intData = w ints | otherwise = w doubles where w :: (ZoomReadable a, ZoomWrite a, ZoomWritable a, ZoomWrite (SampleOffset, a)) => [a] -> IO () w d | variable && channels == 1 = writeData (sW >> mapM_ (write track) (zip (map SO [1,3..]) d)) | channels == 1 = writeData (sW >> mapM_ (write track) d) | variable = writeData (sW >> mapM_ (write track) (zip (map SO [1,3..]) (map (replicate channels) d))) | otherwise = writeData (sW >> mapM_ (write track) (map (replicate channels) d)) writeData ds = do now <- getCurrentTime withFileWrite trackMap (Just now) (not noRaw) ds path sW = setWatermark track wmLevel trackMap = IM.singleton track spec' spec' | channels == 1 && intData = setCodec (undefined :: Int) spec | channels == 1 = setCodec (undefined :: Double) spec | intData = setCodecMultichannel channels (undefined :: Int) spec | otherwise = setCodecMultichannel channels (undefined :: Double) spec ------------------------------------------------------------ doubles :: [Double] doubles = take 10000000 $ map ((* 1000.0) . sin) [0.0, 0.01 ..] ints :: [Int] ints = map round doubles ------------------------------------------------------------ zoomInfo :: Command () zoomInfo = defCmd { cmdName = "info" , cmdHandler = zoomInfoHandler , cmdCategory = "Reading" , cmdShortDesc = "Display basic info about a zoom-cache file" , cmdExamples = [("Display info about foo.zoom", "foo.zoom")] } zoomInfoHandler :: App () () zoomInfoHandler = mapM_ (liftIO . zoomInfoFile standardIdentifiers) =<< appArgs ------------------------------------------------------------ zoomDump :: Command () zoomDump = defCmd { cmdName = "dump" , cmdHandler = zoomDumpHandler , cmdCategory = "Reading" , cmdShortDesc = "Read zoom-cache data" , cmdExamples = [("Yo", "")] } zoomDumpHandler :: App () () zoomDumpHandler = do (config, filenames) <- liftIO . processArgs =<< appArgs mapM_ (liftIO . zoomDumpFile standardIdentifiers (track config)) filenames ------------------------------------------------------------ zoomSummary :: Command () zoomSummary = defCmd { cmdName = "summary" , cmdHandler = zoomSummaryHandler , cmdCategory = "Reading" , cmdShortDesc = "Read zoom-cache summary data" , cmdExamples = [("Read summary level 3 from foo.zoom", "3 foo.zoom")] } zoomSummaryHandler :: App () () zoomSummaryHandler = do (config, filenames) <- liftIO . processArgs =<< appArgs liftIO . (f (track config)) $ filenames where f trackNo (lvl:paths) = mapM_ (zoomDumpSummaryLevel (read lvl) standardIdentifiers trackNo) paths f _ _ = putStrLn "Usage: zoom-cache summary n file.zoom" ------------------------------------------------------------ -- The Application -- zoom :: Application () () zoom = def { appName = "zoom" , appVersion = "0.1" , appAuthors = ["Conrad Parker"] , appBugEmail = "[email protected]" , appShortDesc = "Trivial zoom-cache inspection tools" , appLongDesc = longDesc , appCategories = ["Reading", "Writing"] , appSeeAlso = [""] , appProject = "Zoom" , appCmds = [ zoomGen , zoomInfo , zoomDump , zoomSummary ] } longDesc :: String longDesc = "Manipulate zoom-cache files" ------------------------------------------------------------ -- Main -- main :: IO () main = appMain zoom
kfish/zoom-cache
tools/zoom-cache.hs
bsd-2-clause
8,530
2
17
2,655
2,298
1,266
1,032
198
10
{-# LANGUAGE FlexibleContexts #-}
module CubicleMaze (solve) where

import Data.Bits (popCount)
import Data.Maybe (fromJust)
import Data.List (foldl')
import qualified Data.OrdPSQ as PSQ
import qualified Data.Set as Set
import Text.Parsec.Prim (Stream, ParsecT, parse)
import Text.Parsec.Char (digit, endOfLine)
import Text.Parsec.Combinator (many1, eof)

type Maze = (Int, Int) -> Bool

designersNumber :: Stream s m Char => ParsecT s u m Int
designersNumber = read <$> (many1 digit <* endOfLine <* eof)

maze :: Int -> Maze
maze c (x, y) = even . popCount $ x*x + 3*x + 2*x*y + y + y*y + c

neighbors :: (Int, Int) -> [(Int, Int)]
neighbors (x, y) = filter (\(a, b) -> a >= 0 && b >= 0)
                     [(x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)]

openSpaceNeighbors :: Maze -> (Int, Int) -> [(Int, Int)]
openSpaceNeighbors m xy = filter m $ neighbors xy

ucs :: Maze -> (Int, Int) -> PSQ.OrdPSQ (Int, Int) Int () -> Set.Set (Int, Int) -> Int
ucs m dst pq visited
  | minK == dst = minP
  | otherwise = ucs m dst newQ (Set.insert minK visited)
  where
    (minK, minP, _) = fromJust . PSQ.findMin $ pq
    newQ = foldl' (\q n -> PSQ.insert n (minP + 1) () q) (PSQ.deleteMin pq) $
             openSpaceNeighbors m minK

shortestPathLength :: Maze -> (Int, Int) -> (Int, Int) -> Int
shortestPathLength m src dst = ucs m dst (PSQ.singleton src 0 ()) Set.empty

possibleDestinations :: Maze -> (Int, Int) -> Int -> Set.Set (Int, Int)
possibleDestinations m src n = iterate stepOnce (Set.singleton src) !! n
  where
    stepOnce xs = Set.foldl' (\acc x -> Set.union acc (Set.fromList . openSpaceNeighbors m $ x)) xs xs

solve :: String -> IO ()
solve input = do
  let parsed = parse designersNumber "" input
  case parsed of
    Left err -> print err
    Right favoriteNumber -> do
      let cubicleMaze = maze favoriteNumber
      let pathLength = shortestPathLength cubicleMaze (1, 1) (31, 39)
      print pathLength
      let uniqDests = possibleDestinations cubicleMaze (1, 1) 50
      print . Set.size $ uniqDests
cjlarose/advent-2016
src/CubicleMaze.hs
bsd-3-clause
2,012
0
16
432
926
497
429
40
2
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}

module Hassistant.Header where

import qualified GHC
import qualified Exception
import qualified DynFlags
import qualified Util
import qualified Outputable
import qualified GHC.Paths

import Control.Applicative

import qualified Data.Text as T
import qualified Data.Attoparsec.Text as A
import Data.Maybe(catMaybes)
import Data.List(sort,nub)
import Data.Either (rights)
import Data.Int(Int32)

import Hassistant.Common
import Hassistant.Parser

imports :: T.Text -> [String]
imports = go [] . dropWhile (not . importLine). rights . map (A.parseOnly dropCommentP) . T.lines
  where
    importLine ('i':'m':'p':'o':'r':'t':o) = null o || head o == ' '
    importLine _ = False

    go a [] = [unlines $ reverse a]
    go a (l:ls) | null l = go a ls
                | ' ' == head l = go (l:a) ls
                | importLine l = (unlines $ reverse a) : go [l] ls
                | otherwise = [unlines $ reverse a]

parseImports :: String -> GHC.Ghc (Maybe (GHC.ImportDecl GHC.RdrName))
parseImports i = (Just <$> GHC.parseImportDecl i) `Exception.gcatch` handler
  where
    handler (_::Exception.SomeException) = return Nothing

calcHash :: T.Text -> IO Int32
calcHash cont = GHC.runGhc (Just GHC.Paths.libdir) $ do
    dyn <- GHC.getSessionDynFlags
    imps <- sort . map (Outputable.showPpr dyn) . catMaybes <$> mapM parseImports (imports cont)
    let langs = map T.unpack . nub . sort $ languages cont
    return . Util.hashString $ unlines (langs ++ imps)

listLANGAUGE :: [Candidate]
listLANGAUGE = let obj s = (candidate $ T.pack s) { menu = Just "LANGAUGE" }
               in concatMap (\(s,_,_) -> [obj s, obj $ "No" ++ s]) DynFlags.xFlags

languages :: T.Text -> [T.Text]
languages = concat . rights . map (A.parseOnly languageP) . T.lines
philopon/hassistant.vim
src/Hassistant/Header.hs
bsd-3-clause
1,841
0
15
390
724
378
346
43
3
-- Copyright (c) 2014-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file.

{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}

-- |
-- A generic Haxl datasource for performing arbitrary IO concurrently.
-- Every IO operation will be performed in a separate thread.
-- You can use this with any kind of IO, but each different operation
-- requires an instance of the 'ConcurrentIO' class.
--
-- For example, to make a concurrent sleep operation:
--
-- > sleep :: Int -> GenHaxl u Int
-- > sleep n = dataFetch (Sleep n)
-- >
-- > data Sleep
-- > instance ConcurrentIO Sleep where
-- >   data ConcurrentIOReq Sleep a where
-- >     Sleep :: Int -> ConcurrentIOReq Sleep Int
-- >
-- >   performIO (Sleep n) = threadDelay (n*1000) >> return n
-- >
-- > deriving instance Eq (ConcurrentIOReq Sleep a)
-- > deriving instance Show (ConcurrentIOReq Sleep a)
-- >
-- > instance ShowP (ConcurrentIOReq Sleep) where showp = show
-- >
-- > instance Hashable (ConcurrentIOReq Sleep a) where
-- >   hashWithSalt s (Sleep n) = hashWithSalt s n
--
-- Note that you can have any number of constructors in your
-- ConcurrentIOReq GADT, so most of the boilerplate only needs to be
-- written once.

module Haxl.DataSource.ConcurrentIO
  ( mkConcurrentIOState
  , ConcurrentIO(..)
  ) where

import Control.Concurrent
import Control.Exception as Exception
import Control.Monad
import qualified Data.Text as Text
import Data.Typeable

import Haxl.Core

class ConcurrentIO tag where
  data ConcurrentIOReq tag a
  performIO :: ConcurrentIOReq tag a -> IO a

deriving instance Typeable ConcurrentIOReq -- not needed by GHC 7.10 and later

instance (Typeable tag) => StateKey (ConcurrentIOReq tag) where
  data State (ConcurrentIOReq tag) = ConcurrentIOState
  getStateType _ = typeRep (Proxy :: Proxy ConcurrentIOReq)

mkConcurrentIOState :: IO (State (ConcurrentIOReq ()))
mkConcurrentIOState = return ConcurrentIOState

instance Typeable tag => DataSourceName (ConcurrentIOReq tag) where
  dataSourceName _ =
    Text.pack (show (typeRepTyCon (typeRep (Proxy :: Proxy tag))))

instance (Typeable tag, ShowP (ConcurrentIOReq tag), ConcurrentIO tag)
         => DataSource u (ConcurrentIOReq tag) where
  fetch _state _flags _u = BackgroundFetch $ \bfs -> do
    forM_ bfs $ \(BlockedFetch req rv) ->
      mask $ \unmask ->
        forkFinally (unmask (performIO req)) (putResultFromChildThread rv)
simonmar/Haxl
Haxl/DataSource/ConcurrentIO.hs
bsd-3-clause
2,689
0
18
465
406
234
172
36
1
-- | contains a prettyprinter for the -- Template Haskell datatypes module Language.Haskell.TH.Ppr where -- All of the exports from this module should -- be "public" functions. The main module TH -- re-exports them all. import Text.PrettyPrint (render) import Language.Haskell.TH.PprLib import Language.Haskell.TH.Syntax import Data.Word ( Word8 ) import Data.Char ( toLower, chr) import GHC.Show ( showMultiLineString ) import GHC.Lexeme( startsVarSym ) import Data.Ratio ( numerator, denominator ) import Prelude hiding ((<>)) nestDepth :: Int nestDepth = 4 type Precedence = Int appPrec, unopPrec, opPrec, noPrec :: Precedence appPrec = 3 -- Argument of a function application opPrec = 2 -- Argument of an infix operator unopPrec = 1 -- Argument of an unresolved infix operator noPrec = 0 -- Others parensIf :: Bool -> Doc -> Doc parensIf True d = parens d parensIf False d = d ------------------------------ pprint :: Ppr a => a -> String pprint x = render $ to_HPJ_Doc $ ppr x class Ppr a where ppr :: a -> Doc ppr_list :: [a] -> Doc ppr_list = vcat . map ppr instance Ppr a => Ppr [a] where ppr x = ppr_list x ------------------------------ instance Ppr Name where ppr v = pprName v ------------------------------ instance Ppr Info where ppr (TyConI d) = ppr d ppr (ClassI d is) = ppr d $$ vcat (map ppr is) ppr (FamilyI d is) = ppr d $$ vcat (map ppr is) ppr (PrimTyConI name arity is_unlifted) = text "Primitive" <+> (if is_unlifted then text "unlifted" else empty) <+> text "type constructor" <+> quotes (ppr name) <+> parens (text "arity" <+> int arity) ppr (ClassOpI v ty cls) = text "Class op from" <+> ppr cls <> colon <+> ppr_sig v ty ppr (DataConI v ty tc) = text "Constructor from" <+> ppr tc <> colon <+> ppr_sig v ty ppr (PatSynI nm ty) = pprPatSynSig nm ty ppr (TyVarI v ty) = text "Type variable" <+> ppr v <+> equals <+> ppr ty ppr (VarI v ty mb_d) = vcat [ppr_sig v ty, case mb_d of { Nothing -> empty; Just d -> ppr d }] ppr_sig :: Name -> Type -> Doc ppr_sig v ty = pprName' Applied v <+> dcolon <+> ppr ty pprFixity :: Name -> Fixity -> Doc pprFixity _ f | f == defaultFixity = empty pprFixity v (Fixity i d) = ppr_fix d <+> int i <+> ppr v where ppr_fix InfixR = text "infixr" ppr_fix InfixL = text "infixl" ppr_fix InfixN = text "infix" -- | Pretty prints a pattern synonym type signature pprPatSynSig :: Name -> PatSynType -> Doc pprPatSynSig nm ty = text "pattern" <+> pprPrefixOcc nm <+> dcolon <+> pprPatSynType ty -- | Pretty prints a pattern synonym's type; follows the usual -- conventions to print a pattern synonym type compactly, yet -- unambiguously. See the note on 'PatSynType' and the section on -- pattern synonyms in the GHC user's guide for more information. pprPatSynType :: PatSynType -> Doc pprPatSynType ty@(ForallT uniTys reqs ty'@(ForallT exTys provs ty'')) | null exTys, null provs = ppr (ForallT uniTys reqs ty'') | null uniTys, null reqs = noreqs <+> ppr ty' | null reqs = forall uniTys <+> noreqs <+> ppr ty' | otherwise = ppr ty where noreqs = text "() =>" forall tvs = text "forall" <+> (hsep (map ppr tvs)) <+> text "." 
pprPatSynType ty = ppr ty ------------------------------ instance Ppr Module where ppr (Module pkg m) = text (pkgString pkg) <+> text (modString m) instance Ppr ModuleInfo where ppr (ModuleInfo imps) = text "Module" <+> vcat (map ppr imps) ------------------------------ instance Ppr Exp where ppr = pprExp noPrec pprPrefixOcc :: Name -> Doc -- Print operators with parens around them pprPrefixOcc n = parensIf (isSymOcc n) (ppr n) isSymOcc :: Name -> Bool isSymOcc n = case nameBase n of [] -> True -- Empty name; weird (c:_) -> startsVarSym c -- c.f. OccName.startsVarSym in GHC itself pprInfixExp :: Exp -> Doc pprInfixExp (VarE v) = pprName' Infix v pprInfixExp (ConE v) = pprName' Infix v pprInfixExp _ = text "<<Non-variable/constructor in infix context>>" pprExp :: Precedence -> Exp -> Doc pprExp _ (VarE v) = pprName' Applied v pprExp _ (ConE c) = pprName' Applied c pprExp i (LitE l) = pprLit i l pprExp i (AppE e1 e2) = parensIf (i >= appPrec) $ pprExp opPrec e1 <+> pprExp appPrec e2 pprExp i (AppTypeE e t) = parensIf (i >= appPrec) $ pprExp opPrec e <+> char '@' <> pprParendType t pprExp _ (ParensE e) = parens (pprExp noPrec e) pprExp i (UInfixE e1 op e2) = parensIf (i > unopPrec) $ pprExp unopPrec e1 <+> pprInfixExp op <+> pprExp unopPrec e2 pprExp i (InfixE (Just e1) op (Just e2)) = parensIf (i >= opPrec) $ pprExp opPrec e1 <+> pprInfixExp op <+> pprExp opPrec e2 pprExp _ (InfixE me1 op me2) = parens $ pprMaybeExp noPrec me1 <+> pprInfixExp op <+> pprMaybeExp noPrec me2 pprExp i (LamE [] e) = pprExp i e -- #13856 pprExp i (LamE ps e) = parensIf (i > noPrec) $ char '\\' <> hsep (map (pprPat appPrec) ps) <+> text "->" <+> ppr e pprExp i (LamCaseE ms) = parensIf (i > noPrec) $ text "\\case" $$ nest nestDepth (ppr ms) pprExp _ (TupE es) = parens (commaSep es) pprExp _ (UnboxedTupE es) = hashParens (commaSep es) pprExp _ (UnboxedSumE e alt arity) = unboxedSumBars (ppr e) alt arity -- Nesting in Cond is to avoid potential problems in do statements pprExp i (CondE guard true false) = parensIf (i > noPrec) $ sep [text "if" <+> ppr guard, nest 1 $ text "then" <+> ppr true, nest 1 $ text "else" <+> ppr false] pprExp i (MultiIfE alts) = parensIf (i > noPrec) $ vcat $ case alts of [] -> [text "if {}"] (alt : alts') -> text "if" <+> pprGuarded arrow alt : map (nest 3 . pprGuarded arrow) alts' pprExp i (LetE ds_ e) = parensIf (i > noPrec) $ text "let" <+> pprDecs ds_ $$ text " in" <+> ppr e where pprDecs [] = empty pprDecs [d] = ppr d pprDecs ds = braces (semiSep ds) pprExp i (CaseE e ms) = parensIf (i > noPrec) $ text "case" <+> ppr e <+> text "of" $$ nest nestDepth (ppr ms) pprExp i (DoE ss_) = parensIf (i > noPrec) $ text "do" <+> pprStms ss_ where pprStms [] = empty pprStms [s] = ppr s pprStms ss = braces (semiSep ss) pprExp _ (CompE []) = text "<<Empty CompExp>>" -- This will probably break with fixity declarations - would need a ';' pprExp _ (CompE ss) = if null ss' -- If there are no statements in a list comprehension besides the last -- one, we simply treat it like a normal list. 
then text "[" <> ppr s <> text "]" else text "[" <> ppr s <+> bar <+> commaSep ss' <> text "]" where s = last ss ss' = init ss pprExp _ (ArithSeqE d) = ppr d pprExp _ (ListE es) = brackets (commaSep es) pprExp i (SigE e t) = parensIf (i > noPrec) $ ppr e <+> dcolon <+> ppr t pprExp _ (RecConE nm fs) = ppr nm <> braces (pprFields fs) pprExp _ (RecUpdE e fs) = pprExp appPrec e <> braces (pprFields fs) pprExp i (StaticE e) = parensIf (i >= appPrec) $ text "static"<+> pprExp appPrec e pprExp _ (UnboundVarE v) = pprName' Applied v pprExp _ (LabelE s) = text "#" <> text s pprFields :: [(Name,Exp)] -> Doc pprFields = sep . punctuate comma . map (\(s,e) -> ppr s <+> equals <+> ppr e) pprMaybeExp :: Precedence -> Maybe Exp -> Doc pprMaybeExp _ Nothing = empty pprMaybeExp i (Just e) = pprExp i e ------------------------------ instance Ppr Stmt where ppr (BindS p e) = ppr p <+> text "<-" <+> ppr e ppr (LetS ds) = text "let" <+> (braces (semiSep ds)) ppr (NoBindS e) = ppr e ppr (ParS sss) = sep $ punctuate bar $ map commaSep sss ------------------------------ instance Ppr Match where ppr (Match p rhs ds) = ppr p <+> pprBody False rhs $$ where_clause ds ------------------------------ pprGuarded :: Doc -> (Guard, Exp) -> Doc pprGuarded eqDoc (guard, expr) = case guard of NormalG guardExpr -> bar <+> ppr guardExpr <+> eqDoc <+> ppr expr PatG stmts -> bar <+> vcat (punctuate comma $ map ppr stmts) $$ nest nestDepth (eqDoc <+> ppr expr) ------------------------------ pprBody :: Bool -> Body -> Doc pprBody eq body = case body of GuardedB xs -> nest nestDepth $ vcat $ map (pprGuarded eqDoc) xs NormalB e -> eqDoc <+> ppr e where eqDoc | eq = equals | otherwise = arrow ------------------------------ instance Ppr Lit where ppr = pprLit noPrec pprLit :: Precedence -> Lit -> Doc pprLit i (IntPrimL x) = parensIf (i > noPrec && x < 0) (integer x <> char '#') pprLit _ (WordPrimL x) = integer x <> text "##" pprLit i (FloatPrimL x) = parensIf (i > noPrec && x < 0) (float (fromRational x) <> char '#') pprLit i (DoublePrimL x) = parensIf (i > noPrec && x < 0) (double (fromRational x) <> text "##") pprLit i (IntegerL x) = parensIf (i > noPrec && x < 0) (integer x) pprLit _ (CharL c) = text (show c) pprLit _ (CharPrimL c) = text (show c) <> char '#' pprLit _ (StringL s) = pprString s pprLit _ (StringPrimL s) = pprString (bytesToString s) <> char '#' pprLit i (RationalL rat) = parensIf (i > noPrec) $ integer (numerator rat) <+> char '/' <+> integer (denominator rat) bytesToString :: [Word8] -> String bytesToString = map (chr . fromIntegral) pprString :: String -> Doc -- Print newlines as newlines with Haskell string escape notation, -- not as '\n'. For other non-printables use regular escape notation. 
pprString s = vcat (map text (showMultiLineString s)) ------------------------------ instance Ppr Pat where ppr = pprPat noPrec pprPat :: Precedence -> Pat -> Doc pprPat i (LitP l) = pprLit i l pprPat _ (VarP v) = pprName' Applied v pprPat _ (TupP ps) = parens (commaSep ps) pprPat _ (UnboxedTupP ps) = hashParens (commaSep ps) pprPat _ (UnboxedSumP p alt arity) = unboxedSumBars (ppr p) alt arity pprPat i (ConP s ps) = parensIf (i >= appPrec) $ pprName' Applied s <+> sep (map (pprPat appPrec) ps) pprPat _ (ParensP p) = parens $ pprPat noPrec p pprPat i (UInfixP p1 n p2) = parensIf (i > unopPrec) (pprPat unopPrec p1 <+> pprName' Infix n <+> pprPat unopPrec p2) pprPat i (InfixP p1 n p2) = parensIf (i >= opPrec) (pprPat opPrec p1 <+> pprName' Infix n <+> pprPat opPrec p2) pprPat i (TildeP p) = parensIf (i > noPrec) $ char '~' <> pprPat appPrec p pprPat i (BangP p) = parensIf (i > noPrec) $ char '!' <> pprPat appPrec p pprPat i (AsP v p) = parensIf (i > noPrec) $ ppr v <> text "@" <> pprPat appPrec p pprPat _ WildP = text "_" pprPat _ (RecP nm fs) = parens $ ppr nm <+> braces (sep $ punctuate comma $ map (\(s,p) -> ppr s <+> equals <+> ppr p) fs) pprPat _ (ListP ps) = brackets (commaSep ps) pprPat i (SigP p t) = parensIf (i > noPrec) $ ppr p <+> dcolon <+> ppr t pprPat _ (ViewP e p) = parens $ pprExp noPrec e <+> text "->" <+> pprPat noPrec p ------------------------------ instance Ppr Dec where ppr = ppr_dec True ppr_dec :: Bool -- declaration on the toplevel? -> Dec -> Doc ppr_dec _ (FunD f cs) = vcat $ map (\c -> pprPrefixOcc f <+> ppr c) cs ppr_dec _ (ValD p r ds) = ppr p <+> pprBody True r $$ where_clause ds ppr_dec _ (TySynD t xs rhs) = ppr_tySyn empty t (hsep (map ppr xs)) rhs ppr_dec _ (DataD ctxt t xs ksig cs decs) = ppr_data empty ctxt t (hsep (map ppr xs)) ksig cs decs ppr_dec _ (NewtypeD ctxt t xs ksig c decs) = ppr_newtype empty ctxt t (sep (map ppr xs)) ksig c decs ppr_dec _ (ClassD ctxt c xs fds ds) = text "class" <+> pprCxt ctxt <+> ppr c <+> hsep (map ppr xs) <+> ppr fds $$ where_clause ds ppr_dec _ (InstanceD o ctxt i ds) = text "instance" <+> maybe empty ppr_overlap o <+> pprCxt ctxt <+> ppr i $$ where_clause ds ppr_dec _ (SigD f t) = pprPrefixOcc f <+> dcolon <+> ppr t ppr_dec _ (ForeignD f) = ppr f ppr_dec _ (InfixD fx n) = pprFixity n fx ppr_dec _ (PragmaD p) = ppr p ppr_dec isTop (DataFamilyD tc tvs kind) = text "data" <+> maybeFamily <+> ppr tc <+> hsep (map ppr tvs) <+> maybeKind where maybeFamily | isTop = text "family" | otherwise = empty maybeKind | (Just k') <- kind = dcolon <+> ppr k' | otherwise = empty ppr_dec isTop (DataInstD ctxt tc tys ksig cs decs) = ppr_data maybeInst ctxt tc (sep (map pprParendType tys)) ksig cs decs where maybeInst | isTop = text "instance" | otherwise = empty ppr_dec isTop (NewtypeInstD ctxt tc tys ksig c decs) = ppr_newtype maybeInst ctxt tc (sep (map pprParendType tys)) ksig c decs where maybeInst | isTop = text "instance" | otherwise = empty ppr_dec isTop (TySynInstD tc (TySynEqn tys rhs)) = ppr_tySyn maybeInst tc (sep (map pprParendType tys)) rhs where maybeInst | isTop = text "instance" | otherwise = empty ppr_dec isTop (OpenTypeFamilyD tfhead) = text "type" <+> maybeFamily <+> ppr_tf_head tfhead where maybeFamily | isTop = text "family" | otherwise = empty ppr_dec _ (ClosedTypeFamilyD tfhead@(TypeFamilyHead tc _ _ _) eqns) = hang (text "type family" <+> ppr_tf_head tfhead <+> text "where") nestDepth (vcat (map ppr_eqn eqns)) where ppr_eqn (TySynEqn lhs rhs) = ppr tc <+> sep (map pprParendType lhs) <+> text "=" <+> ppr rhs ppr_dec _ 
(RoleAnnotD name roles) = hsep [ text "type role", ppr name ] <+> hsep (map ppr roles) ppr_dec _ (StandaloneDerivD ds cxt ty) = hsep [ text "deriving" , maybe empty ppr_deriv_strategy ds , text "instance" , pprCxt cxt , ppr ty ] ppr_dec _ (DefaultSigD n ty) = hsep [ text "default", pprPrefixOcc n, dcolon, ppr ty ] ppr_dec _ (PatSynD name args dir pat) = text "pattern" <+> pprNameArgs <+> ppr dir <+> pprPatRHS where pprNameArgs | InfixPatSyn a1 a2 <- args = ppr a1 <+> ppr name <+> ppr a2 | otherwise = ppr name <+> ppr args pprPatRHS | ExplBidir cls <- dir = hang (ppr pat <+> text "where") nestDepth (ppr name <+> ppr cls) | otherwise = ppr pat ppr_dec _ (PatSynSigD name ty) = pprPatSynSig name ty ppr_deriv_strategy :: DerivStrategy -> Doc ppr_deriv_strategy ds = text $ case ds of StockStrategy -> "stock" AnyclassStrategy -> "anyclass" NewtypeStrategy -> "newtype" ppr_overlap :: Overlap -> Doc ppr_overlap o = text $ case o of Overlaps -> "{-# OVERLAPS #-}" Overlappable -> "{-# OVERLAPPABLE #-}" Overlapping -> "{-# OVERLAPPING #-}" Incoherent -> "{-# INCOHERENT #-}" ppr_data :: Doc -> Cxt -> Name -> Doc -> Maybe Kind -> [Con] -> [DerivClause] -> Doc ppr_data maybeInst ctxt t argsDoc ksig cs decs = sep [text "data" <+> maybeInst <+> pprCxt ctxt <+> pprName' Applied t <+> argsDoc <+> ksigDoc <+> maybeWhere, nest nestDepth (sep (pref $ map ppr cs)), if null decs then empty else nest nestDepth $ vcat $ map ppr_deriv_clause decs] where pref :: [Doc] -> [Doc] pref xs | isGadtDecl = xs pref [] = [] -- No constructors; can't happen in H98 pref (d:ds) = (char '=' <+> d):map (bar <+>) ds maybeWhere :: Doc maybeWhere | isGadtDecl = text "where" | otherwise = empty isGadtDecl :: Bool isGadtDecl = not (null cs) && all isGadtCon cs where isGadtCon (GadtC _ _ _ ) = True isGadtCon (RecGadtC _ _ _) = True isGadtCon (ForallC _ _ x ) = isGadtCon x isGadtCon _ = False ksigDoc = case ksig of Nothing -> empty Just k -> dcolon <+> ppr k ppr_newtype :: Doc -> Cxt -> Name -> Doc -> Maybe Kind -> Con -> [DerivClause] -> Doc ppr_newtype maybeInst ctxt t argsDoc ksig c decs = sep [text "newtype" <+> maybeInst <+> pprCxt ctxt <+> ppr t <+> argsDoc <+> ksigDoc, nest 2 (char '=' <+> ppr c), if null decs then empty else nest nestDepth $ vcat $ map ppr_deriv_clause decs] where ksigDoc = case ksig of Nothing -> empty Just k -> dcolon <+> ppr k ppr_deriv_clause :: DerivClause -> Doc ppr_deriv_clause (DerivClause ds ctxt) = text "deriving" <+> maybe empty ppr_deriv_strategy ds <+> ppr_cxt_preds ctxt ppr_tySyn :: Doc -> Name -> Doc -> Type -> Doc ppr_tySyn maybeInst t argsDoc rhs = text "type" <+> maybeInst <+> ppr t <+> argsDoc <+> text "=" <+> ppr rhs ppr_tf_head :: TypeFamilyHead -> Doc ppr_tf_head (TypeFamilyHead tc tvs res inj) = ppr tc <+> hsep (map ppr tvs) <+> ppr res <+> maybeInj where maybeInj | (Just inj') <- inj = ppr inj' | otherwise = empty ------------------------------ instance Ppr FunDep where ppr (FunDep xs ys) = hsep (map ppr xs) <+> text "->" <+> hsep (map ppr ys) ppr_list [] = empty ppr_list xs = bar <+> commaSep xs ------------------------------ instance Ppr FamFlavour where ppr DataFam = text "data" ppr TypeFam = text "type" ------------------------------ instance Ppr FamilyResultSig where ppr NoSig = empty ppr (KindSig k) = dcolon <+> ppr k ppr (TyVarSig bndr) = text "=" <+> ppr bndr ------------------------------ instance Ppr InjectivityAnn where ppr (InjectivityAnn lhs rhs) = bar <+> ppr lhs <+> text "->" <+> hsep (map ppr rhs) ------------------------------ instance Ppr Foreign where ppr (ImportF 
callconv safety impent as typ) = text "foreign import" <+> showtextl callconv <+> showtextl safety <+> text (show impent) <+> ppr as <+> dcolon <+> ppr typ ppr (ExportF callconv expent as typ) = text "foreign export" <+> showtextl callconv <+> text (show expent) <+> ppr as <+> dcolon <+> ppr typ ------------------------------ instance Ppr Pragma where ppr (InlineP n inline rm phases) = text "{-#" <+> ppr inline <+> ppr rm <+> ppr phases <+> ppr n <+> text "#-}" ppr (SpecialiseP n ty inline phases) = text "{-# SPECIALISE" <+> maybe empty ppr inline <+> ppr phases <+> sep [ ppr n <+> dcolon , nest 2 $ ppr ty ] <+> text "#-}" ppr (SpecialiseInstP inst) = text "{-# SPECIALISE instance" <+> ppr inst <+> text "#-}" ppr (RuleP n bndrs lhs rhs phases) = sep [ text "{-# RULES" <+> pprString n <+> ppr phases , nest 4 $ ppr_forall <+> ppr lhs , nest 4 $ char '=' <+> ppr rhs <+> text "#-}" ] where ppr_forall | null bndrs = empty | otherwise = text "forall" <+> fsep (map ppr bndrs) <+> char '.' ppr (AnnP tgt expr) = text "{-# ANN" <+> target1 tgt <+> ppr expr <+> text "#-}" where target1 ModuleAnnotation = text "module" target1 (TypeAnnotation t) = text "type" <+> ppr t target1 (ValueAnnotation v) = ppr v ppr (LineP line file) = text "{-# LINE" <+> int line <+> text (show file) <+> text "#-}" ppr (CompleteP cls mty) = text "{-# COMPLETE" <+> (fsep $ punctuate comma $ map ppr cls) <+> maybe empty (\ty -> dcolon <+> ppr ty) mty ------------------------------ instance Ppr Inline where ppr NoInline = text "NOINLINE" ppr Inline = text "INLINE" ppr Inlinable = text "INLINABLE" ------------------------------ instance Ppr RuleMatch where ppr ConLike = text "CONLIKE" ppr FunLike = empty ------------------------------ instance Ppr Phases where ppr AllPhases = empty ppr (FromPhase i) = brackets $ int i ppr (BeforePhase i) = brackets $ char '~' <> int i ------------------------------ instance Ppr RuleBndr where ppr (RuleVar n) = ppr n ppr (TypedRuleVar n ty) = parens $ ppr n <+> dcolon <+> ppr ty ------------------------------ instance Ppr Clause where ppr (Clause ps rhs ds) = hsep (map (pprPat appPrec) ps) <+> pprBody True rhs $$ where_clause ds ------------------------------ instance Ppr Con where ppr (NormalC c sts) = ppr c <+> sep (map pprBangType sts) ppr (RecC c vsts) = ppr c <+> braces (sep (punctuate comma $ map pprVarBangType vsts)) ppr (InfixC st1 c st2) = pprBangType st1 <+> pprName' Infix c <+> pprBangType st2 ppr (ForallC ns ctxt (GadtC c sts ty)) = commaSepApplied c <+> dcolon <+> pprForall ns ctxt <+> pprGadtRHS sts ty ppr (ForallC ns ctxt (RecGadtC c vsts ty)) = commaSepApplied c <+> dcolon <+> pprForall ns ctxt <+> pprRecFields vsts ty ppr (ForallC ns ctxt con) = pprForall ns ctxt <+> ppr con ppr (GadtC c sts ty) = commaSepApplied c <+> dcolon <+> pprGadtRHS sts ty ppr (RecGadtC c vsts ty) = commaSepApplied c <+> dcolon <+> pprRecFields vsts ty instance Ppr PatSynDir where ppr Unidir = text "<-" ppr ImplBidir = text "=" ppr (ExplBidir _) = text "<-" -- the ExplBidir's clauses are pretty printed together with the -- entire pattern synonym; so only print the direction here. 
instance Ppr PatSynArgs where ppr (PrefixPatSyn args) = sep $ map ppr args ppr (InfixPatSyn a1 a2) = ppr a1 <+> ppr a2 ppr (RecordPatSyn sels) = braces $ sep (punctuate comma (map ppr sels)) commaSepApplied :: [Name] -> Doc commaSepApplied = commaSepWith (pprName' Applied) pprForall :: [TyVarBndr] -> Cxt -> Doc pprForall tvs cxt -- even in the case without any tvs, there could be a non-empty -- context cxt (e.g., in the case of pattern synonyms, where there -- are multiple forall binders and contexts). | [] <- tvs = pprCxt cxt | otherwise = text "forall" <+> hsep (map ppr tvs) <+> char '.' <+> pprCxt cxt pprRecFields :: [(Name, Strict, Type)] -> Type -> Doc pprRecFields vsts ty = braces (sep (punctuate comma $ map pprVarBangType vsts)) <+> arrow <+> ppr ty pprGadtRHS :: [(Strict, Type)] -> Type -> Doc pprGadtRHS [] ty = ppr ty pprGadtRHS sts ty = sep (punctuate (space <> arrow) (map pprBangType sts)) <+> arrow <+> ppr ty ------------------------------ pprVarBangType :: VarBangType -> Doc -- Slight infelicity: with print non-atomic type with parens pprVarBangType (v, bang, t) = ppr v <+> dcolon <+> pprBangType (bang, t) ------------------------------ pprBangType :: BangType -> Doc -- Make sure we print -- -- Con {-# UNPACK #-} a -- -- rather than -- -- Con {-# UNPACK #-}a -- -- when there's no strictness annotation. If there is a strictness annotation, -- it's okay to not put a space between it and the type. pprBangType (bt@(Bang _ NoSourceStrictness), t) = ppr bt <+> pprParendType t pprBangType (bt, t) = ppr bt <> pprParendType t ------------------------------ instance Ppr Bang where ppr (Bang su ss) = ppr su <+> ppr ss ------------------------------ instance Ppr SourceUnpackedness where ppr NoSourceUnpackedness = empty ppr SourceNoUnpack = text "{-# NOUNPACK #-}" ppr SourceUnpack = text "{-# UNPACK #-}" ------------------------------ instance Ppr SourceStrictness where ppr NoSourceStrictness = empty ppr SourceLazy = char '~' ppr SourceStrict = char '!' ------------------------------ instance Ppr DecidedStrictness where ppr DecidedLazy = empty ppr DecidedStrict = char '!' ppr DecidedUnpack = text "{-# UNPACK #-} !" ------------------------------ {-# DEPRECATED pprVarStrictType "As of @template-haskell-2.11.0.0@, 'VarStrictType' has been replaced by 'VarBangType'. Please use 'pprVarBangType' instead." #-} pprVarStrictType :: (Name, Strict, Type) -> Doc pprVarStrictType = pprVarBangType ------------------------------ {-# DEPRECATED pprStrictType "As of @template-haskell-2.11.0.0@, 'StrictType' has been replaced by 'BangType'. Please use 'pprBangType' instead." 
#-} pprStrictType :: (Strict, Type) -> Doc pprStrictType = pprBangType ------------------------------ pprParendType :: Type -> Doc pprParendType (VarT v) = pprName' Applied v -- `Applied` is used here instead of `ppr` because of infix names (#13887) pprParendType (ConT c) = pprName' Applied c pprParendType (TupleT 0) = text "()" pprParendType (TupleT n) = parens (hcat (replicate (n-1) comma)) pprParendType (UnboxedTupleT n) = hashParens $ hcat $ replicate (n-1) comma pprParendType (UnboxedSumT arity) = hashParens $ hcat $ replicate (arity-1) bar pprParendType ArrowT = parens (text "->") pprParendType ListT = text "[]" pprParendType (LitT l) = pprTyLit l pprParendType (PromotedT c) = text "'" <> pprName' Applied c pprParendType (PromotedTupleT 0) = text "'()" pprParendType (PromotedTupleT n) = quoteParens (hcat (replicate (n-1) comma)) pprParendType PromotedNilT = text "'[]" pprParendType PromotedConsT = text "'(:)" pprParendType StarT = char '*' pprParendType ConstraintT = text "Constraint" pprParendType (SigT ty k) = parens (ppr ty <+> text "::" <+> ppr k) pprParendType WildCardT = char '_' pprParendType (InfixT x n y) = parens (ppr x <+> pprName' Infix n <+> ppr y) pprParendType t@(UInfixT {}) = parens (pprUInfixT t) pprParendType (ParensT t) = ppr t pprParendType tuple | (TupleT n, args) <- split tuple , length args == n = parens (commaSep args) pprParendType other = parens (ppr other) pprUInfixT :: Type -> Doc pprUInfixT (UInfixT x n y) = pprUInfixT x <+> pprName' Infix n <+> pprUInfixT y pprUInfixT t = ppr t instance Ppr Type where ppr (ForallT tvars ctxt ty) = sep [pprForall tvars ctxt, ppr ty] ppr ty = pprTyApp (split ty) -- Works, in a degnerate way, for SigT, and puts parens round (ty :: kind) -- See Note [Pretty-printing kind signatures] {- Note [Pretty-printing kind signatures] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ GHC's parser only recognises a kind signature in a type when there are parens around it. E.g. the parens are required here: f :: (Int :: *) type instance F Int = (Bool :: *) So we always print a SigT with parens (see Trac #10050). 
-} pprTyApp :: (Type, [Type]) -> Doc pprTyApp (ArrowT, [arg1,arg2]) = sep [pprFunArgType arg1 <+> text "->", ppr arg2] pprTyApp (EqualityT, [arg1, arg2]) = sep [pprFunArgType arg1 <+> text "~", ppr arg2] pprTyApp (ListT, [arg]) = brackets (ppr arg) pprTyApp (TupleT n, args) | length args == n = parens (commaSep args) pprTyApp (PromotedTupleT n, args) | length args == n = quoteParens (commaSep args) pprTyApp (fun, args) = pprParendType fun <+> sep (map pprParendType args) pprFunArgType :: Type -> Doc -- Should really use a precedence argument -- Everything except forall and (->) binds more tightly than (->) pprFunArgType ty@(ForallT {}) = parens (ppr ty) pprFunArgType ty@((ArrowT `AppT` _) `AppT` _) = parens (ppr ty) pprFunArgType ty@(SigT _ _) = parens (ppr ty) pprFunArgType ty = ppr ty split :: Type -> (Type, [Type]) -- Split into function and args split t = go t [] where go (AppT t1 t2) args = go t1 (t2:args) go ty args = (ty, args) pprTyLit :: TyLit -> Doc pprTyLit (NumTyLit n) = integer n pprTyLit (StrTyLit s) = text (show s) instance Ppr TyLit where ppr = pprTyLit ------------------------------ instance Ppr TyVarBndr where ppr (PlainTV nm) = ppr nm ppr (KindedTV nm k) = parens (ppr nm <+> dcolon <+> ppr k) instance Ppr Role where ppr NominalR = text "nominal" ppr RepresentationalR = text "representational" ppr PhantomR = text "phantom" ppr InferR = text "_" ------------------------------ pprCxt :: Cxt -> Doc pprCxt [] = empty pprCxt ts = ppr_cxt_preds ts <+> text "=>" ppr_cxt_preds :: Cxt -> Doc ppr_cxt_preds [] = empty ppr_cxt_preds [t] = ppr t ppr_cxt_preds ts = parens (commaSep ts) ------------------------------ instance Ppr Range where ppr = brackets . pprRange where pprRange :: Range -> Doc pprRange (FromR e) = ppr e <> text ".." pprRange (FromThenR e1 e2) = ppr e1 <> text "," <> ppr e2 <> text ".." pprRange (FromToR e1 e2) = ppr e1 <> text ".." <> ppr e2 pprRange (FromThenToR e1 e2 e3) = ppr e1 <> text "," <> ppr e2 <> text ".." <> ppr e3 ------------------------------ where_clause :: [Dec] -> Doc where_clause [] = empty where_clause ds = nest nestDepth $ text "where" <+> vcat (map (ppr_dec False) ds) showtextl :: Show a => a -> Doc showtextl = text . map toLower . show hashParens :: Doc -> Doc hashParens d = text "(# " <> d <> text " #)" quoteParens :: Doc -> Doc quoteParens d = text "'(" <> d <> text ")" ----------------------------- instance Ppr Loc where ppr (Loc { loc_module = md , loc_package = pkg , loc_start = (start_ln, start_col) , loc_end = (end_ln, end_col) }) = hcat [ text pkg, colon, text md, colon , parens $ int start_ln <> comma <> int start_col , text "-" , parens $ int end_ln <> comma <> int end_col ] -- Takes a list of printable things and prints them separated by commas followed -- by space. commaSep :: Ppr a => [a] -> Doc commaSep = commaSepWith ppr -- Takes a list of things and prints them with the given pretty-printing -- function, separated by commas followed by space. commaSepWith :: (a -> Doc) -> [a] -> Doc commaSepWith pprFun = sep . punctuate comma . map pprFun -- Takes a list of printable things and prints them separated by semicolons -- followed by space. semiSep :: Ppr a => [a] -> Doc semiSep = sep . punctuate semi . map ppr -- Prints out the series of vertical bars that wraps an expression or pattern -- used in an unboxed sum. unboxedSumBars :: Doc -> SumAlt -> SumArity -> Doc unboxedSumBars d alt arity = hashParens $ bars (alt-1) <> d <> bars (arity - alt) where bars i = hsep (replicate i bar) -- Text containing the vertical bar character. 
bar :: Doc bar = char '|'
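{- Rendering sketch (illustrative; the exact layout depends on the Doc renderer): pprTyApp (ListT, [VarT (mkName "a")]) should render as "[a]", pprTyApp (ArrowT, [VarT (mkName "a"), VarT (mkName "b")]) as "a -> b", and unboxedSumBars (text "x") 2 3 as roughly "(# |x| #)", i.e. the second alternative of a 3-ary unboxed sum. -}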
ezyang/ghc
libraries/template-haskell/Language/Haskell/TH/Ppr.hs
bsd-3-clause
31,558
0
14
9,124
10,858
5,312
5,546
629
8
module Main where import System.Environment (getArgs) import Network.Factual.API import Data.Factual.Query.DiffsQuery import Data.Factual.Response main :: IO() main = do args <- getArgs let oauthKey = head args let oauthSecret = last args let options = Options { token = generateToken oauthKey oauthSecret, timeout = Nothing } let query = DiffsQuery { table = Custom "canada-stable", start = 1339123455775, end = 1339124455775 } result <- executeQuery options query putStrLn $ "Status: " ++ status result putStrLn $ "Version: " ++ show (version result) putStrLn $ "Data: " ++ show (response result)
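{- Usage sketch (credentials are placeholders): run as e.g. `runhaskell DiffsExample.hs MY_OAUTH_KEY MY_OAUTH_SECRET`; per getArgs above, the first argument is taken as the OAuth key and the last as the secret, and the query hits the live Factual diffs API for the "canada-stable" table. -}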
rudyl313/factual-haskell-driver
examples/DiffsExample.hs
bsd-3-clause
619
0
12
109
205
105
100
16
1
import Control.Monad import qualified Data.ByteString as B import qualified Data.ByteString.Lazy as BL import Data.Char import Data.IORef import Data.Vhd import Data.Vhd.Bat import qualified Data.Vhd.Block as Block import Data.Vhd.Checksum import Data.Vhd.Node import Data.Vhd.Types import System.Environment (getArgs) import System.IO import Text.Printf cmdConvert [fileRaw, fileVhd, size] = convert =<< rawSizeBytes where vhdSizeMiB = read size vhdSizeBytes = vhdSizeMiB * 1024 * 1024 rawSizeBytes = fmap fromIntegral $ withFile fileRaw ReadMode hFileSize convert rawSizeBytes | vhdSizeMiB `mod` 2 /= 0 = error "specified VHD size is not a multiple of 2 MiB." | vhdSizeBytes < rawSizeBytes = error "specified VHD size is not large enough to contain raw data." | otherwise = do create fileVhd $ defaultCreateParameters { createVirtualSize = vhdSizeBytes } withVhd fileVhd $ \vhd -> BL.readFile fileRaw >>= writeDataRange vhd 0 cmdConvert _ = error "usage: convert <raw file> <vhd file> <size MiB>" cmdCreate [name, size] = create name $ defaultCreateParameters { createVirtualSize = read size * 1024 * 1024 } cmdCreate _ = error "usage: create <name> <size MiB>" cmdExtract [fileVhd, fileRaw] = withVhd fileVhd $ readData >=> BL.writeFile fileRaw cmdExtract _ = error "usage: extract <vhd file> <raw file>" cmdPropGet [file, key] = withVhdNode file $ \node -> do case map toLower key of "max-table-entries" -> putStrLn $ show $ headerMaxTableEntries $ nodeHeader node "blocksize" -> putStrLn $ show $ headerBlockSize $ nodeHeader node "disk-type" -> putStrLn $ show $ footerDiskType $ nodeFooter node "current-size" -> putStrLn $ show $ footerCurrentSize $ nodeFooter node "uuid" -> putStrLn $ show $ footerUniqueId $ nodeFooter node "parent-uuid" -> putStrLn $ show $ headerParentUniqueId $ nodeHeader node "parent-timestamp" -> putStrLn $ show $ headerParentTimeStamp $ nodeHeader node "parent-filepath" -> putStrLn $ show $ headerParentUnicodeName $ nodeHeader node "timestamp" -> putStrLn $ show $ footerTimeStamp $ nodeFooter node _ -> error "unknown key" cmdPropGet _ = error "usage: prop-get <file> <key>" cmdRead [file] = withVhdNode file $ \node -> do let hdr = nodeHeader node let ftr = nodeFooter node mapM_ (\(f, s) -> putStrLn (f ++ " : " ++ s)) [ ("cookie ", show $ headerCookie hdr) , ("version ", show $ headerVersion hdr) , ("max-table-entries", show $ headerMaxTableEntries hdr) , ("block-size ", showBlockSize $ headerBlockSize hdr) , ("header-checksum ", showChecksum (headerChecksum hdr) (verifyHeaderChecksum hdr)) , ("parent-uuid ", show $ headerParentUniqueId hdr) , ("parent-filepath ", show $ headerParentUnicodeName hdr) , ("parent-timestamp ", show $ headerParentTimeStamp hdr) ] mapM_ (\(f, s) -> putStrLn (f ++ " : " ++ s)) [ ("disk-geometry ", show $ footerDiskGeometry ftr) , ("original-size ", showBlockSize $ footerOriginalSize ftr) , ("current-size ", showBlockSize $ footerOriginalSize ftr) , ("type ", show $ footerDiskType ftr) , ("footer-checksum ", showChecksum (footerChecksum ftr) (verifyFooterChecksum ftr)) , ("uuid ", show $ footerUniqueId ftr) , ("timestamp ", show $ footerTimeStamp ftr) ] allocated <- newIORef 0 batIterate (nodeBat node) (fromIntegral $ headerMaxTableEntries hdr) $ \i n -> do unless (n == 0xffffffff) $ modifyIORef allocated ((+) 1) >> printf "BAT[%.5x] = %08x\n" i n nb <- readIORef allocated putStrLn ("blocks allocated : " ++ show nb ++ "/" ++ show (headerMaxTableEntries hdr)) cmdRead _ = error "usage: read <file>" cmdSnapshot [fileVhdParent, fileVhdChild] = withVhd fileVhdParent 
$ \vhdParent -> snapshot vhdParent fileVhdChild cmdSnapshot _ = error "usage: snapshot <parent vhd file> <child vhd file>" showBlockSize i | i < 1024 = printf "%d bytes" i | i < (1024^2) = printf "%d KiB" (i `div` 1024) | i < (1024^3) = printf "%d MiB" (i `div` (1024^2)) | otherwise = printf "%d GiB" (i `div` (1024^3)) showChecksum checksum isValid = printf "%08x (%s)" checksum (if isValid then "valid" else "invalid") main = do args <- getArgs case args of "convert" : xs -> cmdConvert xs "create" : xs -> cmdCreate xs "extract" : xs -> cmdExtract xs "prop-get" : xs -> cmdPropGet xs "read" : xs -> cmdRead xs "snapshot" : xs -> cmdSnapshot xs
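{- Invocation sketch (the binary name `vhd` is hypothetical; the commands follow the dispatch in main above): `vhd create disk.vhd 1024` makes a 1024 MiB VHD, `vhd convert disk.raw disk.vhd 2048` wraps a raw image (the size must be a multiple of 2 MiB and no smaller than the raw file), and `vhd read disk.vhd` dumps the header/footer fields and the BAT. -}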
jonathanknowles/hs-vhd
Vhd.hs
bsd-3-clause
4,579
22
17
1,016
1,439
736
703
92
10
module Environment where import Graphics.Rendering.OpenGL hiding (($=)) import Graphics.UI.GLUT import Control.Applicative import Data.IORef import System.Exit import Graphics.UI.GLUT.Callbacks.Window import Control.Concurrent import Control.Concurrent.MVar import System.Random import System.IO.Unsafe type Cor = (GLfloat, GLfloat, GLfloat) type Vertice = (GLfloat, GLfloat) type Damage = Int type Life = Int type Objeto = (String, Damage, Cor) vertice :: GLfloat -> GLfloat -> Vertice vertice x y = (x, y) cor :: GLfloat -> GLfloat -> GLfloat -> Cor cor r g b = (r, g, b) objetos :: [Objeto] objetos = [ ("Ground", 0, (0.625,0.269, 0.07)), ("ThornTrap", 15, (0.542, 0, 0)), ("ArrowTrap", 25, (0,1,0)), ("FireTrap", 40, (0,0,1)), ("Hole", 60, (1,1,0)) ] getObjetos:: [[Int]] -> [[Objeto]] getObjetos [] = [] getObjetos (linha:ls) = [objetos!!c | c <- linha ] : getObjetos ls
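{- Small example (indices chosen arbitrarily): getObjetos [[0,1],[4,2]] == [[("Ground",0,(0.625,0.269,0.07)),("ThornTrap",15,(0.542,0,0))],[("Hole",60,(1,1,0)),("ArrowTrap",25,(0,1,0))]], i.e. each index simply selects the corresponding entry of `objetos`. -}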
jailson-dias/Arca
src/Environment.hs
bsd-3-clause
907
2
8
156
380
236
144
30
1
{-# LANGUAGE DeriveDataTypeable, FlexibleInstances, StandaloneDeriving, TypeSynonymInstances #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module Atomo.Pretty (Pretty(..), Prettied) where import Data.Char (isUpper) import Data.IORef import Data.Maybe (isNothing) import Data.Ratio import Data.Typeable import System.IO.Unsafe import Text.PrettyPrint hiding (braces) import qualified Data.Vector as V import Atomo.Method import Atomo.Types hiding (keyword) import Atomo.Lexer.Base (isOperator, Token(..), TaggedToken(..)) data Context = CNone | CDefine | CKeyword | CSingle | CArgs | CPattern | CList type Prettied = Doc deriving instance Typeable Prettied class Pretty a where -- | Pretty-print a value into a Doc. Typically this should be parseable -- back into the original value, or just a nice user-friendly output form. pretty :: a -> Prettied prettyFrom :: Context -> a -> Prettied pretty = prettyFrom CNone instance Pretty Value where prettyFrom _ (Block _ ps es) | null ps = braces exprs | otherwise = braces $ sep (map (prettyFrom CArgs) ps) <+> char '|' <+> exprs where exprs = sep . punctuate (text ";") $ map pretty es prettyFrom _ (Boolean b) = text $ show b prettyFrom _ (Character c) = char '$' <> (text . tail . init $ show c) prettyFrom _ (Continuation _) = internal "continuation" empty prettyFrom _ (Double d) = double d prettyFrom _ (Expression e) = char '\'' <> parens (pretty e) prettyFrom _ (Haskell v) = internal "haskell" $ text (show v) prettyFrom _ (Integer i) = integer i prettyFrom _ (List l) = brackets . hsep . punctuate comma $ map (prettyFrom CList) vs where vs = V.toList l prettyFrom _ (Tuple l) = parens . hsep . punctuate comma $ map (prettyFrom CList) vs where vs = V.toList l prettyFrom _ (Message m) = internal "message" $ pretty m prettyFrom _ (Method (Slot p _)) = internal "slot" $ parens (pretty p) prettyFrom _ (Method (Responder p _ _)) = internal "responder" $ parens (pretty p) prettyFrom _ (Method (Macro p _)) = internal "macro" $ parens (pretty p) prettyFrom _ (Particle p) = char '@' <> pretty p prettyFrom _ (Pattern p) = internal "pattern" $ pretty p prettyFrom _ (Process _ tid) = internal "process" $ text (words (show tid) !! 1) prettyFrom CNone (Object { oDelegates = ds, oMethods = ms }) = internal "object" (parens (text "delegates to" <+> pretty ds)) $$ nest 2 (pretty ms) prettyFrom _ (Rational r) = integer (numerator r) <> char '/' <> integer (denominator r) prettyFrom _ (Object {}) = internal "object" empty prettyFrom _ (String s) = text (show s) prettyFrom _ (Regexp _ s o _) = text "r{" <> text (macroEscape s) <> char '}' <> text o instance Pretty Methods where prettyFrom _ ms = vcat [ if not (nullMap ss) then vcat (map (vcat . map prettyMethod) (elemsMap ss)) <> if not (nullMap ks) then char '\n' else empty else empty , if not (nullMap ks) then vcat $ flip map (elemsMap ks) $ \ps -> vcat (map prettyMethod ps) <> char '\n' else empty ] where (ss, ks) = unsafePerformIO (readIORef ms) prettyMethod (Slot { mPattern = p, mValue = v }) = prettyFrom CDefine p <+> text ":=" <++> prettyFrom CDefine v prettyMethod (Responder { mPattern = p, mExpr = e }) = prettyFrom CDefine p <+> text ":=" <++> prettyFrom CDefine e prettyMethod (Macro { mPattern = p, mExpr = e }) = text "macro" <+> parens (pretty p) <++> prettyFrom CDefine e instance Pretty Pattern where prettyFrom _ PAny = text "_" prettyFrom _ (PHeadTail h t) = parens $ pretty h <+> text "." <+> pretty t prettyFrom c (PMessage m) = prettyFrom c m prettyFrom _ (PList ps) = brackets . 
sep $ punctuate comma (map (prettyFrom CList) ps) prettyFrom _ (PTuple ps) = parens . sep $ punctuate comma (map (prettyFrom CList) ps) prettyFrom _ (PMatch v) = prettyFrom CPattern v prettyFrom _ (PNamed n PAny) = text n prettyFrom _ (PNamed n p) = parens $ text n <> colon <+> pretty p prettyFrom _ (PObject e@(EDispatch { eMessage = msg })) | capitalized msg = pretty e | isParticular msg = pretty block where capitalized (Single { mName = n, mTarget = ETop {} }) = isUpper (head n) capitalized (Single { mTarget = EDispatch { eMessage = t@(Single {}) } }) = capitalized t capitalized _ = False isParticular (Keyword { mNames = ["call-in"], mTargets = [EBlock {}, ETop {}] }) = True isParticular _ = False block = head (mTargets msg) prettyFrom _ (PObject e) = parens $ pretty e prettyFrom _ (PInstance p) = parens $ text "->" <+> pretty p prettyFrom _ (PStrict p) = parens $ text "==" <+> pretty p prettyFrom _ (PVariable p) = parens $ text "..." <+> pretty p prettyFrom _ (PPMKeyword ns ps) | all isAny ps = char '@' <> text (concatMap keyword ns) | isAny (head ps) = char '@' <> parens (headlessKeywords ns (tail ps)) | otherwise = char '@' <> parens (keywords ns ps) where isAny PAny = True isAny _ = False prettyFrom _ (PExpr e) = pretty (EQuote Nothing e) prettyFrom _ PThis = text "<this>" prettyFrom _ PEDispatch = text "Dispatch" prettyFrom _ PEOperator = text "Operator" prettyFrom _ PEPrimitive = text "Primitive" prettyFrom _ PEBlock = text "Block" prettyFrom _ PEList = text "List" prettyFrom _ PETuple = text "Tuple" prettyFrom _ PEMacro = text "Macro" prettyFrom _ PEForMacro = text "ForMacro" prettyFrom _ PEParticle = text "Particle" prettyFrom _ PETop = text "Top" prettyFrom _ PEQuote = text "Quote" prettyFrom _ PEUnquote = text "Unquote" prettyFrom _ PEMacroQuote = text "MacroQuote" prettyFrom _ PEMatch = text "Match" instance Pretty Expr where prettyFrom _ (EDefine _ p v) = prettyFrom CDefine p <+> text ":=" <++> prettyFrom CDefine v prettyFrom _ (ESet _ p v) = prettyFrom CDefine p <+> text "=" <++> prettyFrom CDefine v prettyFrom CKeyword (EDispatch _ m@(Keyword {})) = parens $ pretty m prettyFrom CSingle (EDispatch _ m@(Keyword {})) = parens $ pretty m prettyFrom c (EDispatch _ m) = prettyFrom c m prettyFrom _ (EOperator _ ns a i) = text "operator" <+> assoc a <+> integer i <+> sep (map text ns) where assoc ALeft = text "left" assoc ARight = text "right" prettyFrom c (EPrimitive _ v) = prettyFrom c v prettyFrom _ (EBlock _ ps es) | null ps = braces exprs | otherwise = braces $ sep (map pretty ps) <+> char '|' <+> exprs where exprs = sep . punctuate (text ";") $ map pretty es prettyFrom CDefine (EVM {}) = text "..." prettyFrom _ (EVM {}) = text "<vm>" prettyFrom _ (EList _ es) = brackets . sep . punctuate comma $ map (prettyFrom CList) es prettyFrom _ (ETuple _ es) = parens . sep . 
punctuate comma $ map (prettyFrom CList) es prettyFrom _ (EMacro _ p e) = text "macro" <+> parens (pretty p) <++> pretty e prettyFrom _ (EForMacro { eExpr = e }) = text "for-macro" <+> pretty e prettyFrom c (EParticle _ p) = char '@' <> prettyFrom c p prettyFrom _ (ETop {}) = text "this" prettyFrom c (EQuote _ e) = char '`' <> prettySpacedExpr c e prettyFrom c (EUnquote _ e) = char '~' <> prettySpacedExpr c e prettyFrom _ (ENewDynamic {}) = internal "new-dynamic" empty prettyFrom _ (EDefineDynamic { eName = n, eExpr = e }) = internal "define-dynamic" $ text n <+> pretty e prettyFrom _ (ESetDynamic { eName = n, eExpr = e }) = internal "set-dynamic" $ text n <+> pretty e prettyFrom _ (EGetDynamic { eName = n }) = internal "get-dynamic" $ text n prettyFrom _ (EMacroQuote _ n r f) = text n <> char '{' <> text (macroEscape r) <> char '}' <> text f prettyFrom _ (EMatch _ t bs) = prettyFrom CKeyword t <+> text "match:" <+> branches where branches = braces . sep . punctuate (text ";") $ flip map bs $ \(p, e) -> pretty p <+> text "->" <+> pretty e instance Pretty [Expr] where prettyFrom _ es = sep . punctuate (text ";") $ map pretty es instance Pretty x => Pretty (Option x) where prettyFrom _ (Option _ n x) = char '&' <> text n <> char ':' <+> pretty x instance Pretty (Message Pattern) where prettyFrom _ (Single { mName = n, mTarget = PThis, mOptionals = os }) = text n <+> sep (map pretty os) prettyFrom _ (Single { mName = n, mTarget = (PObject ETop {}), mOptionals = os }) = text n <+> sep (map pretty os) prettyFrom _ (Single { mName = n, mTarget = p, mOptionals = os }) = pretty p <+> text n <+> sep (map pretty os) prettyFrom _ (Keyword { mNames = ns, mTargets = (PThis:vs), mOptionals = os }) = headlessKeywords ns vs <+> sep (map pretty os) prettyFrom _ (Keyword { mNames = ns, mTargets = (PObject ETop {}:vs), mOptionals = os }) = headlessKeywords ns vs <+> sep (map pretty os) prettyFrom _ (Keyword { mNames = ns, mTargets = vs, mOptionals = os }) = keywords ns vs <+> sep (map pretty os) instance Pretty (Message Value) where prettyFrom _ (Single { mName = n, mTarget = t, mOptionals = os }) = prettyFrom CSingle t <+> text n <+> sep (map pretty os) prettyFrom _ (Keyword { mNames = ns, mTargets = vs, mOptionals = os }) = keywords ns vs <+> sep (map pretty os) instance Pretty (Message Expr) where prettyFrom _ (Single { mName = n, mTarget = ETop {}, mOptionals = os }) = text n <+> sep (map pretty os) prettyFrom _ (Single { mName = n, mTarget = t, mOptionals = os }) = prettyFrom CSingle t <+> text n <+> sep (map pretty os) prettyFrom _ (Keyword { mNames = ns, mTargets = (ETop {}:es), mOptionals = os }) = headlessKeywords ns es <+> sep (map pretty os) prettyFrom _ (Keyword { mNames = ns, mTargets = es, mOptionals = os }) = keywords ns es <+> sep (map pretty os) instance Pretty x => Pretty (Particle x) where prettyFrom _ (Single { mName = n, mTarget = Nothing, mOptionals = [] }) = text n prettyFrom _ (Single { mName = n, mTarget = Nothing, mOptionals = os }) = parens (text n <+> sep (map pretty os)) prettyFrom _ (Single { mName = n, mTarget = Just t, mOptionals = os }) = parens (pretty t <+> text n <+> sep (map pretty os)) prettyFrom _ (Keyword { mNames = ns, mTargets = vs, mOptionals = os }) | all isNothing vs && null os = text . 
concat $ map keyword ns | isNothing (head vs) = parens $ headlessKeywords ns (tail vs) <+> sep (map pretty os) | otherwise = parens $ keywords ns vs <+> sep (map pretty os) instance Pretty x => Pretty (Maybe x) where prettyFrom _ Nothing = text "_" prettyFrom c (Just v) = prettyFrom c v instance Pretty Delegates where prettyFrom _ [] = internal "bottom" empty prettyFrom _ [_] = text "1 object" prettyFrom _ ds = text $ show (length ds) ++ " objects" instance Pretty Token where prettyFrom _ (TokKeyword k) = text k <> char ':' prettyFrom _ (TokOptional o) = char '&' <> text o <> char ':' prettyFrom _ (TokOptionalFlag o) = char '&' <> text o prettyFrom _ (TokOperator o) = text o prettyFrom _ (TokMacroQuote n r f) = text n <> char '{' <> text (macroEscape r) <> char '}' <> text f prettyFrom _ (TokIdentifier i) = text i prettyFrom _ (TokParticle ks) = char '@' <> hcat (map (text . keyword) ks) prettyFrom _ (TokPrimitive p) = pretty p prettyFrom _ (TokPunctuation c) = char c prettyFrom _ (TokOpen c) = char c prettyFrom _ (TokClose c) = char c prettyFrom _ (TokReserved r) = text r prettyFrom _ TokEnd = char ';' instance Pretty TaggedToken where prettyFrom c tt = prettyFrom c (tToken tt) type Tokens = [TaggedToken] instance Pretty Tokens where prettyFrom _ ts = hsep (map pretty ts) instance Pretty AtomoError where prettyFrom _ (Error v) = text "error:" <+> pretty v prettyFrom _ (ParseError e) = text "parse error:" <+> text (show e) prettyFrom _ (DidNotUnderstand m) = text "message not understood:" <+> pretty m prettyFrom _ (Mismatch a b) = text "mismatch:" $$ nest 2 (pretty a $$ pretty b) prettyFrom _ (ImportError e) = text "haskell interpreter:" <+> text (show e) prettyFrom _ (FileNotFound fn) = text "file not found:" <+> text fn prettyFrom _ (ParticleArity e g) = text ("particle needed " ++ show e ++ " values to complete, given " ++ show g) prettyFrom _ (BlockArity e g) = text ("block expected " ++ show e ++ " arguments, given " ++ show g) prettyFrom _ NoExpressions = text "no expressions to evaluate" prettyFrom _ (ValueNotFound d v) = text "could not find a" <+> text d <+> text "in" <+> pretty v prettyFrom _ (DynamicNeeded t) = text "expected dynamic value of type" <+> text t internal :: String -> Doc -> Doc internal n d = char '<' <> text n <+> d <> char '>' braces :: Doc -> Doc braces d = char '{' <+> d <+> char '}' macroEscape :: String -> String macroEscape "" = "" macroEscape ('{':cs) = "\\{" ++ macroEscape cs macroEscape ('}':cs) = "\\}" ++ macroEscape cs macroEscape (c:cs) = c : macroEscape cs headlessKeywords' :: (a -> Doc) -> [String] -> [a] -> Doc headlessKeywords' p (k:ks) (v:vs) = text (keyword k) <+> p v <++> headlessKeywords'' p ks vs headlessKeywords' _ _ _ = empty headlessKeywords'' :: (a -> Doc) -> [String] -> [a] -> Doc headlessKeywords'' p (k:ks) (v:vs) = text (keyword k) <+> p v <+++> headlessKeywords'' p ks vs headlessKeywords'' _ _ _ = empty keywords' :: (a -> Doc) -> [String] -> [a] -> Doc keywords' p ks (v:vs) = p v <+> headlessKeywords' p ks vs keywords' _ _ _ = empty headlessKeywords :: Pretty a => [String] -> [a] -> Doc headlessKeywords = headlessKeywords' (prettyFrom CKeyword) keywords :: Pretty a => [String] -> [a] -> Doc keywords = keywords' (prettyFrom CKeyword) keyword :: String -> String keyword k | isOperator k = k | otherwise = k ++ ":" prettySpacedExpr :: Context -> Expr -> Doc prettySpacedExpr c e | needsParens e = parens (prettyFrom c e) | otherwise = prettyFrom c e where needsParens (EDefine {}) = True needsParens (ESet {}) = True needsParens (EDispatch { 
eMessage = Keyword {} }) = True needsParens (EDispatch { eMessage = Single { mTarget = ETop {} } }) = False needsParens (EDispatch { eMessage = Single {} }) = True needsParens _ = False infixr 4 <++>, <+++> -- similar to <+>, but the second half will be nested to prevent long lines (<++>) :: Doc -> Doc -> Doc (<++>) a b | length (show a ++ show b) > 80 = a $$ nest 2 b | otherwise = a <+> b -- similar to <++>, but without nesting (<+++>) :: Doc -> Doc -> Doc (<+++>) a b | length (show a ++ show b) > 80 = a $$ b | otherwise = a <+> b
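{- Behaviour sketch for the helpers above (outputs approximate): keyword "foo" gives "foo:" while keyword "+" stays "+" (assuming isOperator accepts "+"); a <++> b keeps both parts on one line when their rendered widths total at most 80 characters, and otherwise drops b onto the next line nested by 2. -}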
vito/atomo
src/Atomo/Pretty.hs
bsd-3-clause
15,361
0
18
4,209
6,257
3,102
3,155
319
6
-- Copyright (c) 2016-present, Facebook, Inc. -- All rights reserved. -- -- This source code is licensed under the BSD-style license found in the -- LICENSE file in the root directory of this source tree. An additional grant -- of patent rights can be found in the PATENTS file in the same directory. module Duckling.Temperature.JA.Tests ( tests ) where import Prelude import Data.String import Test.Tasty import Duckling.Dimensions.Types import Duckling.Temperature.JA.Corpus import Duckling.Testing.Asserts tests :: TestTree tests = testGroup "JA Tests" [ makeCorpusTest [This Temperature] corpus ]
rfranek/duckling
tests/Duckling/Temperature/JA/Tests.hs
bsd-3-clause
612
0
9
96
80
51
29
11
1
{-# LANGUAGE DeriveDataTypeable #-} module Sgf.XMonad.Restartable.Firefox ( FirefoxProfile (..) , FirefoxArgs , firefoxProfile , firefoxNoRemote , firefoxNewInstance , Firefox , defaultFirefox ) where import Data.Typeable import Data.Function (on) import Sgf.Data.List import Sgf.Control.Lens import Sgf.XMonad.Restartable data FirefoxProfile = FfProfileManager | FfProfile String deriving (Show, Read, Typeable, Eq) data FirefoxArgs = FirefoxArgs { _firefoxProfile :: FirefoxProfile , _firefoxNoRemote :: Bool , _firefoxNewInstance :: Bool } deriving (Show, Read, Typeable) firefoxProfile :: LensA FirefoxArgs FirefoxProfile firefoxProfile f z@FirefoxArgs {_firefoxProfile = x} = fmap (\x' -> z{_firefoxProfile = x'}) (f x) firefoxNoRemote :: LensA FirefoxArgs Bool firefoxNoRemote f z@FirefoxArgs {_firefoxNoRemote = x} = fmap (\x' -> z{_firefoxNoRemote = x'}) (f x) firefoxNewInstance :: LensA FirefoxArgs Bool firefoxNewInstance f z@FirefoxArgs {_firefoxNewInstance = x} = fmap (\x' -> z{_firefoxNewInstance = x'}) (f x) instance Eq FirefoxArgs where (==) = (==) `on` viewA firefoxProfile instance Arguments FirefoxArgs where serialize x = do let xr = viewA firefoxNoRemote x xi = viewA firefoxNewInstance x xp = case (viewA firefoxProfile x) of FfProfile p -> unless' (null p) ["-P", p] FfProfileManager -> ["-ProfileManager"] fmap concat . sequence $ [ return xp , when' xr (return ["--no-remote"]) , when' xi (return ["--new-instance"]) ] defaultArgs = FirefoxArgs { _firefoxProfile = FfProfile "default" , _firefoxNoRemote = False , _firefoxNewInstance = True } type Firefox = Program FirefoxArgs defaultFirefox :: Firefox defaultFirefox = setA progBin "firefox" defaultProgram
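{- Rough expectation (assuming when'/unless' from Sgf.Data.List behave as guards that yield an empty list when their condition blocks them): serialize defaultArgs should produce ["-P","default","--new-instance"]; setting _firefoxNoRemote to True adds "--no-remote", and FfProfileManager swaps the profile flags for ["-ProfileManager"]. -}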
sgf-dma/sgf-xmonad-modules
src/Sgf/XMonad/Restartable/Firefox.hs
bsd-3-clause
2,369
0
16
907
545
304
241
51
1
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable #-} module Commi.Task where import Haste.Serialize import Haste.JSON import Data.Typeable import Data.Maybe import Control.Applicative import Genetic.Options data Input = Input { inputCityMatrix :: [[Int]] , inputCityN :: Int , inputIndividLength :: Int , inputGeneticOptions :: GeneticOptions } deriving (Typeable, Show) instance Serialize Input where toJSON i = Dict [ ("inputCityMatrix", toJSON $ inputCityMatrix i) , ("inputCityN", toJSON $ inputCityN i) , ("inputIndividLength", toJSON $ inputIndividLength i) , ("inputGeneticOptions", toJSON $ inputGeneticOptions i) ] parseJSON j = Input <$> j .: "inputCityMatrix" <*> j .: "inputCityN" <*> j .: "inputIndividLength" <*> j .: "inputGeneticOptions" initialInput :: Input initialInput = Input { inputCityMatrix = [[-1000, -5, -10, -30, -25, -40, -15, -10, -25, -5, -15, -10], [-5, -1000, -20, -40, -18, -20, -30, -5, -15, -10, -25, -15], [-10, -20, -1000, -15, -40, -15, -5, -15, -5, -40, -20, -40], [-30, -40, -15, -1000, -15, -35, -25, -50, -10, -25, -5, -30], [-25, -18, -40, -15, -1000, -25, -10, -20, -15, -50, -10, -25], [-40, -20, -15, -35, -25, -1000, -5, -30, -30, -70, -5, -35], [-15, -30, -5, -25, -10, -5, -1000, -10, -20, -15, -30, -5], [-10, -5, -15, -50, -20, -30, -10, -1000, -25, -30, -40, -5], [-25, -15, -5, -10, -15, -30, -20, -25, -1000, -15, -10, -18], [-5, -10, -40, -25, -50, -70, -15, -30, -15, -1000, -20, -20], [-15, -25, -20, -5, -10, -5, -30, -40, -10, -20, -1000, -5], [-10, -15, -40, -30, -25, -35, -5, -5, -18, -20, -5, -1000]] , inputGeneticOptions = initialOptions , inputCityN = 2 , inputIndividLength = 5 } data Output = Output { outputSolution :: [Int], outputCost :: Int, outputFitness :: Double } deriving (Typeable, Show) data PlotState = PlotState{ values :: [(Double, Double)] -- ^ Points: x - generation number, y - fitness value } deriving (Typeable, Show) initialPlotState :: PlotState initialPlotState = PlotState []
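{- Round-trip sketch: toJSON initialInput builds a Dict with the keys "inputCityMatrix", "inputCityN", "inputIndividLength" and "inputGeneticOptions", and parseJSON reads those same keys back, so serialising and re-parsing an Input should round-trip (given the Serialize instance for GeneticOptions defined elsewhere). -}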
Teaspot-Studio/bmstu-commi-genetics-haste
Commi/Task.hs
bsd-3-clause
2,431
0
13
773
1,094
653
441
53
1
----------------------------------------------------------------------------- -- | -- Module : Language.C.Syntax.ParserMonad -- Copyright : (c) [1999..2004] Manuel M T Chakravarty -- (c) 2005-2007 Duncan Coutts -- License : BSD-style -- Maintainer : [email protected] -- Portability : portable -- -- Monad for the C lexer and parser -- -- This monad has to be usable with Alex and Happy. Some things in it are -- dictated by that, eg having to be able to remember the last token. -- -- The monad also provides a unique name supply (via the Name module) -- -- For parsing C we have to maintain a set of identifiers that we know to be -- typedef'ed type identifiers. We also must deal correctly with scope so we -- keep a list of sets of identifiers so we can save the outer scope when we -- enter an inner scope. module Language.C.Parser.ParserMonad ( P, execParser, failP, getNewName, -- :: P Name addTypedef, -- :: Ident -> P () shadowTypedef, -- :: Ident -> P () isTypeIdent, -- :: Ident -> P Bool enterScope, -- :: P () leaveScope, -- :: P () setPos, -- :: Position -> P () getPos, -- :: P Position getInput, -- :: P String setInput, -- :: String -> P () getLastToken, -- :: P CToken getSavedToken, -- :: P CToken setLastToken, -- :: CToken -> P () handleEofToken, -- :: P () getCurrentPosition,-- :: P Position ParseError(..), ) where import Language.C.Data.Error (internalErr, showErrorInfo,ErrorInfo(..),ErrorLevel(..)) import Language.C.Data.Position (Position(..)) import Language.C.Data.InputStream import Language.C.Data.Name (Name) import Language.C.Data.Ident (Ident) import Language.C.Parser.Tokens (CToken(CTokEof)) import Control.Applicative (Applicative(..)) import Control.Monad (liftM, ap) import Data.Set (Set) import qualified Data.Set as Set (fromList, insert, member, delete) newtype ParseError = ParseError ([String],Position) instance Show ParseError where show (ParseError (msgs,pos)) = showErrorInfo "Syntax Error !" (ErrorInfo LevelError pos msgs) data ParseResult a = POk !PState a | PFailed [String] Position -- The error message and position data PState = PState { curPos :: !Position, -- position at current input location curInput :: !InputStream, -- the current input prevToken :: CToken, -- the previous token savedToken :: CToken, -- and the token before that namesupply :: ![Name], -- the name unique supply tyidents :: !(Set Ident), -- the set of typedef'ed identifiers scopes :: ![Set Ident] -- the tyident sets for outer scopes } newtype P a = P { unP :: PState -> ParseResult a } instance Functor P where fmap = liftM instance Applicative P where pure = return (<*>) = ap instance Monad P where return = returnP (>>=) = thenP fail m = getPos >>= \pos -> failP pos [m] -- | execute the given parser on the supplied input stream. 
-- returns 'ParseError' if the parser failed, and a pair of -- result and remaining name supply otherwise -- -- Synopsis: @execParser parser inputStream initialPos predefinedTypedefs uniqNameSupply@ execParser :: P a -> InputStream -> Position -> [Ident] -> [Name] -> Either ParseError (a,[Name]) execParser (P parser) input pos builtins names = case parser initialState of PFailed message errpos -> Left (ParseError (message,errpos)) POk st result -> Right (result, namesupply st) where initialState = PState { curPos = pos, curInput = input, prevToken = internalErr "CLexer.execParser: Touched undefined token!", savedToken = internalErr "CLexer.execParser: Touched undefined token (saved token)!", namesupply = names, tyidents = Set.fromList builtins, scopes = [] } {-# INLINE returnP #-} returnP :: a -> P a returnP a = P $ \s -> POk s a {-# INLINE thenP #-} thenP :: P a -> (a -> P b) -> P b (P m) `thenP` k = P $ \s -> case m s of POk s' a -> (unP (k a)) s' PFailed err pos -> PFailed err pos failP :: Position -> [String] -> P a failP pos msg = P $ \_ -> PFailed msg pos getNewName :: P Name getNewName = P $ \s@PState{namesupply=(n:ns)} -> n `seq` POk s{namesupply=ns} n setPos :: Position -> P () setPos pos = P $ \s -> POk s{curPos=pos} () getPos :: P Position getPos = P $ \s@PState{curPos=pos} -> POk s pos addTypedef :: Ident -> P () addTypedef ident = (P $ \s@PState{tyidents=tyids} -> POk s{tyidents = ident `Set.insert` tyids} ()) shadowTypedef :: Ident -> P () shadowTypedef ident = (P $ \s@PState{tyidents=tyids} -> -- optimisation: mostly the ident will not be in -- the tyident set so do a member lookup to avoid -- churn induced by calling delete POk s{tyidents = if ident `Set.member` tyids then ident `Set.delete` tyids else tyids } ()) isTypeIdent :: Ident -> P Bool isTypeIdent ident = P $ \s@PState{tyidents=tyids} -> POk s $! Set.member ident tyids enterScope :: P () enterScope = P $ \s@PState{tyidents=tyids,scopes=ss} -> POk s{scopes=tyids:ss} () leaveScope :: P () leaveScope = P $ \s@PState{scopes=ss} -> case ss of [] -> error "leaveScope: already in global scope" (tyids:ss') -> POk s{tyidents=tyids, scopes=ss'} () getInput :: P InputStream getInput = P $ \s@PState{curInput=i} -> POk s i setInput :: InputStream -> P () setInput i = P $ \s -> POk s{curInput=i} () getLastToken :: P CToken getLastToken = P $ \s@PState{prevToken=tok} -> POk s tok getSavedToken :: P CToken getSavedToken = P $ \s@PState{savedToken=tok} -> POk s tok -- | @setLastToken modifyCache tok@ setLastToken :: CToken -> P () setLastToken CTokEof = P $ \s -> POk s{savedToken=(prevToken s)} () setLastToken tok = P $ \s -> POk s{prevToken=tok,savedToken=(prevToken s)} () -- | handle an End-Of-File token (changes savedToken) handleEofToken :: P () handleEofToken = P $ \s -> POk s{savedToken=(prevToken s)} () getCurrentPosition :: P Position getCurrentPosition = P $ \s@PState{curPos=pos} -> POk s pos
ian-ross/language-c
src/Language/C/Parser/ParserMonad.hs
bsd-3-clause
6,570
0
14
1,812
1,747
997
750
131
2
{-# LANGUAGE RebindableSyntax #-} {-# LANGUAGE ViewPatterns #-} module Init where import Options import Type import Data.Array.Accelerate as A hiding ( fromInteger, V3 ) import Data.Array.Accelerate.Linear as A import Prelude ( fromInteger ) import qualified Prelude as P -- | Deposit some energy at the origin. The simulation is symmetric so we only -- simulate one quadrant, being sure to maintain the boundary conditions. -- initEnergy :: Int -> Acc (Field Energy) initEnergy numElem = let sh = constant (Z :. numElem :. numElem :. numElem) f :: Exp Ix -> Exp Energy f (unlift -> Z :. z :. y :. x) = if z == 0 && y == 0 && x == 0 then 3.948746e+7 else 0 in A.generate sh f -- | Initialise the nodal coordinates to a regular hexahedron mesh. The -- coordinates of the mesh nodes will change as the simulation progresses. -- -- We don't need the nodal point lattice to record the indices of our neighbours -- because we have native multidimensional arrays. -- initMesh :: Int -> Acc (Field Position) initMesh numElem = let numNode = numElem + 1 sh = constant (Z :. numNode :. numNode :. numNode) n = P.fromIntegral numElem f :: Exp Ix -> Exp Position f (unlift -> Z :. k :. j :. i) = let x = _WIDTH * A.fromIntegral i / n y = _HEIGHT * A.fromIntegral j / n z = _DEPTH * A.fromIntegral k / n in lift (V3 x y z) in A.generate sh f -- | Initialise the volume of each element. -- -- Since we begin with a regular hexahedral mesh we just compute the volume -- directly and initialise all elements to that value. -- initElemVolume :: Int -> Acc (Field Volume) initElemVolume numElem = let sh = constant (Z :. numElem :. numElem :. numElem) w = _WIDTH / P.fromIntegral numElem h = _HEIGHT / P.fromIntegral numElem d = _DEPTH / P.fromIntegral numElem v = w * h * d in A.fill sh v -- | Initialise the mass at each node. This is the average of the contribution -- of each of the surrounding elements. -- -- Again, since we begin with a regular mesh, we just compute this value -- directly, but we could equivalently read from the array of element volumes. -- initNodeMass :: Int -> Acc (Field Mass) initNodeMass numElem = let numNode = numElem + 1 sh = constant (Z :. numNode :. numNode :. numNode) w = _WIDTH / P.fromIntegral numElem h = _HEIGHT / P.fromIntegral numElem d = _DEPTH / P.fromIntegral numElem v = w * h * d at z y x = if 0 <= z && z < constant numElem && 0 <= y && y < constant numElem && 0 <= x && x < constant numElem then v else 0 -- This corresponds to the node -> surrounding elements index mapping neighbours :: Exp Ix -> Exp Mass neighbours (unlift -> Z :. z :. y :. x) = ( at z y x + at z y (x-1) + at z (y-1) (x-1) + at z (y-1) x + at (z-1) y x + at (z-1) y (x-1) + at (z-1) (y-1) (x-1) + at (z-1) (y-1) x ) / 8 in generate sh neighbours
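{- Worked numbers (symbolic, since _WIDTH, _HEIGHT and _DEPTH come from Options): with numElem = 2 each element has volume v = (_WIDTH/2)*(_HEIGHT/2)*(_DEPTH/2); in initNodeMass an interior node touches all 8 surrounding elements and so gets 8*v/8 = v, while a corner node touches just one and gets v/8, matching the `at`/`neighbours` averaging above. -}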
tmcdonell/accelerate-lulesh
src/Init.hs
bsd-3-clause
3,314
0
20
1,113
932
488
444
68
2
{- (c) The University of Glasgow 2006 (c) The GRASP/AQUA Project, Glasgow University, 1993-1998 This module defines interface types and binders -} {-# LANGUAGE CPP, FlexibleInstances, BangPatterns #-} {-# LANGUAGE MultiWayIf #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE LambdaCase #-} -- FlexibleInstances for Binary (DefMethSpec IfaceType) module GHC.Iface.Type ( IfExtName, IfLclName, IfaceType(..), IfacePredType, IfaceKind, IfaceCoercion(..), IfaceMCoercion(..), IfaceUnivCoProv(..), IfaceTyCon(..), IfaceTyConInfo(..), IfaceTyConSort(..), IfaceTyLit(..), IfaceAppArgs(..), IfaceContext, IfaceBndr(..), IfaceOneShot(..), IfaceLamBndr, IfaceTvBndr, IfaceIdBndr, IfaceTyConBinder, IfaceForAllBndr, ArgFlag(..), AnonArgFlag(..), ForallVisFlag(..), ShowForAllFlag(..), mkIfaceForAllTvBndr, mkIfaceTyConKind, ifForAllBndrVar, ifForAllBndrName, ifaceBndrName, ifTyConBinderVar, ifTyConBinderName, -- Equality testing isIfaceLiftedTypeKind, -- Conversion from IfaceAppArgs to IfaceTypes/ArgFlags appArgsIfaceTypes, appArgsIfaceTypesArgFlags, -- Printing SuppressBndrSig(..), UseBndrParens(..), pprIfaceType, pprParendIfaceType, pprPrecIfaceType, pprIfaceContext, pprIfaceContextArr, pprIfaceIdBndr, pprIfaceLamBndr, pprIfaceTvBndr, pprIfaceTyConBinders, pprIfaceBndrs, pprIfaceAppArgs, pprParendIfaceAppArgs, pprIfaceForAllPart, pprIfaceForAllPartMust, pprIfaceForAll, pprIfaceSigmaType, pprIfaceTyLit, pprIfaceCoercion, pprParendIfaceCoercion, splitIfaceSigmaTy, pprIfaceTypeApp, pprUserIfaceForAll, pprIfaceCoTcApp, pprTyTcApp, pprIfacePrefixApp, isIfaceTauType, suppressIfaceInvisibles, stripIfaceInvisVars, stripInvisArgs, mkIfaceTySubst, substIfaceTyVar, substIfaceAppArgs, inDomIfaceTySubst ) where #include "HsVersions.h" import GhcPrelude import {-# SOURCE #-} TysWiredIn ( coercibleTyCon, heqTyCon , liftedRepDataConTyCon, tupleTyConName ) import {-# SOURCE #-} Type ( isRuntimeRepTy ) import DynFlags import TyCon hiding ( pprPromotionQuote ) import CoAxiom import Var import PrelNames import Name import BasicTypes import Binary import Outputable import FastString import FastStringEnv import Util import Data.Maybe( isJust ) import qualified Data.Semigroup as Semi import Control.DeepSeq {- ************************************************************************ * * Local (nested) binders * * ************************************************************************ -} type IfLclName = FastString -- A local name in iface syntax type IfExtName = Name -- An External or WiredIn Name can appear in Iface syntax -- (However Internal or System Names never should) data IfaceBndr -- Local (non-top-level) binders = IfaceIdBndr {-# UNPACK #-} !IfaceIdBndr | IfaceTvBndr {-# UNPACK #-} !IfaceTvBndr type IfaceIdBndr = (IfLclName, IfaceType) type IfaceTvBndr = (IfLclName, IfaceKind) ifaceTvBndrName :: IfaceTvBndr -> IfLclName ifaceTvBndrName (n,_) = n ifaceIdBndrName :: IfaceIdBndr -> IfLclName ifaceIdBndrName (n,_) = n ifaceBndrName :: IfaceBndr -> IfLclName ifaceBndrName (IfaceTvBndr bndr) = ifaceTvBndrName bndr ifaceBndrName (IfaceIdBndr bndr) = ifaceIdBndrName bndr ifaceBndrType :: IfaceBndr -> IfaceType ifaceBndrType (IfaceIdBndr (_, t)) = t ifaceBndrType (IfaceTvBndr (_, t)) = t type IfaceLamBndr = (IfaceBndr, IfaceOneShot) data IfaceOneShot -- See Note [Preserve OneShotInfo] in CoreTicy = IfaceNoOneShot -- and Note [The oneShot function] in MkId | IfaceOneShot {- %************************************************************************ %* * IfaceType %* * 
%************************************************************************ -} ------------------------------- type IfaceKind = IfaceType -- | A kind of universal type, used for types and kinds. -- -- Any time a 'Type' is pretty-printed, it is first converted to an 'IfaceType' -- before being printed. See Note [Pretty printing via Iface syntax] in PprTyThing data IfaceType = IfaceFreeTyVar TyVar -- See Note [Free tyvars in IfaceType] | IfaceTyVar IfLclName -- Type/coercion variable only, not tycon | IfaceLitTy IfaceTyLit | IfaceAppTy IfaceType IfaceAppArgs -- See Note [Suppressing invisible arguments] for -- an explanation of why the second field isn't -- IfaceType, analogous to AppTy. | IfaceFunTy AnonArgFlag IfaceType IfaceType | IfaceForAllTy IfaceForAllBndr IfaceType | IfaceTyConApp IfaceTyCon IfaceAppArgs -- Not necessarily saturated -- Includes newtypes, synonyms, tuples | IfaceCastTy IfaceType IfaceCoercion | IfaceCoercionTy IfaceCoercion | IfaceTupleTy -- Saturated tuples (unsaturated ones use IfaceTyConApp) TupleSort -- What sort of tuple? PromotionFlag -- A bit like IfaceTyCon IfaceAppArgs -- arity = length args -- For promoted data cons, the kind args are omitted type IfacePredType = IfaceType type IfaceContext = [IfacePredType] data IfaceTyLit = IfaceNumTyLit Integer | IfaceStrTyLit FastString deriving (Eq) type IfaceTyConBinder = VarBndr IfaceBndr TyConBndrVis type IfaceForAllBndr = VarBndr IfaceBndr ArgFlag -- | Make an 'IfaceForAllBndr' from an 'IfaceTvBndr'. mkIfaceForAllTvBndr :: ArgFlag -> IfaceTvBndr -> IfaceForAllBndr mkIfaceForAllTvBndr vis var = Bndr (IfaceTvBndr var) vis -- | Build the 'tyConKind' from the binders and the result kind. -- Keep in sync with 'mkTyConKind' in types/TyCon. mkIfaceTyConKind :: [IfaceTyConBinder] -> IfaceKind -> IfaceKind mkIfaceTyConKind bndrs res_kind = foldr mk res_kind bndrs where mk :: IfaceTyConBinder -> IfaceKind -> IfaceKind mk (Bndr tv (AnonTCB af)) k = IfaceFunTy af (ifaceBndrType tv) k mk (Bndr tv (NamedTCB vis)) k = IfaceForAllTy (Bndr tv vis) k -- | Stores the arguments in a type application as a list. -- See @Note [Suppressing invisible arguments]@. data IfaceAppArgs = IA_Nil | IA_Arg IfaceType -- The type argument ArgFlag -- The argument's visibility. We store this here so -- that we can: -- -- 1. Avoid pretty-printing invisible (i.e., specified -- or inferred) arguments when -- -fprint-explicit-kinds isn't enabled, or -- 2. When -fprint-explicit-kinds *is*, enabled, print -- specified arguments in @(...) and inferred -- arguments in @{...}. IfaceAppArgs -- The rest of the arguments instance Semi.Semigroup IfaceAppArgs where IA_Nil <> xs = xs IA_Arg ty argf rest <> xs = IA_Arg ty argf (rest Semi.<> xs) instance Monoid IfaceAppArgs where mempty = IA_Nil mappend = (Semi.<>) -- Encodes type constructors, kind constructors, -- coercion constructors, the lot. -- We have to tag them in order to pretty print them -- properly. data IfaceTyCon = IfaceTyCon { ifaceTyConName :: IfExtName , ifaceTyConInfo :: IfaceTyConInfo } deriving (Eq) -- | The various types of TyCons which have special, built-in syntax. data IfaceTyConSort = IfaceNormalTyCon -- ^ a regular tycon | IfaceTupleTyCon !Arity !TupleSort -- ^ e.g. @(a, b, c)@ or @(#a, b, c#)@. -- The arity is the tuple width, not the tycon arity -- (which is twice the width in the case of unboxed -- tuples). | IfaceSumTyCon !Arity -- ^ e.g. @(a | b | c)@ | IfaceEqualityTyCon -- ^ A heterogeneous equality TyCon -- (i.e. 
eqPrimTyCon, eqReprPrimTyCon, heqTyCon) -- that is actually being applied to two types -- of the same kind. This affects pretty-printing -- only: see Note [Equality predicates in IfaceType] deriving (Eq) {- Note [Free tyvars in IfaceType] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Nowadays (since Nov 16, 2016) we pretty-print a Type by converting to an IfaceType and pretty printing that. This eliminates a lot of pretty-print duplication, and it matches what we do with pretty- printing TyThings. See Note [Pretty printing via Iface syntax] in PprTyThing. It works fine for closed types, but when printing debug traces (e.g. when using -ddump-tc-trace) we print a lot of /open/ types. These types are full of TcTyVars, and it's absolutely crucial to print them in their full glory, with their unique, TcTyVarDetails etc. So we simply embed a TyVar in IfaceType with the IfaceFreeTyVar constructor. Note that: * We never expect to serialise an IfaceFreeTyVar into an interface file, nor to deserialise one. IfaceFreeTyVar is used only in the "convert to IfaceType and then pretty-print" pipeline. We do the same for covars, naturally. Note [Equality predicates in IfaceType] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ GHC has several varieties of type equality (see Note [The equality types story] in TysPrim for details). In an effort to avoid confusing users, we suppress the differences during pretty printing unless certain flags are enabled. Here is how each equality predicate* is printed in homogeneous and heterogeneous contexts, depending on which combination of the -fprint-explicit-kinds and -fprint-equality-relations flags is used: -------------------------------------------------------------------------------------------- | Predicate | Neither flag | -fprint-explicit-kinds | |-------------------------------|----------------------------|-----------------------------| | a ~ b (homogeneous) | a ~ b | (a :: Type) ~ (b :: Type) | | a ~~ b, homogeneously | a ~ b | (a :: Type) ~ (b :: Type) | | a ~~ b, heterogeneously | a ~~ c | (a :: Type) ~~ (c :: k) | | a ~# b, homogeneously | a ~ b | (a :: Type) ~ (b :: Type) | | a ~# b, heterogeneously | a ~~ c | (a :: Type) ~~ (c :: k) | | Coercible a b (homogeneous) | Coercible a b | Coercible @Type a b | | a ~R# b, homogeneously | Coercible a b | Coercible @Type a b | | a ~R# b, heterogeneously | a ~R# b | (a :: Type) ~R# (c :: k) | |-------------------------------|----------------------------|-----------------------------| | Predicate | -fprint-equality-relations | Both flags | |-------------------------------|----------------------------|-----------------------------| | a ~ b (homogeneous) | a ~ b | (a :: Type) ~ (b :: Type) | | a ~~ b, homogeneously | a ~~ b | (a :: Type) ~~ (b :: Type) | | a ~~ b, heterogeneously | a ~~ c | (a :: Type) ~~ (c :: k) | | a ~# b, homogeneously | a ~# b | (a :: Type) ~# (b :: Type) | | a ~# b, heterogeneously | a ~# c | (a :: Type) ~# (c :: k) | | Coercible a b (homogeneous) | Coercible a b | Coercible @Type a b | | a ~R# b, homogeneously | a ~R# b | (a :: Type) ~R# (b :: Type) | | a ~R# b, heterogeneously | a ~R# b | (a :: Type) ~R# (c :: k) | -------------------------------------------------------------------------------------------- (* There is no heterogeneous, representational, lifted equality counterpart to (~~). There could be, but there seems to be no use for it.) This table adheres to the following rules: A. With -fprint-equality-relations, print the true equality relation. B. Without -fprint-equality-relations: i. 
If the equality is representational and homogeneous, use Coercible. ii. Otherwise, if the equality is representational, use ~R#. iii. If the equality is nominal and homogeneous, use ~. iv. Otherwise, if the equality is nominal, use ~~. C. With -fprint-explicit-kinds, print kinds on both sides of an infix operator, as above; or print the kind with Coercible. D. Without -fprint-explicit-kinds, don't print kinds. A hetero-kinded equality is used homogeneously when it is applied to two identical kinds. Unfortunately, determining this from an IfaceType isn't possible since we can't see through type synonyms. Consequently, we need to record whether this particular application is homogeneous in IfaceTyConSort for the purposes of pretty-printing. See Note [The equality types story] in TysPrim. -} data IfaceTyConInfo -- Used to guide pretty-printing -- and to disambiguate D from 'D (they share a name) = IfaceTyConInfo { ifaceTyConIsPromoted :: PromotionFlag , ifaceTyConSort :: IfaceTyConSort } deriving (Eq) data IfaceMCoercion = IfaceMRefl | IfaceMCo IfaceCoercion data IfaceCoercion = IfaceReflCo IfaceType | IfaceGReflCo Role IfaceType (IfaceMCoercion) | IfaceFunCo Role IfaceCoercion IfaceCoercion | IfaceTyConAppCo Role IfaceTyCon [IfaceCoercion] | IfaceAppCo IfaceCoercion IfaceCoercion | IfaceForAllCo IfaceBndr IfaceCoercion IfaceCoercion | IfaceCoVarCo IfLclName | IfaceAxiomInstCo IfExtName BranchIndex [IfaceCoercion] | IfaceAxiomRuleCo IfLclName [IfaceCoercion] -- There are only a fixed number of CoAxiomRules, so it suffices -- to use an IfaceLclName to distinguish them. -- See Note [Adding built-in type families] in TcTypeNats | IfaceUnivCo IfaceUnivCoProv Role IfaceType IfaceType | IfaceSymCo IfaceCoercion | IfaceTransCo IfaceCoercion IfaceCoercion | IfaceNthCo Int IfaceCoercion | IfaceLRCo LeftOrRight IfaceCoercion | IfaceInstCo IfaceCoercion IfaceCoercion | IfaceKindCo IfaceCoercion | IfaceSubCo IfaceCoercion | IfaceFreeCoVar CoVar -- See Note [Free tyvars in IfaceType] | IfaceHoleCo CoVar -- ^ See Note [Holes in IfaceCoercion] data IfaceUnivCoProv = IfaceUnsafeCoerceProv | IfacePhantomProv IfaceCoercion | IfaceProofIrrelProv IfaceCoercion | IfacePluginProv String {- Note [Holes in IfaceCoercion] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When typechecking fails the typechecker will produce a HoleCo to stand in place of the unproven assertion. While we generally don't want to let these unproven assertions leak into interface files, we still need to be able to pretty-print them as we use IfaceType's pretty-printer to render Types. For this reason IfaceCoercion has an IfaceHoleCo constructor; however, we fail when asked to serialize an IfaceHoleCo to ensure that they don't end up in an interface file.
%************************************************************************ %* * Functions over IFaceTypes * * ************************************************************************ -} ifaceTyConHasKey :: IfaceTyCon -> Unique -> Bool ifaceTyConHasKey tc key = ifaceTyConName tc `hasKey` key isIfaceLiftedTypeKind :: IfaceKind -> Bool isIfaceLiftedTypeKind (IfaceTyConApp tc IA_Nil) = isLiftedTypeKindTyConName (ifaceTyConName tc) isIfaceLiftedTypeKind (IfaceTyConApp tc (IA_Arg (IfaceTyConApp ptr_rep_lifted IA_Nil) Required IA_Nil)) = tc `ifaceTyConHasKey` tYPETyConKey && ptr_rep_lifted `ifaceTyConHasKey` liftedRepDataConKey isIfaceLiftedTypeKind _ = False splitIfaceSigmaTy :: IfaceType -> ([IfaceForAllBndr], [IfacePredType], IfaceType) -- Mainly for printing purposes -- -- Here we split nested IfaceSigmaTy properly. -- -- @ -- forall t. T t => forall m a b. M m => (a -> m b) -> t a -> m (t b) -- @ -- -- If you called @splitIfaceSigmaTy@ on this type: -- -- @ -- ([t, m, a, b], [T t, M m], (a -> m b) -> t a -> m (t b)) -- @ splitIfaceSigmaTy ty = case (bndrs, theta) of ([], []) -> (bndrs, theta, tau) _ -> let (bndrs', theta', tau') = splitIfaceSigmaTy tau in (bndrs ++ bndrs', theta ++ theta', tau') where (bndrs, rho) = split_foralls ty (theta, tau) = split_rho rho split_foralls (IfaceForAllTy bndr ty) = case split_foralls ty of { (bndrs, rho) -> (bndr:bndrs, rho) } split_foralls rho = ([], rho) split_rho (IfaceFunTy InvisArg ty1 ty2) = case split_rho ty2 of { (ps, tau) -> (ty1:ps, tau) } split_rho tau = ([], tau) suppressIfaceInvisibles :: DynFlags -> [IfaceTyConBinder] -> [a] -> [a] suppressIfaceInvisibles dflags tys xs | gopt Opt_PrintExplicitKinds dflags = xs | otherwise = suppress tys xs where suppress _ [] = [] suppress [] a = a suppress (k:ks) (x:xs) | isInvisibleTyConBinder k = suppress ks xs | otherwise = x : suppress ks xs stripIfaceInvisVars :: DynFlags -> [IfaceTyConBinder] -> [IfaceTyConBinder] stripIfaceInvisVars dflags tyvars | gopt Opt_PrintExplicitKinds dflags = tyvars | otherwise = filterOut isInvisibleTyConBinder tyvars -- | Extract an 'IfaceBndr' from an 'IfaceForAllBndr'. ifForAllBndrVar :: IfaceForAllBndr -> IfaceBndr ifForAllBndrVar = binderVar -- | Extract the variable name from an 'IfaceForAllBndr'. ifForAllBndrName :: IfaceForAllBndr -> IfLclName ifForAllBndrName fab = ifaceBndrName (ifForAllBndrVar fab) -- | Extract an 'IfaceBndr' from an 'IfaceTyConBinder'. ifTyConBinderVar :: IfaceTyConBinder -> IfaceBndr ifTyConBinderVar = binderVar -- | Extract the variable name from an 'IfaceTyConBinder'. ifTyConBinderName :: IfaceTyConBinder -> IfLclName ifTyConBinderName tcb = ifaceBndrName (ifTyConBinderVar tcb) ifTypeIsVarFree :: IfaceType -> Bool -- Returns True if the type definitely has no variables at all -- Just used to control pretty printing ifTypeIsVarFree ty = go ty where go (IfaceTyVar {}) = False go (IfaceFreeTyVar {}) = False go (IfaceAppTy fun args) = go fun && go_args args go (IfaceFunTy _ arg res) = go arg && go res go (IfaceForAllTy {}) = False go (IfaceTyConApp _ args) = go_args args go (IfaceTupleTy _ _ args) = go_args args go (IfaceLitTy _) = True go (IfaceCastTy {}) = False -- Safe go (IfaceCoercionTy {}) = False -- Safe go_args IA_Nil = True go_args (IA_Arg arg _ args) = go arg && go_args args {- Note [Substitution on IfaceType] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Substitutions on IfaceType are done only during pretty-printing to construct the result type of a GADT, and does not deal with binders (eg IfaceForAll), so it doesn't need fancy capture stuff. 
-} type IfaceTySubst = FastStringEnv IfaceType -- Note [Substitution on IfaceType] mkIfaceTySubst :: [(IfLclName,IfaceType)] -> IfaceTySubst -- See Note [Substitution on IfaceType] mkIfaceTySubst eq_spec = mkFsEnv eq_spec inDomIfaceTySubst :: IfaceTySubst -> IfaceTvBndr -> Bool -- See Note [Substitution on IfaceType] inDomIfaceTySubst subst (fs, _) = isJust (lookupFsEnv subst fs) substIfaceType :: IfaceTySubst -> IfaceType -> IfaceType -- See Note [Substitution on IfaceType] substIfaceType env ty = go ty where go (IfaceFreeTyVar tv) = IfaceFreeTyVar tv go (IfaceTyVar tv) = substIfaceTyVar env tv go (IfaceAppTy t ts) = IfaceAppTy (go t) (substIfaceAppArgs env ts) go (IfaceFunTy af t1 t2) = IfaceFunTy af (go t1) (go t2) go ty@(IfaceLitTy {}) = ty go (IfaceTyConApp tc tys) = IfaceTyConApp tc (substIfaceAppArgs env tys) go (IfaceTupleTy s i tys) = IfaceTupleTy s i (substIfaceAppArgs env tys) go (IfaceForAllTy {}) = pprPanic "substIfaceType" (ppr ty) go (IfaceCastTy ty co) = IfaceCastTy (go ty) (go_co co) go (IfaceCoercionTy co) = IfaceCoercionTy (go_co co) go_mco IfaceMRefl = IfaceMRefl go_mco (IfaceMCo co) = IfaceMCo $ go_co co go_co (IfaceReflCo ty) = IfaceReflCo (go ty) go_co (IfaceGReflCo r ty mco) = IfaceGReflCo r (go ty) (go_mco mco) go_co (IfaceFunCo r c1 c2) = IfaceFunCo r (go_co c1) (go_co c2) go_co (IfaceTyConAppCo r tc cos) = IfaceTyConAppCo r tc (go_cos cos) go_co (IfaceAppCo c1 c2) = IfaceAppCo (go_co c1) (go_co c2) go_co (IfaceForAllCo {}) = pprPanic "substIfaceCoercion" (ppr ty) go_co (IfaceFreeCoVar cv) = IfaceFreeCoVar cv go_co (IfaceCoVarCo cv) = IfaceCoVarCo cv go_co (IfaceHoleCo cv) = IfaceHoleCo cv go_co (IfaceAxiomInstCo a i cos) = IfaceAxiomInstCo a i (go_cos cos) go_co (IfaceUnivCo prov r t1 t2) = IfaceUnivCo (go_prov prov) r (go t1) (go t2) go_co (IfaceSymCo co) = IfaceSymCo (go_co co) go_co (IfaceTransCo co1 co2) = IfaceTransCo (go_co co1) (go_co co2) go_co (IfaceNthCo n co) = IfaceNthCo n (go_co co) go_co (IfaceLRCo lr co) = IfaceLRCo lr (go_co co) go_co (IfaceInstCo c1 c2) = IfaceInstCo (go_co c1) (go_co c2) go_co (IfaceKindCo co) = IfaceKindCo (go_co co) go_co (IfaceSubCo co) = IfaceSubCo (go_co co) go_co (IfaceAxiomRuleCo n cos) = IfaceAxiomRuleCo n (go_cos cos) go_cos = map go_co go_prov IfaceUnsafeCoerceProv = IfaceUnsafeCoerceProv go_prov (IfacePhantomProv co) = IfacePhantomProv (go_co co) go_prov (IfaceProofIrrelProv co) = IfaceProofIrrelProv (go_co co) go_prov (IfacePluginProv str) = IfacePluginProv str substIfaceAppArgs :: IfaceTySubst -> IfaceAppArgs -> IfaceAppArgs substIfaceAppArgs env args = go args where go IA_Nil = IA_Nil go (IA_Arg ty arg tys) = IA_Arg (substIfaceType env ty) arg (go tys) substIfaceTyVar :: IfaceTySubst -> IfLclName -> IfaceType substIfaceTyVar env tv | Just ty <- lookupFsEnv env tv = ty | otherwise = IfaceTyVar tv {- ************************************************************************ * * Functions over IfaceAppArgs * * ************************************************************************ -} stripInvisArgs :: DynFlags -> IfaceAppArgs -> IfaceAppArgs stripInvisArgs dflags tys | gopt Opt_PrintExplicitKinds dflags = tys | otherwise = suppress_invis tys where suppress_invis c = case c of IA_Nil -> IA_Nil IA_Arg t argf ts | isVisibleArgFlag argf -> IA_Arg t argf $ suppress_invis ts -- Keep recursing through the remainder of the arguments, as it's -- possible that there are remaining invisible ones. -- See the "In type declarations" section of Note [VarBndrs, -- TyCoVarBinders, TyConBinders, and visibility] in TyCoRep. 
              | otherwise -> suppress_invis ts

appArgsIfaceTypes :: IfaceAppArgs -> [IfaceType]
appArgsIfaceTypes IA_Nil = []
appArgsIfaceTypes (IA_Arg t _ ts) = t : appArgsIfaceTypes ts

appArgsIfaceTypesArgFlags :: IfaceAppArgs -> [(IfaceType, ArgFlag)]
appArgsIfaceTypesArgFlags IA_Nil = []
appArgsIfaceTypesArgFlags (IA_Arg t a ts) = (t, a) : appArgsIfaceTypesArgFlags ts

ifaceVisAppArgsLength :: IfaceAppArgs -> Int
ifaceVisAppArgsLength = go 0
  where
    go !n IA_Nil = n
    go n (IA_Arg _ argf rest)
      | isVisibleArgFlag argf = go (n+1) rest
      | otherwise             = go n rest

{-
Note [Suppressing invisible arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We use the IfaceAppArgs data type to specify which of the arguments to a type
should be displayed when pretty-printing, under the control of
-fprint-explicit-kinds.
See also Type.filterOutInvisibleTypes.
For example, given

    T :: forall k. (k->*) -> k -> *    -- Ordinary kind polymorphism
    'Just :: forall k. k -> 'Maybe k   -- Promoted

we want

    T * Tree Int    prints as    T Tree Int
    'Just *         prints as    Just *

For type constructors (IfaceTyConApp), IfaceAppArgs is a quite natural fit,
since the corresponding Core constructor:

    data Type
      = ...
      | TyConApp TyCon [Type]

Already puts all of its arguments into a list. So when converting a Type to an
IfaceType (see toIfaceAppArgsX in GHC.Core.ToIface), we simply use the kind of
the TyCon (which is cached) to guide the process of converting the argument
Types into an IfaceAppArgs list.

We also want this behavior for IfaceAppTy, since given:

    data Proxy (a :: k)
    f :: forall (t :: forall a. a -> Type). Proxy Type (t Bool True)

We want to print the return type as `Proxy (t True)` without the use of
-fprint-explicit-kinds (#15330). Accomplishing this is trickier than in the
tycon case, because the corresponding Core constructor for IfaceAppTy:

    data Type
      = ...
      | AppTy Type Type

Only stores one argument at a time. Therefore, when converting an AppTy to an
IfaceAppTy (in toIfaceTypeX in GHC.CoreToIface), we:

1. Flatten the chain of AppTys down as much as possible
2. Use typeKind to determine the function Type's kind
3. Use this kind to guide the process of converting the argument Types into an
   IfaceAppArgs list.

By flattening the arguments like this, we obtain two benefits:

(a) We can reuse the same machinery to pretty-print IfaceTyConApp arguments as
    we do IfaceTyApp arguments, which means that we only need to implement the
    logic to filter out invisible arguments once.
(b) Unlike for tycons, finding the kind of a type in general (through typeKind)
    is not a constant-time operation, so by flattening the arguments first, we
    decrease the number of times we have to call typeKind.

Note [Pretty-printing invisible arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note [Suppressing invisible arguments] is all about how to avoid printing
invisible arguments when the -fprint-explicit-kinds flag is disabled. Well,
what about when it's enabled? Then we can and should print invisible kind
arguments, and this Note explains how we do it.

As two running examples, consider the following code:

  {-# LANGUAGE PolyKinds #-}

  data T1 a
  data T2 (a :: k)

When displaying these types (with -fprint-explicit-kinds on), we could just
do the following:

  T1 k a
  T2 k a

That certainly gets the job done. But it lacks a crucial piece of information:
is the `k` argument inferred or specified?
To communicate this, we use visible kind application syntax to distinguish the two cases: T1 @{k} a T2 @k a Here, @{k} indicates that `k` is an inferred argument, and @k indicates that `k` is a specified argument. (See Note [VarBndrs, TyCoVarBinders, TyConBinders, and visibility] in TyCoRep for a lengthier explanation on what "inferred" and "specified" mean.) ************************************************************************ * * Pretty-printing * * ************************************************************************ -} if_print_coercions :: SDoc -- ^ if printing coercions -> SDoc -- ^ otherwise -> SDoc if_print_coercions yes no = sdocWithDynFlags $ \dflags -> getPprStyle $ \style -> if gopt Opt_PrintExplicitCoercions dflags || dumpStyle style || debugStyle style then yes else no pprIfaceInfixApp :: PprPrec -> SDoc -> SDoc -> SDoc -> SDoc pprIfaceInfixApp ctxt_prec pp_tc pp_ty1 pp_ty2 = maybeParen ctxt_prec opPrec $ sep [pp_ty1, pp_tc <+> pp_ty2] pprIfacePrefixApp :: PprPrec -> SDoc -> [SDoc] -> SDoc pprIfacePrefixApp ctxt_prec pp_fun pp_tys | null pp_tys = pp_fun | otherwise = maybeParen ctxt_prec appPrec $ hang pp_fun 2 (sep pp_tys) isIfaceTauType :: IfaceType -> Bool isIfaceTauType (IfaceForAllTy _ _) = False isIfaceTauType (IfaceFunTy InvisArg _ _) = False isIfaceTauType _ = True -- ----------------------------- Printing binders ------------------------------------ instance Outputable IfaceBndr where ppr (IfaceIdBndr bndr) = pprIfaceIdBndr bndr ppr (IfaceTvBndr bndr) = char '@' <> pprIfaceTvBndr bndr (SuppressBndrSig False) (UseBndrParens False) pprIfaceBndrs :: [IfaceBndr] -> SDoc pprIfaceBndrs bs = sep (map ppr bs) pprIfaceLamBndr :: IfaceLamBndr -> SDoc pprIfaceLamBndr (b, IfaceNoOneShot) = ppr b pprIfaceLamBndr (b, IfaceOneShot) = ppr b <> text "[OneShot]" pprIfaceIdBndr :: IfaceIdBndr -> SDoc pprIfaceIdBndr (name, ty) = parens (ppr name <+> dcolon <+> ppr ty) {- Note [Suppressing binder signatures] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When printing the binders in a 'forall', we want to keep the kind annotations: forall (a :: k). blah ^^^^ good On the other hand, when we print the binders of a data declaration in :info, the kind information would be redundant due to the standalone kind signature: type F :: Symbol -> Type type F (s :: Symbol) = blah ^^^^^^^^^ redundant Here we'd like to omit the kind annotation: type F :: Symbol -> Type type F s = blah -} -- | Do we want to suppress kind annotations on binders? -- See Note [Suppressing binder signatures] newtype SuppressBndrSig = SuppressBndrSig Bool newtype UseBndrParens = UseBndrParens Bool pprIfaceTvBndr :: IfaceTvBndr -> SuppressBndrSig -> UseBndrParens -> SDoc pprIfaceTvBndr (tv, ki) (SuppressBndrSig suppress_sig) (UseBndrParens use_parens) | suppress_sig = ppr tv | isIfaceLiftedTypeKind ki = ppr tv | otherwise = maybe_parens (ppr tv <+> dcolon <+> ppr ki) where maybe_parens | use_parens = parens | otherwise = id pprIfaceTyConBinders :: SuppressBndrSig -> [IfaceTyConBinder] -> SDoc pprIfaceTyConBinders suppress_sig = sep . map go where go :: IfaceTyConBinder -> SDoc go (Bndr (IfaceIdBndr bndr) _) = pprIfaceIdBndr bndr go (Bndr (IfaceTvBndr bndr) vis) = -- See Note [Pretty-printing invisible arguments] case vis of AnonTCB VisArg -> ppr_bndr (UseBndrParens True) AnonTCB InvisArg -> char '@' <> braces (ppr_bndr (UseBndrParens False)) -- The above case is rare. (See Note [AnonTCB InvisArg] in TyCon.) -- Should we print these differently? 
NamedTCB Required -> ppr_bndr (UseBndrParens True) NamedTCB Specified -> char '@' <> ppr_bndr (UseBndrParens True) NamedTCB Inferred -> char '@' <> braces (ppr_bndr (UseBndrParens False)) where ppr_bndr = pprIfaceTvBndr bndr suppress_sig instance Binary IfaceBndr where put_ bh (IfaceIdBndr aa) = do putByte bh 0 put_ bh aa put_ bh (IfaceTvBndr ab) = do putByte bh 1 put_ bh ab get bh = do h <- getByte bh case h of 0 -> do aa <- get bh return (IfaceIdBndr aa) _ -> do ab <- get bh return (IfaceTvBndr ab) instance Binary IfaceOneShot where put_ bh IfaceNoOneShot = do putByte bh 0 put_ bh IfaceOneShot = do putByte bh 1 get bh = do h <- getByte bh case h of 0 -> do return IfaceNoOneShot _ -> do return IfaceOneShot -- ----------------------------- Printing IfaceType ------------------------------------ --------------------------------- instance Outputable IfaceType where ppr ty = pprIfaceType ty pprIfaceType, pprParendIfaceType :: IfaceType -> SDoc pprIfaceType = pprPrecIfaceType topPrec pprParendIfaceType = pprPrecIfaceType appPrec pprPrecIfaceType :: PprPrec -> IfaceType -> SDoc -- We still need `eliminateRuntimeRep`, since the `pprPrecIfaceType` maybe -- called from other places, besides `:type` and `:info`. pprPrecIfaceType prec ty = eliminateRuntimeRep (ppr_ty prec) ty ppr_sigma :: PprPrec -> IfaceType -> SDoc ppr_sigma ctxt_prec ty = maybeParen ctxt_prec funPrec (pprIfaceSigmaType ShowForAllMust ty) ppr_ty :: PprPrec -> IfaceType -> SDoc ppr_ty ctxt_prec ty@(IfaceForAllTy {}) = ppr_sigma ctxt_prec ty ppr_ty ctxt_prec ty@(IfaceFunTy InvisArg _ _) = ppr_sigma ctxt_prec ty ppr_ty _ (IfaceFreeTyVar tyvar) = ppr tyvar -- This is the main reason for IfaceFreeTyVar! ppr_ty _ (IfaceTyVar tyvar) = ppr tyvar -- See Note [TcTyVars in IfaceType] ppr_ty ctxt_prec (IfaceTyConApp tc tys) = pprTyTcApp ctxt_prec tc tys ppr_ty ctxt_prec (IfaceTupleTy i p tys) = pprTuple ctxt_prec i p tys ppr_ty _ (IfaceLitTy n) = pprIfaceTyLit n -- Function types ppr_ty ctxt_prec (IfaceFunTy _ ty1 ty2) -- Should be VisArg = -- We don't want to lose synonyms, so we mustn't use splitFunTys here. maybeParen ctxt_prec funPrec $ sep [ppr_ty funPrec ty1, sep (ppr_fun_tail ty2)] where ppr_fun_tail (IfaceFunTy VisArg ty1 ty2) = (arrow <+> ppr_ty funPrec ty1) : ppr_fun_tail ty2 ppr_fun_tail other_ty = [arrow <+> pprIfaceType other_ty] ppr_ty ctxt_prec (IfaceAppTy t ts) = if_print_coercions ppr_app_ty ppr_app_ty_no_casts where ppr_app_ty = sdocWithDynFlags $ \dflags -> pprIfacePrefixApp ctxt_prec (ppr_ty funPrec t) (map (ppr_app_arg appPrec) (tys_wo_kinds dflags)) tys_wo_kinds dflags = appArgsIfaceTypesArgFlags $ stripInvisArgs dflags ts -- Strip any casts from the head of the application ppr_app_ty_no_casts = case t of IfaceCastTy head _ -> ppr_ty ctxt_prec (mk_app_tys head ts) _ -> ppr_app_ty mk_app_tys :: IfaceType -> IfaceAppArgs -> IfaceType mk_app_tys (IfaceTyConApp tc tys1) tys2 = IfaceTyConApp tc (tys1 `mappend` tys2) mk_app_tys t1 tys2 = IfaceAppTy t1 tys2 ppr_ty ctxt_prec (IfaceCastTy ty co) = if_print_coercions (parens (ppr_ty topPrec ty <+> text "|>" <+> ppr co)) (ppr_ty ctxt_prec ty) ppr_ty ctxt_prec (IfaceCoercionTy co) = if_print_coercions (ppr_co ctxt_prec co) (text "<>") {- Note [Defaulting RuntimeRep variables] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ RuntimeRep variables are considered by many (most?) users to be little more than syntactic noise. 
When the notion was introduced there was a significant and understandable
push-back from those with pedagogy in mind, which argued that RuntimeRep
variables would throw a wrench into nearly any teaching approach since they
appear in even the lowly ($) function's type,

    ($) :: forall (w :: RuntimeRep) a (b :: TYPE w). (a -> b) -> a -> b

which is significantly less readable than its non-RuntimeRep-polymorphic type of

    ($) :: (a -> b) -> a -> b

Moreover, unboxed types don't appear all that often in run-of-the-mill
Haskell programs, so it makes little sense to make all users pay this
syntactic overhead.

For this reason it was decided that we would hide RuntimeRep variables
for now (see #11549). We do this by defaulting all type variables of
kind RuntimeRep to LiftedRep. This is done in a pass right before
pretty-printing (defaultRuntimeRepVars, controlled by
-fprint-explicit-runtime-reps).

This applies to /quantified/ variables like 'w' above. What about
variables that are /free/ in the type being printed, which certainly
happens in error messages? Suppose (#16074) we are reporting a mismatch
between two skolems
          (a :: RuntimeRep) ~ (b :: RuntimeRep)
We certainly don't want to say "Can't match LiftedRep ~ LiftedRep"!

But if we are printing the type
    (forall (a :: TYPE r). blah)
we do want to turn that (free) r into LiftedRep, so it prints as
    (forall a. blah)

Conclusion: keep track of whether we are in the kind of a binder; only if so,
convert free RuntimeRep variables to LiftedRep.
-}

-- | Default 'RuntimeRep' variables to 'LiftedRep'. e.g.
--
-- @
-- ($) :: forall (r :: GHC.Types.RuntimeRep) a (b :: TYPE r).
--        (a -> b) -> a -> b
-- @
--
-- turns into
--
-- @ ($) :: forall a (b :: *). (a -> b) -> a -> b @
--
-- We do this to prevent RuntimeRep variables from incurring a significant
-- syntactic overhead in otherwise simple type signatures (e.g. ($)). See
-- Note [Defaulting RuntimeRep variables] and #11549 for further discussion.
-- defaultRuntimeRepVars :: IfaceType -> IfaceType defaultRuntimeRepVars ty = go False emptyFsEnv ty where go :: Bool -- True <=> Inside the kind of a binder -> FastStringEnv () -- Set of enclosing forall-ed RuntimeRep variables -> IfaceType -- (replace them with LiftedRep) -> IfaceType go ink subs (IfaceForAllTy (Bndr (IfaceTvBndr (var, var_kind)) argf) ty) | isRuntimeRep var_kind , isInvisibleArgFlag argf -- Don't default *visible* quantification -- or we get the mess in #13963 = let subs' = extendFsEnv subs var () -- Record that we should replace it with LiftedRep, -- and recurse, discarding the forall in go ink subs' ty go ink subs (IfaceForAllTy bndr ty) = IfaceForAllTy (go_ifacebndr subs bndr) (go ink subs ty) go _ subs ty@(IfaceTyVar tv) | tv `elemFsEnv` subs = IfaceTyConApp liftedRep IA_Nil | otherwise = ty go in_kind _ ty@(IfaceFreeTyVar tv) -- See Note [Defaulting RuntimeRep variables], about free vars | in_kind && Type.isRuntimeRepTy (tyVarKind tv) = IfaceTyConApp liftedRep IA_Nil | otherwise = ty go ink subs (IfaceTyConApp tc tc_args) = IfaceTyConApp tc (go_args ink subs tc_args) go ink subs (IfaceTupleTy sort is_prom tc_args) = IfaceTupleTy sort is_prom (go_args ink subs tc_args) go ink subs (IfaceFunTy af arg res) = IfaceFunTy af (go ink subs arg) (go ink subs res) go ink subs (IfaceAppTy t ts) = IfaceAppTy (go ink subs t) (go_args ink subs ts) go ink subs (IfaceCastTy x co) = IfaceCastTy (go ink subs x) co go _ _ ty@(IfaceLitTy {}) = ty go _ _ ty@(IfaceCoercionTy {}) = ty go_ifacebndr :: FastStringEnv () -> IfaceForAllBndr -> IfaceForAllBndr go_ifacebndr subs (Bndr (IfaceIdBndr (n, t)) argf) = Bndr (IfaceIdBndr (n, go True subs t)) argf go_ifacebndr subs (Bndr (IfaceTvBndr (n, t)) argf) = Bndr (IfaceTvBndr (n, go True subs t)) argf go_args :: Bool -> FastStringEnv () -> IfaceAppArgs -> IfaceAppArgs go_args _ _ IA_Nil = IA_Nil go_args ink subs (IA_Arg ty argf args) = IA_Arg (go ink subs ty) argf (go_args ink subs args) liftedRep :: IfaceTyCon liftedRep = IfaceTyCon dc_name (IfaceTyConInfo IsPromoted IfaceNormalTyCon) where dc_name = getName liftedRepDataConTyCon isRuntimeRep :: IfaceType -> Bool isRuntimeRep (IfaceTyConApp tc _) = tc `ifaceTyConHasKey` runtimeRepTyConKey isRuntimeRep _ = False eliminateRuntimeRep :: (IfaceType -> SDoc) -> IfaceType -> SDoc eliminateRuntimeRep f ty = sdocWithDynFlags $ \dflags -> getPprStyle $ \sty -> if userStyle sty && not (gopt Opt_PrintExplicitRuntimeReps dflags) then f (defaultRuntimeRepVars ty) else f ty instance Outputable IfaceAppArgs where ppr tca = pprIfaceAppArgs tca pprIfaceAppArgs, pprParendIfaceAppArgs :: IfaceAppArgs -> SDoc pprIfaceAppArgs = ppr_app_args topPrec pprParendIfaceAppArgs = ppr_app_args appPrec ppr_app_args :: PprPrec -> IfaceAppArgs -> SDoc ppr_app_args ctx_prec = go where go :: IfaceAppArgs -> SDoc go IA_Nil = empty go (IA_Arg t argf ts) = ppr_app_arg ctx_prec (t, argf) <+> go ts -- See Note [Pretty-printing invisible arguments] ppr_app_arg :: PprPrec -> (IfaceType, ArgFlag) -> SDoc ppr_app_arg ctx_prec (t, argf) = sdocWithDynFlags $ \dflags -> let print_kinds = gopt Opt_PrintExplicitKinds dflags in case argf of Required -> ppr_ty ctx_prec t Specified | print_kinds -> char '@' <> ppr_ty appPrec t Inferred | print_kinds -> char '@' <> braces (ppr_ty topPrec t) _ -> empty ------------------- pprIfaceForAllPart :: [IfaceForAllBndr] -> [IfacePredType] -> SDoc -> SDoc pprIfaceForAllPart tvs ctxt sdoc = ppr_iface_forall_part ShowForAllWhen tvs ctxt sdoc -- | Like 'pprIfaceForAllPart', but always uses an explicit 
@forall@. pprIfaceForAllPartMust :: [IfaceForAllBndr] -> [IfacePredType] -> SDoc -> SDoc pprIfaceForAllPartMust tvs ctxt sdoc = ppr_iface_forall_part ShowForAllMust tvs ctxt sdoc pprIfaceForAllCoPart :: [(IfLclName, IfaceCoercion)] -> SDoc -> SDoc pprIfaceForAllCoPart tvs sdoc = sep [ pprIfaceForAllCo tvs, sdoc ] ppr_iface_forall_part :: ShowForAllFlag -> [IfaceForAllBndr] -> [IfacePredType] -> SDoc -> SDoc ppr_iface_forall_part show_forall tvs ctxt sdoc = sep [ case show_forall of ShowForAllMust -> pprIfaceForAll tvs ShowForAllWhen -> pprUserIfaceForAll tvs , pprIfaceContextArr ctxt , sdoc] -- | Render the "forall ... ." or "forall ... ->" bit of a type. pprIfaceForAll :: [IfaceForAllBndr] -> SDoc pprIfaceForAll [] = empty pprIfaceForAll bndrs@(Bndr _ vis : _) = sep [ add_separator (forAllLit <+> fsep docs) , pprIfaceForAll bndrs' ] where (bndrs', docs) = ppr_itv_bndrs bndrs vis add_separator stuff = case vis of Required -> stuff <+> arrow _inv -> stuff <> dot -- | Render the ... in @(forall ... .)@ or @(forall ... ->)@. -- Returns both the list of not-yet-rendered binders and the doc. -- No anonymous binders here! ppr_itv_bndrs :: [IfaceForAllBndr] -> ArgFlag -- ^ visibility of the first binder in the list -> ([IfaceForAllBndr], [SDoc]) ppr_itv_bndrs all_bndrs@(bndr@(Bndr _ vis) : bndrs) vis1 | vis `sameVis` vis1 = let (bndrs', doc) = ppr_itv_bndrs bndrs vis1 in (bndrs', pprIfaceForAllBndr bndr : doc) | otherwise = (all_bndrs, []) ppr_itv_bndrs [] _ = ([], []) pprIfaceForAllCo :: [(IfLclName, IfaceCoercion)] -> SDoc pprIfaceForAllCo [] = empty pprIfaceForAllCo tvs = text "forall" <+> pprIfaceForAllCoBndrs tvs <> dot pprIfaceForAllCoBndrs :: [(IfLclName, IfaceCoercion)] -> SDoc pprIfaceForAllCoBndrs bndrs = hsep $ map pprIfaceForAllCoBndr bndrs pprIfaceForAllBndr :: IfaceForAllBndr -> SDoc pprIfaceForAllBndr bndr = case bndr of Bndr (IfaceTvBndr tv) Inferred -> sdocWithDynFlags $ \dflags -> if gopt Opt_PrintExplicitForalls dflags then braces $ pprIfaceTvBndr tv suppress_sig (UseBndrParens False) else pprIfaceTvBndr tv suppress_sig (UseBndrParens True) Bndr (IfaceTvBndr tv) _ -> pprIfaceTvBndr tv suppress_sig (UseBndrParens True) Bndr (IfaceIdBndr idv) _ -> pprIfaceIdBndr idv where -- See Note [Suppressing binder signatures] suppress_sig = SuppressBndrSig False pprIfaceForAllCoBndr :: (IfLclName, IfaceCoercion) -> SDoc pprIfaceForAllCoBndr (tv, kind_co) = parens (ppr tv <+> dcolon <+> pprIfaceCoercion kind_co) -- | Show forall flag -- -- Unconditionally show the forall quantifier with ('ShowForAllMust') -- or when ('ShowForAllWhen') the names used are free in the binder -- or when compiling with -fprint-explicit-foralls. data ShowForAllFlag = ShowForAllMust | ShowForAllWhen pprIfaceSigmaType :: ShowForAllFlag -> IfaceType -> SDoc pprIfaceSigmaType show_forall ty = eliminateRuntimeRep ppr_fn ty where ppr_fn iface_ty = let (tvs, theta, tau) = splitIfaceSigmaTy iface_ty in ppr_iface_forall_part show_forall tvs theta (ppr tau) pprUserIfaceForAll :: [IfaceForAllBndr] -> SDoc pprUserIfaceForAll tvs = sdocWithDynFlags $ \dflags -> -- See Note [When to print foralls] in this module. ppWhen (any tv_has_kind_var tvs || any tv_is_required tvs || gopt Opt_PrintExplicitForalls dflags) $ pprIfaceForAll tvs where tv_has_kind_var (Bndr (IfaceTvBndr (_,kind)) _) = not (ifTypeIsVarFree kind) tv_has_kind_var _ = False tv_is_required = isVisibleArgFlag . 
binderArgFlag {- Note [When to print foralls] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We opt to explicitly pretty-print `forall`s if any of the following criteria are met: 1. -fprint-explicit-foralls is on. 2. A bound type variable has a polymorphic kind. E.g., forall k (a::k). Proxy a -> Proxy a Since a's kind mentions a variable k, we print the foralls. 3. A bound type variable is a visible argument (#14238). Suppose we are printing the kind of: T :: forall k -> k -> Type The "forall k ->" notation means that this kind argument is required. That is, it must be supplied at uses of T. E.g., f :: T (Type->Type) Monad -> Int So we print an explicit "T :: forall k -> k -> Type", because omitting it and printing "T :: k -> Type" would be utterly misleading. See Note [VarBndrs, TyCoVarBinders, TyConBinders, and visibility] in TyCoRep. N.B. Until now (Aug 2018) we didn't check anything for coercion variables. Note [Printing foralls in type family instances] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We use the same criteria as in Note [When to print foralls] to determine whether a type family instance should be pretty-printed with an explicit `forall`. Example: type family Foo (a :: k) :: k where Foo Maybe = [] Foo (a :: Type) = Int Foo a = a Without -fprint-explicit-foralls enabled, this will be pretty-printed as: type family Foo (a :: k) :: k where Foo Maybe = [] Foo a = Int forall k (a :: k). Foo a = a Note that only the third equation has an explicit forall, since it has a type variable with a non-Type kind. (If -fprint-explicit-foralls were enabled, then the second equation would be preceded with `forall a.`.) There is one tricky point in the implementation: what visibility do we give the type variables in a type family instance? Type family instances only store type *variables*, not type variable *binders*, and only the latter has visibility information. We opt to default the visibility of each of these type variables to Specified because users can't ever instantiate these variables manually, so the choice of visibility is only relevant to pretty-printing. (This is why the `k` in `forall k (a :: k). ...` above is printed the way it is, even though it wasn't written explicitly in the original source code.) We adopt the same strategy for data family instances. Example: data family DF (a :: k) data instance DF '[a, b] = DFList That data family instance is pretty-printed as: data instance forall j (a :: j) (b :: j). DF '[a, b] = DFList This is despite that the representation tycon for this data instance (call it $DF:List) actually has different visibilities for its binders. However, the visibilities of these binders are utterly irrelevant to the programmer, who cares only about the specificity of variables in `DF`'s type, not $DF:List's type. Therefore, we opt to pretty-print all variables in data family instances as Specified. Note [Printing promoted type constructors] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider this GHCi session (#14343) > _ :: Proxy '[ 'True ] error: Found hole: _ :: Proxy '['True] This would be bad, because the '[' looks like a character literal. Solution: in type-level lists and tuples, add a leading space if the first type is itself promoted. See pprSpaceIfPromotedTyCon. -} ------------------- -- | Prefix a space if the given 'IfaceType' is a promoted 'TyCon'. 
-- See Note [Printing promoted type constructors] pprSpaceIfPromotedTyCon :: IfaceType -> SDoc -> SDoc pprSpaceIfPromotedTyCon (IfaceTyConApp tyCon _) = case ifaceTyConIsPromoted (ifaceTyConInfo tyCon) of IsPromoted -> (space <>) _ -> id pprSpaceIfPromotedTyCon _ = id -- See equivalent function in TyCoRep.hs pprIfaceTyList :: PprPrec -> IfaceType -> IfaceType -> SDoc -- Given a type-level list (t1 ': t2), see if we can print -- it in list notation [t1, ...]. -- Precondition: Opt_PrintExplicitKinds is off pprIfaceTyList ctxt_prec ty1 ty2 = case gather ty2 of (arg_tys, Nothing) -> char '\'' <> brackets (pprSpaceIfPromotedTyCon ty1 (fsep (punctuate comma (map (ppr_ty topPrec) (ty1:arg_tys))))) (arg_tys, Just tl) -> maybeParen ctxt_prec funPrec $ hang (ppr_ty funPrec ty1) 2 (fsep [ colon <+> ppr_ty funPrec ty | ty <- arg_tys ++ [tl]]) where gather :: IfaceType -> ([IfaceType], Maybe IfaceType) -- (gather ty) = (tys, Nothing) means ty is a list [t1, .., tn] -- = (tys, Just tl) means ty is of form t1:t2:...tn:tl gather (IfaceTyConApp tc tys) | tc `ifaceTyConHasKey` consDataConKey , IA_Arg _ argf (IA_Arg ty1 Required (IA_Arg ty2 Required IA_Nil)) <- tys , isInvisibleArgFlag argf , (args, tl) <- gather ty2 = (ty1:args, tl) | tc `ifaceTyConHasKey` nilDataConKey = ([], Nothing) gather ty = ([], Just ty) pprIfaceTypeApp :: PprPrec -> IfaceTyCon -> IfaceAppArgs -> SDoc pprIfaceTypeApp prec tc args = pprTyTcApp prec tc args pprTyTcApp :: PprPrec -> IfaceTyCon -> IfaceAppArgs -> SDoc pprTyTcApp ctxt_prec tc tys = sdocWithDynFlags $ \dflags -> getPprStyle $ \style -> pprTyTcApp' ctxt_prec tc tys dflags style pprTyTcApp' :: PprPrec -> IfaceTyCon -> IfaceAppArgs -> DynFlags -> PprStyle -> SDoc pprTyTcApp' ctxt_prec tc tys dflags style | ifaceTyConName tc `hasKey` ipClassKey , IA_Arg (IfaceLitTy (IfaceStrTyLit n)) Required (IA_Arg ty Required IA_Nil) <- tys = maybeParen ctxt_prec funPrec $ char '?' <> ftext n <> text "::" <> ppr_ty topPrec ty | IfaceTupleTyCon arity sort <- ifaceTyConSort info , not (debugStyle style) , arity == ifaceVisAppArgsLength tys = pprTuple ctxt_prec sort (ifaceTyConIsPromoted info) tys | IfaceSumTyCon arity <- ifaceTyConSort info = pprSum arity (ifaceTyConIsPromoted info) tys | tc `ifaceTyConHasKey` consDataConKey , not (gopt Opt_PrintExplicitKinds dflags) , IA_Arg _ argf (IA_Arg ty1 Required (IA_Arg ty2 Required IA_Nil)) <- tys , isInvisibleArgFlag argf = pprIfaceTyList ctxt_prec ty1 ty2 | tc `ifaceTyConHasKey` tYPETyConKey , IA_Arg (IfaceTyConApp rep IA_Nil) Required IA_Nil <- tys , rep `ifaceTyConHasKey` liftedRepDataConKey = ppr_kind_type ctxt_prec | otherwise = getPprDebug $ \dbg -> if | not dbg && tc `ifaceTyConHasKey` errorMessageTypeErrorFamKey -- Suppress detail unless you _really_ want to see -> text "(TypeError ...)" | Just doc <- ppr_equality ctxt_prec tc (appArgsIfaceTypes tys) -> doc | otherwise -> ppr_iface_tc_app ppr_app_arg ctxt_prec tc tys_wo_kinds where info = ifaceTyConInfo tc tys_wo_kinds = appArgsIfaceTypesArgFlags $ stripInvisArgs dflags tys ppr_kind_type :: PprPrec -> SDoc ppr_kind_type ctxt_prec = sdocWithDynFlags $ \dflags -> if useStarIsType dflags then maybeParen ctxt_prec starPrec $ unicodeSyntax (char '★') (char '*') else text "Type" -- | Pretty-print a type-level equality. 
-- Returns (Just doc) if the argument is a /saturated/ application -- of eqTyCon (~) -- eqPrimTyCon (~#) -- eqReprPrimTyCon (~R#) -- heqTyCon (~~) -- -- See Note [Equality predicates in IfaceType] -- and Note [The equality types story] in TysPrim ppr_equality :: PprPrec -> IfaceTyCon -> [IfaceType] -> Maybe SDoc ppr_equality ctxt_prec tc args | hetero_eq_tc , [k1, k2, t1, t2] <- args = Just $ print_equality (k1, k2, t1, t2) | hom_eq_tc , [k, t1, t2] <- args = Just $ print_equality (k, k, t1, t2) | otherwise = Nothing where homogeneous = tc_name `hasKey` eqTyConKey -- (~) || hetero_tc_used_homogeneously where hetero_tc_used_homogeneously = case ifaceTyConSort $ ifaceTyConInfo tc of IfaceEqualityTyCon -> True _other -> False -- True <=> a heterogeneous equality whose arguments -- are (in this case) of the same kind tc_name = ifaceTyConName tc pp = ppr_ty hom_eq_tc = tc_name `hasKey` eqTyConKey -- (~) hetero_eq_tc = tc_name `hasKey` eqPrimTyConKey -- (~#) || tc_name `hasKey` eqReprPrimTyConKey -- (~R#) || tc_name `hasKey` heqTyConKey -- (~~) nominal_eq_tc = tc_name `hasKey` heqTyConKey -- (~~) || tc_name `hasKey` eqPrimTyConKey -- (~#) print_equality args = sdocWithDynFlags $ \dflags -> getPprStyle $ \style -> print_equality' args style dflags print_equality' (ki1, ki2, ty1, ty2) style dflags | -- If -fprint-equality-relations is on, just print the original TyCon print_eqs = ppr_infix_eq (ppr tc) | -- Homogeneous use of heterogeneous equality (ty1 ~~ ty2) -- or unlifted equality (ty1 ~# ty2) nominal_eq_tc, homogeneous = ppr_infix_eq (text "~") | -- Heterogeneous use of unlifted equality (ty1 ~# ty2) not homogeneous = ppr_infix_eq (ppr heqTyCon) | -- Homogeneous use of representational unlifted equality (ty1 ~R# ty2) tc_name `hasKey` eqReprPrimTyConKey, homogeneous = let ki | print_kinds = [pp appPrec ki1] | otherwise = [] in pprIfacePrefixApp ctxt_prec (ppr coercibleTyCon) (ki ++ [pp appPrec ty1, pp appPrec ty2]) -- The other cases work as you'd expect | otherwise = ppr_infix_eq (ppr tc) where ppr_infix_eq :: SDoc -> SDoc ppr_infix_eq eq_op = pprIfaceInfixApp ctxt_prec eq_op (pp_ty_ki ty1 ki1) (pp_ty_ki ty2 ki2) where pp_ty_ki ty ki | print_kinds = parens (pp topPrec ty <+> dcolon <+> pp opPrec ki) | otherwise = pp opPrec ty print_kinds = gopt Opt_PrintExplicitKinds dflags print_eqs = gopt Opt_PrintEqualityRelations dflags || dumpStyle style || debugStyle style pprIfaceCoTcApp :: PprPrec -> IfaceTyCon -> [IfaceCoercion] -> SDoc pprIfaceCoTcApp ctxt_prec tc tys = ppr_iface_tc_app (\prec (co, _) -> ppr_co prec co) ctxt_prec tc (map (, Required) tys) -- We are trying to re-use ppr_iface_tc_app here, which requires its -- arguments to be accompanied by visibilities. But visibility is -- irrelevant when printing coercions, so just default everything to -- Required. -- | Pretty-prints an application of a type constructor to some arguments -- (whose visibilities are known). This is polymorphic (over @a@) since we use -- this function to pretty-print two different things: -- -- 1. Types (from `pprTyTcApp'`) -- -- 2. 
Coercions (from 'pprIfaceCoTcApp') ppr_iface_tc_app :: (PprPrec -> (a, ArgFlag) -> SDoc) -> PprPrec -> IfaceTyCon -> [(a, ArgFlag)] -> SDoc ppr_iface_tc_app pp _ tc [ty] | tc `ifaceTyConHasKey` listTyConKey = pprPromotionQuote tc <> brackets (pp topPrec ty) ppr_iface_tc_app pp ctxt_prec tc tys | tc `ifaceTyConHasKey` liftedTypeKindTyConKey = ppr_kind_type ctxt_prec | not (isSymOcc (nameOccName (ifaceTyConName tc))) = pprIfacePrefixApp ctxt_prec (ppr tc) (map (pp appPrec) tys) | [ ty1@(_, Required) , ty2@(_, Required) ] <- tys -- Infix, two visible arguments (we know nothing of precedence though). -- Don't apply this special case if one of the arguments is invisible, -- lest we print something like (@LiftedRep -> @LiftedRep) (#15941). = pprIfaceInfixApp ctxt_prec (ppr tc) (pp opPrec ty1) (pp opPrec ty2) | otherwise = pprIfacePrefixApp ctxt_prec (parens (ppr tc)) (map (pp appPrec) tys) pprSum :: Arity -> PromotionFlag -> IfaceAppArgs -> SDoc pprSum _arity is_promoted args = -- drop the RuntimeRep vars. -- See Note [Unboxed tuple RuntimeRep vars] in TyCon let tys = appArgsIfaceTypes args args' = drop (length tys `div` 2) tys in pprPromotionQuoteI is_promoted <> sumParens (pprWithBars (ppr_ty topPrec) args') pprTuple :: PprPrec -> TupleSort -> PromotionFlag -> IfaceAppArgs -> SDoc pprTuple ctxt_prec sort promoted args = case promoted of IsPromoted -> let tys = appArgsIfaceTypes args args' = drop (length tys `div` 2) tys spaceIfPromoted = case args' of arg0:_ -> pprSpaceIfPromotedTyCon arg0 _ -> id in ppr_tuple_app args' $ pprPromotionQuoteI IsPromoted <> tupleParens sort (spaceIfPromoted (pprWithCommas pprIfaceType args')) NotPromoted | ConstraintTuple <- sort , IA_Nil <- args -> maybeParen ctxt_prec sigPrec $ text "() :: Constraint" | otherwise -> -- drop the RuntimeRep vars. 
-- See Note [Unboxed tuple RuntimeRep vars] in TyCon let tys = appArgsIfaceTypes args args' = case sort of UnboxedTuple -> drop (length tys `div` 2) tys _ -> tys in ppr_tuple_app args' $ pprPromotionQuoteI promoted <> tupleParens sort (pprWithCommas pprIfaceType args') where ppr_tuple_app :: [IfaceType] -> SDoc -> SDoc ppr_tuple_app args_wo_runtime_reps ppr_args_w_parens -- Special-case unary boxed tuples so that they are pretty-printed as -- `Unit x`, not `(x)` | [_] <- args_wo_runtime_reps , BoxedTuple <- sort = let unit_tc_info = IfaceTyConInfo promoted IfaceNormalTyCon unit_tc = IfaceTyCon (tupleTyConName sort 1) unit_tc_info in pprPrecIfaceType ctxt_prec $ IfaceTyConApp unit_tc args | otherwise = ppr_args_w_parens pprIfaceTyLit :: IfaceTyLit -> SDoc pprIfaceTyLit (IfaceNumTyLit n) = integer n pprIfaceTyLit (IfaceStrTyLit n) = text (show n) pprIfaceCoercion, pprParendIfaceCoercion :: IfaceCoercion -> SDoc pprIfaceCoercion = ppr_co topPrec pprParendIfaceCoercion = ppr_co appPrec ppr_co :: PprPrec -> IfaceCoercion -> SDoc ppr_co _ (IfaceReflCo ty) = angleBrackets (ppr ty) <> ppr_role Nominal ppr_co _ (IfaceGReflCo r ty IfaceMRefl) = angleBrackets (ppr ty) <> ppr_role r ppr_co ctxt_prec (IfaceGReflCo r ty (IfaceMCo co)) = ppr_special_co ctxt_prec (text "GRefl" <+> ppr r <+> pprParendIfaceType ty) [co] ppr_co ctxt_prec (IfaceFunCo r co1 co2) = maybeParen ctxt_prec funPrec $ sep (ppr_co funPrec co1 : ppr_fun_tail co2) where ppr_fun_tail (IfaceFunCo r co1 co2) = (arrow <> ppr_role r <+> ppr_co funPrec co1) : ppr_fun_tail co2 ppr_fun_tail other_co = [arrow <> ppr_role r <+> pprIfaceCoercion other_co] ppr_co _ (IfaceTyConAppCo r tc cos) = parens (pprIfaceCoTcApp topPrec tc cos) <> ppr_role r ppr_co ctxt_prec (IfaceAppCo co1 co2) = maybeParen ctxt_prec appPrec $ ppr_co funPrec co1 <+> pprParendIfaceCoercion co2 ppr_co ctxt_prec co@(IfaceForAllCo {}) = maybeParen ctxt_prec funPrec $ pprIfaceForAllCoPart tvs (pprIfaceCoercion inner_co) where (tvs, inner_co) = split_co co split_co (IfaceForAllCo (IfaceTvBndr (name, _)) kind_co co') = let (tvs, co'') = split_co co' in ((name,kind_co):tvs,co'') split_co (IfaceForAllCo (IfaceIdBndr (name, _)) kind_co co') = let (tvs, co'') = split_co co' in ((name,kind_co):tvs,co'') split_co co' = ([], co') -- Why these three? 
See Note [TcTyVars in IfaceType] ppr_co _ (IfaceFreeCoVar covar) = ppr covar ppr_co _ (IfaceCoVarCo covar) = ppr covar ppr_co _ (IfaceHoleCo covar) = braces (ppr covar) ppr_co ctxt_prec (IfaceUnivCo IfaceUnsafeCoerceProv r ty1 ty2) = maybeParen ctxt_prec appPrec $ text "UnsafeCo" <+> ppr r <+> pprParendIfaceType ty1 <+> pprParendIfaceType ty2 ppr_co _ (IfaceUnivCo prov role ty1 ty2) = text "Univ" <> (parens $ sep [ ppr role <+> pprIfaceUnivCoProv prov , dcolon <+> ppr ty1 <> comma <+> ppr ty2 ]) ppr_co ctxt_prec (IfaceInstCo co ty) = maybeParen ctxt_prec appPrec $ text "Inst" <+> pprParendIfaceCoercion co <+> pprParendIfaceCoercion ty ppr_co ctxt_prec (IfaceAxiomRuleCo tc cos) = maybeParen ctxt_prec appPrec $ ppr tc <+> parens (interpp'SP cos) ppr_co ctxt_prec (IfaceAxiomInstCo n i cos) = ppr_special_co ctxt_prec (ppr n <> brackets (ppr i)) cos ppr_co ctxt_prec (IfaceSymCo co) = ppr_special_co ctxt_prec (text "Sym") [co] ppr_co ctxt_prec (IfaceTransCo co1 co2) = maybeParen ctxt_prec opPrec $ ppr_co opPrec co1 <+> semi <+> ppr_co opPrec co2 ppr_co ctxt_prec (IfaceNthCo d co) = ppr_special_co ctxt_prec (text "Nth:" <> int d) [co] ppr_co ctxt_prec (IfaceLRCo lr co) = ppr_special_co ctxt_prec (ppr lr) [co] ppr_co ctxt_prec (IfaceSubCo co) = ppr_special_co ctxt_prec (text "Sub") [co] ppr_co ctxt_prec (IfaceKindCo co) = ppr_special_co ctxt_prec (text "Kind") [co] ppr_special_co :: PprPrec -> SDoc -> [IfaceCoercion] -> SDoc ppr_special_co ctxt_prec doc cos = maybeParen ctxt_prec appPrec (sep [doc, nest 4 (sep (map pprParendIfaceCoercion cos))]) ppr_role :: Role -> SDoc ppr_role r = underscore <> pp_role where pp_role = case r of Nominal -> char 'N' Representational -> char 'R' Phantom -> char 'P' ------------------ pprIfaceUnivCoProv :: IfaceUnivCoProv -> SDoc pprIfaceUnivCoProv IfaceUnsafeCoerceProv = text "unsafe" pprIfaceUnivCoProv (IfacePhantomProv co) = text "phantom" <+> pprParendIfaceCoercion co pprIfaceUnivCoProv (IfaceProofIrrelProv co) = text "irrel" <+> pprParendIfaceCoercion co pprIfaceUnivCoProv (IfacePluginProv s) = text "plugin" <+> doubleQuotes (text s) ------------------- instance Outputable IfaceTyCon where ppr tc = pprPromotionQuote tc <> ppr (ifaceTyConName tc) pprPromotionQuote :: IfaceTyCon -> SDoc pprPromotionQuote tc = pprPromotionQuoteI $ ifaceTyConIsPromoted $ ifaceTyConInfo tc pprPromotionQuoteI :: PromotionFlag -> SDoc pprPromotionQuoteI NotPromoted = empty pprPromotionQuoteI IsPromoted = char '\'' instance Outputable IfaceCoercion where ppr = pprIfaceCoercion instance Binary IfaceTyCon where put_ bh (IfaceTyCon n i) = put_ bh n >> put_ bh i get bh = do n <- get bh i <- get bh return (IfaceTyCon n i) instance Binary IfaceTyConSort where put_ bh IfaceNormalTyCon = putByte bh 0 put_ bh (IfaceTupleTyCon arity sort) = putByte bh 1 >> put_ bh arity >> put_ bh sort put_ bh (IfaceSumTyCon arity) = putByte bh 2 >> put_ bh arity put_ bh IfaceEqualityTyCon = putByte bh 3 get bh = do n <- getByte bh case n of 0 -> return IfaceNormalTyCon 1 -> IfaceTupleTyCon <$> get bh <*> get bh 2 -> IfaceSumTyCon <$> get bh _ -> return IfaceEqualityTyCon instance Binary IfaceTyConInfo where put_ bh (IfaceTyConInfo i s) = put_ bh i >> put_ bh s get bh = IfaceTyConInfo <$> get bh <*> get bh instance Outputable IfaceTyLit where ppr = pprIfaceTyLit instance Binary IfaceTyLit where put_ bh (IfaceNumTyLit n) = putByte bh 1 >> put_ bh n put_ bh (IfaceStrTyLit n) = putByte bh 2 >> put_ bh n get bh = do tag <- getByte bh case tag of 1 -> do { n <- get bh ; return (IfaceNumTyLit n) } 2 -> do { n <- get 
bh ; return (IfaceStrTyLit n) } _ -> panic ("get IfaceTyLit " ++ show tag) instance Binary IfaceAppArgs where put_ bh tk = case tk of IA_Arg t a ts -> putByte bh 0 >> put_ bh t >> put_ bh a >> put_ bh ts IA_Nil -> putByte bh 1 get bh = do c <- getByte bh case c of 0 -> do t <- get bh a <- get bh ts <- get bh return $! IA_Arg t a ts 1 -> return IA_Nil _ -> panic ("get IfaceAppArgs " ++ show c) ------------------- -- Some notes about printing contexts -- -- In the event that we are printing a singleton context (e.g. @Eq a@) we can -- omit parentheses. However, we must take care to set the precedence correctly -- to opPrec, since something like @a :~: b@ must be parenthesized (see -- #9658). -- -- When printing a larger context we use 'fsep' instead of 'sep' so that -- the context doesn't get displayed as a giant column. Rather than, -- instance (Eq a, -- Eq b, -- Eq c, -- Eq d, -- Eq e, -- Eq f, -- Eq g, -- Eq h, -- Eq i, -- Eq j, -- Eq k, -- Eq l) => -- Eq (a, b, c, d, e, f, g, h, i, j, k, l) -- -- we want -- -- instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g, Eq h, Eq i, -- Eq j, Eq k, Eq l) => -- Eq (a, b, c, d, e, f, g, h, i, j, k, l) -- | Prints "(C a, D b) =>", including the arrow. -- Used when we want to print a context in a type, so we -- use 'funPrec' to decide whether to parenthesise a singleton -- predicate; e.g. Num a => a -> a pprIfaceContextArr :: [IfacePredType] -> SDoc pprIfaceContextArr [] = empty pprIfaceContextArr [pred] = ppr_ty funPrec pred <+> darrow pprIfaceContextArr preds = ppr_parend_preds preds <+> darrow -- | Prints a context or @()@ if empty -- You give it the context precedence pprIfaceContext :: PprPrec -> [IfacePredType] -> SDoc pprIfaceContext _ [] = text "()" pprIfaceContext prec [pred] = ppr_ty prec pred pprIfaceContext _ preds = ppr_parend_preds preds ppr_parend_preds :: [IfacePredType] -> SDoc ppr_parend_preds preds = parens (fsep (punctuate comma (map ppr preds))) instance Binary IfaceType where put_ _ (IfaceFreeTyVar tv) = pprPanic "Can't serialise IfaceFreeTyVar" (ppr tv) put_ bh (IfaceForAllTy aa ab) = do putByte bh 0 put_ bh aa put_ bh ab put_ bh (IfaceTyVar ad) = do putByte bh 1 put_ bh ad put_ bh (IfaceAppTy ae af) = do putByte bh 2 put_ bh ae put_ bh af put_ bh (IfaceFunTy af ag ah) = do putByte bh 3 put_ bh af put_ bh ag put_ bh ah put_ bh (IfaceTyConApp tc tys) = do { putByte bh 5; put_ bh tc; put_ bh tys } put_ bh (IfaceCastTy a b) = do { putByte bh 6; put_ bh a; put_ bh b } put_ bh (IfaceCoercionTy a) = do { putByte bh 7; put_ bh a } put_ bh (IfaceTupleTy s i tys) = do { putByte bh 8; put_ bh s; put_ bh i; put_ bh tys } put_ bh (IfaceLitTy n) = do { putByte bh 9; put_ bh n } get bh = do h <- getByte bh case h of 0 -> do aa <- get bh ab <- get bh return (IfaceForAllTy aa ab) 1 -> do ad <- get bh return (IfaceTyVar ad) 2 -> do ae <- get bh af <- get bh return (IfaceAppTy ae af) 3 -> do af <- get bh ag <- get bh ah <- get bh return (IfaceFunTy af ag ah) 5 -> do { tc <- get bh; tys <- get bh ; return (IfaceTyConApp tc tys) } 6 -> do { a <- get bh; b <- get bh ; return (IfaceCastTy a b) } 7 -> do { a <- get bh ; return (IfaceCoercionTy a) } 8 -> do { s <- get bh; i <- get bh; tys <- get bh ; return (IfaceTupleTy s i tys) } _ -> do n <- get bh return (IfaceLitTy n) instance Binary IfaceMCoercion where put_ bh IfaceMRefl = do putByte bh 1 put_ bh (IfaceMCo co) = do putByte bh 2 put_ bh co get bh = do tag <- getByte bh case tag of 1 -> return IfaceMRefl 2 -> do a <- get bh return $ IfaceMCo a _ -> panic ("get IfaceMCoercion " ++ show tag) instance 
Binary IfaceCoercion where put_ bh (IfaceReflCo a) = do putByte bh 1 put_ bh a put_ bh (IfaceGReflCo a b c) = do putByte bh 2 put_ bh a put_ bh b put_ bh c put_ bh (IfaceFunCo a b c) = do putByte bh 3 put_ bh a put_ bh b put_ bh c put_ bh (IfaceTyConAppCo a b c) = do putByte bh 4 put_ bh a put_ bh b put_ bh c put_ bh (IfaceAppCo a b) = do putByte bh 5 put_ bh a put_ bh b put_ bh (IfaceForAllCo a b c) = do putByte bh 6 put_ bh a put_ bh b put_ bh c put_ bh (IfaceCoVarCo a) = do putByte bh 7 put_ bh a put_ bh (IfaceAxiomInstCo a b c) = do putByte bh 8 put_ bh a put_ bh b put_ bh c put_ bh (IfaceUnivCo a b c d) = do putByte bh 9 put_ bh a put_ bh b put_ bh c put_ bh d put_ bh (IfaceSymCo a) = do putByte bh 10 put_ bh a put_ bh (IfaceTransCo a b) = do putByte bh 11 put_ bh a put_ bh b put_ bh (IfaceNthCo a b) = do putByte bh 12 put_ bh a put_ bh b put_ bh (IfaceLRCo a b) = do putByte bh 13 put_ bh a put_ bh b put_ bh (IfaceInstCo a b) = do putByte bh 14 put_ bh a put_ bh b put_ bh (IfaceKindCo a) = do putByte bh 15 put_ bh a put_ bh (IfaceSubCo a) = do putByte bh 16 put_ bh a put_ bh (IfaceAxiomRuleCo a b) = do putByte bh 17 put_ bh a put_ bh b put_ _ (IfaceFreeCoVar cv) = pprPanic "Can't serialise IfaceFreeCoVar" (ppr cv) put_ _ (IfaceHoleCo cv) = pprPanic "Can't serialise IfaceHoleCo" (ppr cv) -- See Note [Holes in IfaceCoercion] get bh = do tag <- getByte bh case tag of 1 -> do a <- get bh return $ IfaceReflCo a 2 -> do a <- get bh b <- get bh c <- get bh return $ IfaceGReflCo a b c 3 -> do a <- get bh b <- get bh c <- get bh return $ IfaceFunCo a b c 4 -> do a <- get bh b <- get bh c <- get bh return $ IfaceTyConAppCo a b c 5 -> do a <- get bh b <- get bh return $ IfaceAppCo a b 6 -> do a <- get bh b <- get bh c <- get bh return $ IfaceForAllCo a b c 7 -> do a <- get bh return $ IfaceCoVarCo a 8 -> do a <- get bh b <- get bh c <- get bh return $ IfaceAxiomInstCo a b c 9 -> do a <- get bh b <- get bh c <- get bh d <- get bh return $ IfaceUnivCo a b c d 10-> do a <- get bh return $ IfaceSymCo a 11-> do a <- get bh b <- get bh return $ IfaceTransCo a b 12-> do a <- get bh b <- get bh return $ IfaceNthCo a b 13-> do a <- get bh b <- get bh return $ IfaceLRCo a b 14-> do a <- get bh b <- get bh return $ IfaceInstCo a b 15-> do a <- get bh return $ IfaceKindCo a 16-> do a <- get bh return $ IfaceSubCo a 17-> do a <- get bh b <- get bh return $ IfaceAxiomRuleCo a b _ -> panic ("get IfaceCoercion " ++ show tag) instance Binary IfaceUnivCoProv where put_ bh IfaceUnsafeCoerceProv = putByte bh 1 put_ bh (IfacePhantomProv a) = do putByte bh 2 put_ bh a put_ bh (IfaceProofIrrelProv a) = do putByte bh 3 put_ bh a put_ bh (IfacePluginProv a) = do putByte bh 4 put_ bh a get bh = do tag <- getByte bh case tag of 1 -> return $ IfaceUnsafeCoerceProv 2 -> do a <- get bh return $ IfacePhantomProv a 3 -> do a <- get bh return $ IfaceProofIrrelProv a 4 -> do a <- get bh return $ IfacePluginProv a _ -> panic ("get IfaceUnivCoProv " ++ show tag) instance Binary (DefMethSpec IfaceType) where put_ bh VanillaDM = putByte bh 0 put_ bh (GenericDM t) = putByte bh 1 >> put_ bh t get bh = do h <- getByte bh case h of 0 -> return VanillaDM _ -> do { t <- get bh; return (GenericDM t) } instance NFData IfaceType where rnf = \case IfaceFreeTyVar f1 -> f1 `seq` () IfaceTyVar f1 -> rnf f1 IfaceLitTy f1 -> rnf f1 IfaceAppTy f1 f2 -> rnf f1 `seq` rnf f2 IfaceFunTy f1 f2 f3 -> f1 `seq` rnf f2 `seq` rnf f3 IfaceForAllTy f1 f2 -> f1 `seq` rnf f2 IfaceTyConApp f1 f2 -> rnf f1 `seq` rnf f2 IfaceCastTy f1 f2 -> rnf f1 `seq` rnf f2 
IfaceCoercionTy f1 -> rnf f1 IfaceTupleTy f1 f2 f3 -> f1 `seq` f2 `seq` rnf f3 instance NFData IfaceTyLit where rnf = \case IfaceNumTyLit f1 -> rnf f1 IfaceStrTyLit f1 -> rnf f1 instance NFData IfaceCoercion where rnf = \case IfaceReflCo f1 -> rnf f1 IfaceGReflCo f1 f2 f3 -> f1 `seq` rnf f2 `seq` rnf f3 IfaceFunCo f1 f2 f3 -> f1 `seq` rnf f2 `seq` rnf f3 IfaceTyConAppCo f1 f2 f3 -> f1 `seq` rnf f2 `seq` rnf f3 IfaceAppCo f1 f2 -> rnf f1 `seq` rnf f2 IfaceForAllCo f1 f2 f3 -> rnf f1 `seq` rnf f2 `seq` rnf f3 IfaceCoVarCo f1 -> rnf f1 IfaceAxiomInstCo f1 f2 f3 -> rnf f1 `seq` rnf f2 `seq` rnf f3 IfaceAxiomRuleCo f1 f2 -> rnf f1 `seq` rnf f2 IfaceUnivCo f1 f2 f3 f4 -> rnf f1 `seq` f2 `seq` rnf f3 `seq` rnf f4 IfaceSymCo f1 -> rnf f1 IfaceTransCo f1 f2 -> rnf f1 `seq` rnf f2 IfaceNthCo f1 f2 -> rnf f1 `seq` rnf f2 IfaceLRCo f1 f2 -> f1 `seq` rnf f2 IfaceInstCo f1 f2 -> rnf f1 `seq` rnf f2 IfaceKindCo f1 -> rnf f1 IfaceSubCo f1 -> rnf f1 IfaceFreeCoVar f1 -> f1 `seq` () IfaceHoleCo f1 -> f1 `seq` () instance NFData IfaceUnivCoProv where rnf x = seq x () instance NFData IfaceMCoercion where rnf x = seq x () instance NFData IfaceOneShot where rnf x = seq x () instance NFData IfaceTyConSort where rnf = \case IfaceNormalTyCon -> () IfaceTupleTyCon arity sort -> rnf arity `seq` sort `seq` () IfaceSumTyCon arity -> rnf arity IfaceEqualityTyCon -> () instance NFData IfaceTyConInfo where rnf (IfaceTyConInfo f s) = f `seq` rnf s instance NFData IfaceTyCon where rnf (IfaceTyCon nm info) = rnf nm `seq` rnf info instance NFData IfaceBndr where rnf = \case IfaceIdBndr id_bndr -> rnf id_bndr IfaceTvBndr tv_bndr -> rnf tv_bndr instance NFData IfaceAppArgs where rnf = \case IA_Nil -> () IA_Arg f1 f2 f3 -> rnf f1 `seq` f2 `seq` rnf f3
sdiehl/ghc
compiler/GHC/Iface/Type.hs
bsd-3-clause
76,725
2
19
21,467
16,370
8,196
8,174
1,217
32
module Module4.Task29 where import Prelude hiding (lookup) class MapLike m where empty :: m k v lookup :: Ord k => k -> m k v -> Maybe v insert :: Ord k => k -> v -> m k v -> m k v delete :: Ord k => k -> m k v -> m k v fromList :: Ord k => [(k,v)] -> m k v fromList [] = empty fromList ((k,v):xs) = insert k v (fromList xs) newtype ArrowMap k v = ArrowMap { getArrowMap :: k -> Maybe v } instance MapLike ArrowMap where empty = ArrowMap $ const Nothing lookup key (ArrowMap map) = map key insert key value (ArrowMap map) = ArrowMap (\k -> if k == key then Just value else map k) delete key (ArrowMap map) = ArrowMap (\k -> if k == key then Nothing else map k)
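
-- Illustrative usage only (this helper is not part of the original exercise
-- solution); it just exercises the ArrowMap instance through the class
-- methods above. Expected result: (Just "one", Nothing).
exampleLookups :: (Maybe String, Maybe String)
exampleLookups = (lookup 1 m, lookup 2 (delete 2 m))
  where
    m = fromList [(1, "one"), (2, "two")] :: ArrowMap Int String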
dstarcev/stepic-haskell
src/Module4/Task29.hs
bsd-3-clause
717
0
11
204
350
180
170
18
0
module Main where import Hexdump main :: IO () main = hexPrint
wangbj/hexdump
app/Main.hs
bsd-3-clause
65
0
6
14
22
13
9
4
1
module Valkyria.Client where import Valkyria.Env import Prelude ()
nfjinjing/source-code-server
src/Valkyria/Client.hs
bsd-3-clause
71
0
4
12
17
11
6
3
0
{-# LANGUAGE PackageImports #-} module Data.Function (module M) where import "base" Data.Function as M
silkapp/base-noprelude
src/Data/Function.hs
bsd-3-clause
108
0
4
18
21
15
6
3
0
module BCalib.Imports ( module X ) where import Control.Lens as X hiding (Empty) import Data.Atlas.Histogramming as X import Data.Monoid as X hiding (First (..), Last (..), (<>)) import Data.Semigroup as X import Data.TTree as X
cspollard/bcalib
src/BCalib/Imports.hs
bsd-3-clause
381
0
6
187
76
54
22
8
0
{-# LANGUAGE ViewPatterns, OverloadedStrings #-} module Text.XML.PList ( {-| This module is like `Text.XML.ToJSON', but it handles plist xml file. -} parseXML , xmlToJSON , tokensToJSON , elementToJSON , plistValue , module Text.XML.ToJSON ) where import Control.Monad (liftM) import Data.Text (Text) import qualified Data.Text as T import qualified Data.ByteString.Lazy as L import qualified Data.HashMap.Strict as HM import qualified Data.Vector as V import Data.Aeson (Value(..), FromJSON, fromJSON, Result(Error, Success)) import Data.Attoparsec.Text (parseOnly, number) import Data.Conduit (($=), ($$), MonadThrow(monadThrow)) import qualified Data.Conduit.List as C import Text.XML.ToJSON.Builder (Element(..)) import qualified Text.XML.ToJSON as ToJSON import qualified Text.HTML.TagStream.Text as T import Text.XML.ToJSON hiding (parseXML, xmlToJSON, tokensToJSON, elementToJSON) -- | parse xml to haskell data type by using aeson's `FromJSON'. parseXML :: (MonadThrow m, FromJSON a) => L.ByteString -> m a parseXML s = xmlToJSON s >>= convert where convert v = case fromJSON v of Error err -> monadThrow (JSONParseError err) Success a -> return a -- | Convert plist lazy bytestring to aeson `Value' xmlToJSON :: MonadThrow m => L.ByteString -> m Value xmlToJSON s = liftM (elementToJSON . tokensToElement) $ C.sourceList (L.toChunks s) $= T.tokenStreamBS $$ C.consume -- | Convert plist `Element' to aeson `Value' elementToJSON :: Element -> Value elementToJSON (Element _ _ [("plist", Element _ _ (item : _))]) = plistValue item elementToJSON _ = error "invalid plist root element." -- |Convert plist xml format of `T.Token's to aeson `Value', combining of `tokensToElement' and `elementToJSON' tokensToJSON :: [T.Token] -> Value tokensToJSON = elementToJSON . tokensToElement plistValue :: (Text, Element) -> Value plistValue (t, elm) = case t of "string" -> String (getText elm) "data" -> String (getText elm) "integer" -> parseNumber (getText elm) "float" -> parseNumber (getText elm) "real" -> parseNumber (getText elm) "dict" -> plistObject elm "true" -> Bool True "false" -> Bool False "date" -> error "date support is not implemented" "array" -> Array $ V.fromList $ map plistValue (elChildren elm) _ -> Object $ HM.fromList [("type", String t), ("value", ToJSON.elementToJSON elm)] where parseNumber :: Text -> Value parseNumber "" = Null parseNumber s = either (error . ("parse number failed:"++)) Number $ parseOnly number s plistObject :: Element -> Value plistObject (Element _ _ cs) = Object $ HM.fromList $ mergeKeyValues cs mergeKeyValues :: [(Text, Element)] -> [(Text, Value)] mergeKeyValues xs = loop xs [] where loop [] kv = kv loop ((isKey -> True, getText -> key) : rest) kv = case rest of (item@(isKey -> False, _) : rest') -> loop rest' ((key, plistValue item) : kv) _ -> loop rest ((key, Null) : kv) loop ((tag,_):_) _ = error $ "expect <key> but got <"++T.unpack tag++">" isKey s = s == "key" getText :: Element -> Text getText (Element [] vs []) = T.concat vs getText _ = error "not a text node [getValue]"
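
-- Illustrative usage sketch (the sample document and the choice of 'IO' are
-- assumptions made for this example, not taken from the package docs).
-- Parse failures surface through 'monadThrow', i.e. as exceptions in IO:
--
-- @
-- samplePlistValue :: IO Value
-- samplePlistValue =
--   xmlToJSON "<plist version=\"1.0\"><dict><key>answer</key><integer>42</integer></dict></plist>"
-- @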
yihuang/xml2json
Text/XML/PList.hs
bsd-3-clause
3,395
0
16
811
1,034
571
463
70
16
{-# LANGUAGE ScopedTypeVariables #-} import Prelude hiding (sequence, mapM, foldl) import Data.Traversable import Data.Foldable import Control.Monad (void) import Control.Monad.Trans.Either import Control.Monad.IO.Class import qualified Data.Vector as V import qualified Data.Vector.Generic as VG import Data.Vector.Algorithms.Heap (sort) import Options.Applicative import Linear import Control.Lens hiding (argument) import Graphics.Rendering.Chart hiding (Point) import Graphics.Rendering.Chart.Backend.Cairo import Data.Default import Data.Colour import Data.Colour.Names import ModelFit.Model.Named import ModelFit.Types (Point, ptX, ptY, withVar) import ModelFit.Fit import ModelFit.FitExpr import ModelFit.Models.Lifetime import CsvUtils data Opts = Opts { irfPath :: FilePath , fluorPath :: [FilePath] , components :: Int } opts :: Parser Opts opts = Opts <$> strOption (long "irf" <> metavar "FILE" <> help "IRF curve path") <*> many (strArgument (metavar "FILE" <> help "Fluorescence curve path")) <*> option auto (long "components" <> short 'n' <> value 1 <> metavar "N" <> help "Number of components") printing :: EitherT String IO () -> IO () printing action = runEitherT action >>= either print return jiffy :: Double jiffy = 8 dropLast n v = V.take (V.length v - n) v mode :: (VG.Vector v a, Ord a, Eq a) => v a -> Maybe a mode v | VG.null v = Nothing mode v = go (VG.head v', 1) $ VG.tail v' where v' = sorted v sorted :: (VG.Vector v a, Ord a) => v a -> v a sorted v = VG.create $ do v' <- VG.thaw v sort v' return v' go (a,n) v | VG.null v = Just a | n' > n = go (a', n') y | otherwise = go (a, n) y where (x,y) = VG.span (== a') v n' = VG.length x a' = VG.head v sumModels :: (Applicative f, Num a) => [f (b -> a)] -> f (b -> a) sumModels = foldl (\accum m->liftOp (+) <$> accum <*> m) (pure $ const 0) main :: IO () main = printing $ do args <- liftIO $ execParser $ info (helper <*> opts) (fullDesc <> progDesc "Fit fluorescence decays") let withPoissonVar = withVar id irfPts <- V.take 4090 . V.map withPoissonVar <$> readPoints (irfPath args) fluorPts <- mapM (\fname->V.take 3200 . V.map withPoissonVar <$> readPoints fname) (fluorPath args) let Just irfBg = mode $ V.map (^. ptY) $ V.take 1500 irfPts -- HACK let irfHist = V.convert $ V.map (subtract irfBg) $ V.drop offset $ V.map (^. ptY) irfPts offset = 0 --period = round $ 1/80e6 / (jiffy * 1e-12) period = findPeriod irfHist irfLength = round $ 2.0 * realToFrac period liftIO $ putStrLn $ "Period: "++show period liftIO $ putStrLn $ "IRF background: "++show irfBg let irf = mkIrf irfHist irfLength let fitFluor :: String -- ^ Curve name -> [FitExprM Double Double] -- ^ lifetimes -> V.Vector (Point Double Double) -- ^ points -> GlobalFitM Double Double Double (FitDesc Double Double Double) fitFluor name taus pts = do let fluorBg = V.head pts ^. ptY Just fluorAmp = maximumOf (each . ptY) pts fitPts = V.convert $ V.take period pts let component :: Int -> FitExprM Double Double -> FitExprM Double (Double -> Double) component i tau = lifetimeModel <$> p where p = sequenceA $ LifetimeP { _decayTime = tau , _amplitude = param (name++"-amp"++show i) (fluorAmp / 2) } decayModel <- expr $ sumModels $ zipWith component [1..] 
taus --background <- expr $ const <$> param "bg" fluorBg let background = return $ const 0 convolved <- expr $ convolvedModel irf (V.length pts) jiffy <$> hoist decayModel m <- expr $ liftOp (+) <$> hoist convolved <*> hoist background fit fitPts $ hoist m let (fds, curves, p0, params) = runGlobalFitM $ do taus <- mapM (\i->globalParam ("tau"++show i) (realToFrac $ i*1000)) [1..components args] forM (zip ["curve"++show i | i <- [1..]] fluorPts) $ \(name,pts) -> fitFluor name taus pts let fit = either (error . show) id $ leastSquares curves p0 liftIO $ print $ fmap (flip evalParam p0 . Param) params liftIO $ print $ fmap (flip evalParam fit . Param) params forM_ (zip3 (fluorPath args) fluorPts fds) $ \(fname,pts,fd) -> do let path = fname++".png" ts = V.toList $ V.map (^. ptX) pts liftIO $ void $ renderableToFile def path $ toRenderable $ plotFit ts pts (fitEval fd fit) liftIO $ putStrLn $ "Reduced Chi squared: "++show (reducedChiSquared fd fit) return () plotFit :: forall x y. (Show x, Show y, RealFloat y, Fractional x, PlotValue x, PlotValue y) => [x] -> V.Vector (Point x y) -> (x -> y) -> StackedLayouts x plotFit ts pts fit = def & slayouts_layouts .~ [StackedLayout layout] where layout = def & layout_plots .~ plots & layout_y_axis . laxis_generate .~ autoScaledLogAxis def plots :: [Plot x y] plots = [ toPlot $ def & plot_points_values .~ zip [realToFrac $ jiffy*i | i <- [0..]] (toListOf (each . ptY) pts) & plot_points_style . point_color .~ opaque green & plot_points_title .~ "Observed" , toPlot $ def & plot_lines_values .~ [map (\x->(x, fit x)) ts] & plot_lines_style . line_color .~ opaque red & plot_lines_title .~ "Fit" ]
bgamari/model-fit
LifetimeFit.hs
bsd-3-clause
5,723
0
24
1,691
2,124
1,076
1,048
-1
-1
module Pos.Infra.Slotting.Class ( module X ) where import Pos.Core.Slotting as X
input-output-hk/pos-haskell-prototype
infra/src/Pos/Infra/Slotting/Class.hs
mit
106
0
4
36
22
16
6
3
0
{-# LANGUAGE CPP, MultiParamTypeClasses, FunctionalDependencies #-} {-# LANGUAGE FlexibleInstances #-} module Language.Haskell.GhcMod.FillSig ( sig , refine , auto ) where import Data.Char (isSymbol) import Data.Function (on) import Data.List (find, nub, sortBy) import qualified Data.Map as M import Data.Maybe (isJust, catMaybes) import Exception (ghandle, SomeException(..)) import GHC (GhcMonad, Id, ParsedModule(..), TypecheckedModule(..), DynFlags, SrcSpan, Type, GenLocated(L)) import qualified GHC as G import qualified Name as G import qualified Language.Haskell.GhcMod.Gap as Gap import Language.Haskell.GhcMod.Convert import Language.Haskell.GhcMod.Monad import Language.Haskell.GhcMod.SrcUtils import Language.Haskell.GhcMod.Types import Outputable (PprStyle) import qualified Type as Ty import qualified HsBinds as Ty import qualified Class as Ty import qualified Var as Ty import qualified HsPat as Ty import qualified Language.Haskell.Exts.Annotated as HE import Djinn.GHC ---------------------------------------------------------------- -- INTIAL CODE FROM FUNCTION OR INSTANCE SIGNATURE ---------------------------------------------------------------- -- Possible signatures we can find: function or instance data SigInfo = Signature SrcSpan [G.RdrName] (G.HsType G.RdrName) | InstanceDecl SrcSpan G.Class | TyFamDecl SrcSpan G.RdrName TyFamType {- True if closed -} [G.RdrName] -- Signature for fallback operation via haskell-src-exts data HESigInfo = HESignature HE.SrcSpan [HE.Name HE.SrcSpanInfo] (HE.Type HE.SrcSpanInfo) | HEFamSignature HE.SrcSpan TyFamType (HE.Name HE.SrcSpanInfo) [HE.Name HE.SrcSpanInfo] data TyFamType = Closed | Open | Data initialTyFamString :: TyFamType -> (String, String) initialTyFamString Closed = ("instance", "") initialTyFamString Open = ("function", "type instance ") initialTyFamString Data = ("function", "data instance ") -- | Create a initial body from a signature. sig :: IOish m => FilePath -- ^ A target file. -> Int -- ^ Line number. -> Int -- ^ Column number. -> GhcModT m String sig file lineNo colNo = ghandle handler body where body = inModuleContext file $ \dflag style -> do opt <- options modSum <- Gap.fileModSummary file whenFound opt (getSignature modSum lineNo colNo) $ \s -> case s of Signature loc names ty -> ("function", fourInts loc, map (initialBody dflag style ty) names) InstanceDecl loc cls -> ("instance", fourInts loc, map (\x -> initialBody dflag style (G.idType x) x) (Ty.classMethods cls)) TyFamDecl loc name flavour vars -> let (rTy, initial) = initialTyFamString flavour in (rTy, fourInts loc, [initial ++ initialFamBody dflag style name vars]) handler (SomeException _) = do opt <- options -- Code cannot be parsed by ghc module -- Fallback: try to get information via haskell-src-exts whenFound opt (getSignatureFromHE file lineNo colNo) $ \x -> case x of HESignature loc names ty -> ("function", fourIntsHE loc, map (initialBody undefined undefined ty) names) HEFamSignature loc flavour name vars -> let (rTy, initial) = initialTyFamString flavour in (rTy, fourIntsHE loc, [initial ++ initialFamBody undefined undefined name vars]) ---------------------------------------------------------------- -- a. 
Code for getting the information -- Get signature from ghc parsing and typechecking getSignature :: GhcMonad m => G.ModSummary -> Int -> Int -> m (Maybe SigInfo) getSignature modSum lineNo colNo = do p@ParsedModule{pm_parsed_source = ps} <- G.parseModule modSum -- Inspect the parse tree to find the signature case listifyParsedSpans ps (lineNo, colNo) :: [G.LHsDecl G.RdrName] of [L loc (G.SigD (Ty.TypeSig names (L _ ty)))] -> -- We found a type signature return $ Just $ Signature loc (map G.unLoc names) ty [L _ (G.InstD _)] -> do -- We found an instance declaration TypecheckedModule{tm_renamed_source = Just tcs ,tm_checked_module_info = minfo} <- G.typecheckModule p let lst = listifyRenamedSpans tcs (lineNo, colNo) case Gap.getClass lst of Just (clsName,loc) -> obtainClassInfo minfo clsName loc _ -> return Nothing #if __GLASGOW_HASKELL__ >= 708 [L loc (G.TyClD (G.FamDecl (G.FamilyDecl info (L _ name) (G.HsQTvs _ vars) _)))] -> do #elif __GLASGOW_HASKELL__ >= 706 [L loc (G.TyClD (G.TyFamily info (L _ name) (G.HsQTvs _ vars) _))] -> do #else [L loc (G.TyClD (G.TyFamily info (L _ name) vars _))] -> do #endif #if __GLASGOW_HASKELL__ >= 708 let flavour = case info of G.ClosedTypeFamily _ -> Closed G.OpenTypeFamily -> Open G.DataFamily -> Data #else let flavour = case info of -- Closed type families where introduced in GHC 7.8 G.TypeFamily -> Open G.DataFamily -> Data #endif #if __GLASGOW_HASKELL__ >= 706 getTyFamVarName x = case x of L _ (G.UserTyVar n) -> n L _ (G.KindedTyVar n _) -> n #else getTyFamVarName x = case x of -- In GHC 7.4, HsTyVarBndr's have an extra arg L _ (G.UserTyVar n _) -> n L _ (G.KindedTyVar n _ _) -> n #endif in return $ Just (TyFamDecl loc name flavour $ map getTyFamVarName vars) _ -> return Nothing where obtainClassInfo :: GhcMonad m => G.ModuleInfo -> G.Name -> SrcSpan -> m (Maybe SigInfo) obtainClassInfo minfo clsName loc = do tyThing <- G.modInfoLookupName minfo clsName return $ do Ty.ATyCon clsCon <- tyThing -- In Maybe cls <- G.tyConClass_maybe clsCon return $ InstanceDecl loc cls -- Get signature from haskell-src-exts getSignatureFromHE :: GhcMonad m => FilePath -> Int -> Int -> m (Maybe HESigInfo) getSignatureFromHE file lineNo colNo = do presult <- liftIO $ HE.parseFile file return $ case presult of HE.ParseOk (HE.Module _ _ _ _ mdecls) -> do decl <- find (typeSigInRangeHE lineNo colNo) mdecls case decl of HE.TypeSig (HE.SrcSpanInfo s _) names ty -> return $ HESignature s names ty HE.TypeFamDecl (HE.SrcSpanInfo s _) declHead _ -> let (name, tys) = dHeadTyVars declHead in return $ HEFamSignature s Open name (map cleanTyVarBind tys) HE.DataFamDecl (HE.SrcSpanInfo s _) _ declHead _ -> let (name, tys) = dHeadTyVars declHead in return $ HEFamSignature s Open name (map cleanTyVarBind tys) _ -> fail "" _ -> Nothing where cleanTyVarBind (HE.KindedVar _ n _) = n cleanTyVarBind (HE.UnkindedVar _ n) = n #if MIN_VERSION_haskell_src_exts(1,16,0) dHeadTyVars :: HE.DeclHead l -> (HE.Name l, [HE.TyVarBind l]) dHeadTyVars (HE.DHead _ name) = (name, []) dHeadTyVars (HE.DHApp _ r ty) = (++[ty]) `fmap` (dHeadTyVars r) dHeadTyVars (HE.DHInfix _ ty name) = (name, [ty]) dHeadTyVars (HE.DHParen _ r) = dHeadTyVars r #else dHeadTyVars :: HE.DeclHead l -> (HE.Name l, [HE.TyVarBind l]) dHeadTyVars (HE.DHead _ n tys) = (n, tys) #endif ---------------------------------------------------------------- -- b. 
Code for generating initial code -- A list of function arguments, and whether they are functions or normal -- arguments is built from either a function signature or an instance signature data FnArg = FnArgFunction | FnArgNormal | FnExplicitName String initialBody :: FnArgsInfo ty name => DynFlags -> PprStyle -> ty -> name -> String initialBody dflag style ty name = initialBody' (getFnName dflag style name) (getFnArgs ty) initialBody' :: String -> [FnArg] -> String initialBody' fname args = initialHead fname args ++ " = " ++ n ++ "_body" where n = if isSymbolName fname then "" else '_':fname initialFamBody :: FnArgsInfo ty name => DynFlags -> PprStyle -> name -> [name] -> String initialFamBody dflag style name args = initialHead fnName fnArgs ++ " = ()" where fnName = getFnName dflag style name fnArgs = map (FnExplicitName . getFnName dflag style) args initialHead :: String -> [FnArg] -> String initialHead fname args = case initialBodyArgs args infiniteVars infiniteFns of [] -> fname arglist -> if isSymbolName fname then head arglist ++ " " ++ fname ++ " " ++ unwords (tail arglist) else fname ++ " " ++ unwords arglist initialBodyArgs :: [FnArg] -> [String] -> [String] -> [String] initialBodyArgs [] _ _ = [] initialBodyArgs (FnArgFunction:xs) vs (f:fs) = f : initialBodyArgs xs vs fs initialBodyArgs (FnArgNormal:xs) (v:vs) fs = v : initialBodyArgs xs vs fs initialBodyArgs (FnExplicitName n:xs) vs fs = n : initialBodyArgs xs vs fs initialBodyArgs _ _ _ = error "initialBodyArgs: This should never happen" -- Lists are infinite initialHead1 :: String -> [FnArg] -> [String] -> String initialHead1 fname args elts = case initialBodyArgs1 args elts of [] -> fname arglist -> if isSymbolName fname then head arglist ++ " " ++ fname ++ " " ++ unwords (tail arglist) else fname ++ " " ++ unwords arglist initialBodyArgs1 :: [FnArg] -> [String] -> [String] initialBodyArgs1 args elts = take (length args) elts -- Getting the initial body of function and instances differ -- This is because for functions we only use the parsed file -- (so the full file doesn't have to be type correct) -- but for instances we need to get information about the class class FnArgsInfo ty name | ty -> name, name -> ty where getFnName :: DynFlags -> PprStyle -> name -> String getFnArgs :: ty -> [FnArg] instance FnArgsInfo (G.HsType G.RdrName) (G.RdrName) where getFnName dflag style name = showOccName dflag style $ Gap.occName name getFnArgs (G.HsForAllTy _ _ _ (L _ iTy)) = getFnArgs iTy getFnArgs (G.HsParTy (L _ iTy)) = getFnArgs iTy getFnArgs (G.HsFunTy (L _ lTy) (L _ rTy)) = (if fnarg lTy then FnArgFunction else FnArgNormal):getFnArgs rTy where fnarg ty = case ty of (G.HsForAllTy _ _ _ (L _ iTy)) -> fnarg iTy (G.HsParTy (L _ iTy)) -> fnarg iTy (G.HsFunTy _ _) -> True _ -> False getFnArgs _ = [] instance FnArgsInfo (HE.Type HE.SrcSpanInfo) (HE.Name HE.SrcSpanInfo) where getFnName _ _ (HE.Ident _ s) = s getFnName _ _ (HE.Symbol _ s) = s getFnArgs (HE.TyForall _ _ _ iTy) = getFnArgs iTy getFnArgs (HE.TyParen _ iTy) = getFnArgs iTy getFnArgs (HE.TyFun _ lTy rTy) = (if fnarg lTy then FnArgFunction else FnArgNormal):getFnArgs rTy where fnarg ty = case ty of (HE.TyForall _ _ _ iTy) -> fnarg iTy (HE.TyParen _ iTy) -> fnarg iTy (HE.TyFun _ _ _) -> True _ -> False getFnArgs _ = [] instance FnArgsInfo Type Id where getFnName dflag style method = showOccName dflag style $ G.getOccName method getFnArgs = getFnArgs' . 
Ty.dropForAlls where getFnArgs' ty | Just (lTy,rTy) <- Ty.splitFunTy_maybe ty = maybe (if Ty.isPredTy lTy then getFnArgs' rTy else FnArgNormal:getFnArgs' rTy) (\_ -> FnArgFunction:getFnArgs' rTy) $ Ty.splitFunTy_maybe lTy getFnArgs' ty | Just (_,iTy) <- Ty.splitForAllTy_maybe ty = getFnArgs' iTy getFnArgs' _ = [] -- Infinite supply of variable and function variable names infiniteVars, infiniteFns :: [String] infiniteVars = infiniteSupply ["x","y","z","t","u","v","w"] infiniteFns = infiniteSupply ["f","g","h"] infiniteSupply :: [String] -> [String] infiniteSupply initialSupply = initialSupply ++ concatMap (\n -> map (\v -> v ++ show n) initialSupply) ([1 .. ] :: [Integer]) -- Check whether a String is a symbol name isSymbolName :: String -> Bool isSymbolName (c:_) = c `elem` "!#$%&*+./<=>?@\\^|-~" || isSymbol c isSymbolName [] = error "This should never happen" ---------------------------------------------------------------- -- REWRITE A HOLE / UNDEFINED VIA A FUNCTION ---------------------------------------------------------------- refine :: IOish m => FilePath -- ^ A target file. -> Int -- ^ Line number. -> Int -- ^ Column number. -> Expression -- ^ A Haskell expression. -> GhcModT m String refine file lineNo colNo expr = ghandle handler body where body = inModuleContext file $ \dflag style -> do opt <- options modSum <- Gap.fileModSummary file p <- G.parseModule modSum tcm@TypecheckedModule{tm_typechecked_source = tcs} <- G.typecheckModule p ety <- G.exprType expr whenFound opt (findVar dflag style tcm tcs lineNo colNo) $ \(loc, name, rty, paren) -> let eArgs = getFnArgs ety rArgs = getFnArgs rty diffArgs' = length eArgs - length rArgs diffArgs = if diffArgs' < 0 then 0 else diffArgs' iArgs = take diffArgs eArgs text = initialHead1 expr iArgs (infinitePrefixSupply name) in (fourInts loc, doParen paren text) handler (SomeException _) = emptyResult =<< options -- Look for the variable in the specified position findVar :: GhcMonad m => DynFlags -> PprStyle -> G.TypecheckedModule -> G.TypecheckedSource -> Int -> Int -> m (Maybe (SrcSpan, String, Type, Bool)) findVar dflag style tcm tcs lineNo colNo = let lst = sortBy (cmp `on` G.getLoc) $ listifySpans tcs (lineNo, colNo) :: [G.LHsExpr Id] in case lst of e@(L _ (G.HsVar i)):others -> do tyInfo <- Gap.getType tcm e let name = getFnName dflag style i if (name == "undefined" || head name == '_') && isJust tyInfo then let Just (s,t) = tyInfo b = case others of -- If inside an App, we need -- parenthesis [] -> False L _ (G.HsApp (L _ a1) (L _ a2)):_ -> isSearchedVar i a1 || isSearchedVar i a2 _ -> False in return $ Just (s, name, t, b) else return Nothing _ -> return Nothing infinitePrefixSupply :: String -> [String] infinitePrefixSupply "undefined" = repeat "undefined" infinitePrefixSupply p = map (\n -> p ++ "_" ++ show n) ([1 ..] :: [Integer]) doParen :: Bool -> String -> String doParen False s = s doParen True s = if ' ' `elem` s then '(':s ++ ")" else s isSearchedVar :: Id -> G.HsExpr Id -> Bool isSearchedVar i (G.HsVar i2) = i == i2 isSearchedVar _ _ = False ---------------------------------------------------------------- -- REFINE AUTOMATICALLY ---------------------------------------------------------------- auto :: IOish m => FilePath -- ^ A target file. -> Int -- ^ Line number. -> Int -- ^ Column number. 
-> GhcModT m String auto file lineNo colNo = ghandle handler body where body = inModuleContext file $ \dflag style -> do opt <- options modSum <- Gap.fileModSummary file p <- G.parseModule modSum tcm@TypecheckedModule { tm_typechecked_source = tcs , tm_checked_module_info = minfo } <- G.typecheckModule p whenFound' opt (findVar dflag style tcm tcs lineNo colNo) $ \(loc, _name, rty, paren) -> do topLevel <- getEverythingInTopLevel minfo let (f,pats) = getPatsForVariable tcs (lineNo,colNo) -- Remove self function to prevent recursion, and id to trim -- cases filterFn (n,_) = let funName = G.getOccString n recName = G.getOccString (G.getName f) in funName `notElem` recName:notWantedFuns -- Find without using other functions in top-level localBnds = M.unions $ map (\(L _ pat) -> getBindingsForPat pat) pats lbn = filter filterFn (M.toList localBnds) djinnsEmpty <- djinn True (Just minfo) lbn rty (Max 10) 100000 let -- Find with the entire top-level almostEnv = M.toList $ M.union localBnds topLevel env = filter filterFn almostEnv djinns <- djinn True (Just minfo) env rty (Max 10) 100000 return ( fourInts loc , map (doParen paren) $ nub (djinnsEmpty ++ djinns)) handler (SomeException _) = emptyResult =<< options -- Functions we do not want in completions notWantedFuns :: [String] notWantedFuns = ["id", "asTypeOf", "const"] -- Get all things defined in top-level getEverythingInTopLevel :: GhcMonad m => G.ModuleInfo -> m (M.Map G.Name Type) getEverythingInTopLevel m = do let modInfo = tyThingsToInfo (G.modInfoTyThings m) topNames = G.modInfoTopLevelScope m case topNames of Just topNames' -> do topThings <- mapM G.lookupGlobalName topNames' let topThings' = catMaybes topThings topInfo = tyThingsToInfo topThings' return $ M.union modInfo topInfo Nothing -> return modInfo tyThingsToInfo :: [Ty.TyThing] -> M.Map G.Name Type tyThingsToInfo [] = M.empty tyThingsToInfo (G.AnId i : xs) = M.insert (G.getName i) (Ty.varType i) (tyThingsToInfo xs) -- Getting information about constructors is not needed -- because they will be added by djinn-ghc when traversing types -- #if __GLASGOW_HASKELL__ >= 708 -- tyThingToInfo (G.AConLike (G.RealDataCon con)) = return [(Ty.dataConName con, Ty.dataConUserType con)] -- #else -- tyThingToInfo (G.AConLike con) = return [(Ty.dataConName con, Ty.dataConUserType con)] -- #endif tyThingsToInfo (_:xs) = tyThingsToInfo xs -- Find the Id of the function and the pattern where the hole is located getPatsForVariable :: G.TypecheckedSource -> (Int,Int) -> (Id, [Ty.LPat Id]) getPatsForVariable tcs (lineNo, colNo) = let (L _ bnd:_) = sortBy (cmp `on` G.getLoc) $ listifySpans tcs (lineNo, colNo) :: [G.LHsBind Id] in case bnd of G.PatBind { Ty.pat_lhs = L ploc pat } -> case pat of Ty.ConPatIn (L _ i) _ -> (i, [L ploc pat]) _ -> (error "This should never happen", []) G.FunBind { Ty.fun_id = L _ funId } -> let m = sortBy (cmp `on` G.getLoc) $ listifySpans tcs (lineNo, colNo) #if __GLASGOW_HASKELL__ >= 708 :: [G.LMatch Id (G.LHsExpr Id)] #else :: [G.LMatch Id] #endif (L _ (G.Match pats _ _):_) = m in (funId, pats) _ -> (error "This should never happen", []) getBindingsForPat :: Ty.Pat Id -> M.Map G.Name Type getBindingsForPat (Ty.VarPat i) = M.singleton (G.getName i) (Ty.varType i) getBindingsForPat (Ty.LazyPat (L _ l)) = getBindingsForPat l getBindingsForPat (Ty.BangPat (L _ b)) = getBindingsForPat b getBindingsForPat (Ty.AsPat (L _ a) (L _ i)) = M.insert (G.getName a) (Ty.varType a) (getBindingsForPat i) #if __GLASGOW_HASKELL__ >= 708 getBindingsForPat (Ty.ListPat l _ _) = M.unions $ map 
(\(L _ i) -> getBindingsForPat i) l #else getBindingsForPat (Ty.ListPat l _) = M.unions $ map (\(L _ i) -> getBindingsForPat i) l #endif getBindingsForPat (Ty.TuplePat l _ _) = M.unions $ map (\(L _ i) -> getBindingsForPat i) l getBindingsForPat (Ty.PArrPat l _) = M.unions $ map (\(L _ i) -> getBindingsForPat i) l getBindingsForPat (Ty.ViewPat _ (L _ i) _) = getBindingsForPat i getBindingsForPat (Ty.SigPatIn (L _ i) _) = getBindingsForPat i getBindingsForPat (Ty.SigPatOut (L _ i) _) = getBindingsForPat i getBindingsForPat (Ty.ConPatIn (L _ i) d) = M.insert (G.getName i) (Ty.varType i) (getBindingsForRecPat d) getBindingsForPat (Ty.ConPatOut { Ty.pat_args = d }) = getBindingsForRecPat d getBindingsForPat _ = M.empty getBindingsForRecPat :: Ty.HsConPatDetails Id -> M.Map G.Name Type getBindingsForRecPat (Ty.PrefixCon args) = M.unions $ map (\(L _ i) -> getBindingsForPat i) args getBindingsForRecPat (Ty.InfixCon (L _ a1) (L _ a2)) = M.union (getBindingsForPat a1) (getBindingsForPat a2) getBindingsForRecPat (Ty.RecCon (Ty.HsRecFields { Ty.rec_flds = fields })) = getBindingsForRecFields fields where getBindingsForRecFields [] = M.empty getBindingsForRecFields (Ty.HsRecField {Ty.hsRecFieldArg = (L _ a)}:fs) = M.union (getBindingsForPat a) (getBindingsForRecFields fs)
cabrera/ghc-mod
Language/Haskell/GhcMod/FillSig.hs
bsd-3-clause
21,204
0
25
5,847
6,267
3,231
3,036
348
7
{-# LANGUAGE DataKinds #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE ViewPatterns #-} -- | Parsing command line targets module Stack.Build.Target ( -- * Types ComponentName , UnresolvedComponent (..) , RawTarget (..) , LocalPackageView (..) , SimpleTarget (..) , NeedTargets (..) -- * Parsers , parseRawTarget , parseTargets ) where import Control.Applicative import Control.Arrow (second) import Control.Monad.Catch (MonadCatch, throwM) import Control.Monad.IO.Class import Data.Either (partitionEithers) import Data.Foldable import Data.List.Extra (groupSort) import Data.List.NonEmpty (NonEmpty((:|))) import qualified Data.List.NonEmpty as NonEmpty import Data.Map (Map) import qualified Data.Map as Map import Data.Maybe (mapMaybe) import Data.Set (Set) import qualified Data.Set as Set import Data.Text (Text) import qualified Data.Text as T import Path import Path.Extra (rejectMissingDir) import Path.IO import Prelude hiding (concat, concatMap) -- Fix redundant import warnings import Stack.Types.PackageIdentifier import Stack.Types.PackageName import Stack.Types.Version import Stack.Types.Config import Stack.Types.Build import Stack.Types.Package -- | The name of a component, which applies to executables, test suites, and benchmarks type ComponentName = Text newtype RawInput = RawInput { unRawInput :: Text } -- | Either a fully resolved component, or a component name that could be -- either an executable, test, or benchmark data UnresolvedComponent = ResolvedComponent !NamedComponent | UnresolvedComponent !ComponentName deriving (Show, Eq, Ord) -- | Raw command line input, without checking against any databases or list of -- locals. Does not deal with directories data RawTarget (a :: RawTargetType) where RTPackageComponent :: !PackageName -> !UnresolvedComponent -> RawTarget a RTComponent :: !ComponentName -> RawTarget a RTPackage :: !PackageName -> RawTarget a RTPackageIdentifier :: !PackageIdentifier -> RawTarget 'HasIdents deriving instance Show (RawTarget a) deriving instance Eq (RawTarget a) deriving instance Ord (RawTarget a) data RawTargetType = HasIdents | NoIdents -- | If this function returns @Nothing@, the input should be treated as a -- directory. 
parseRawTarget :: Text -> Maybe (RawTarget 'HasIdents) parseRawTarget t = (RTPackageIdentifier <$> parsePackageIdentifierFromString s) <|> (RTPackage <$> parsePackageNameFromString s) <|> (RTComponent <$> T.stripPrefix ":" t) <|> parsePackageComponent where s = T.unpack t parsePackageComponent = case T.splitOn ":" t of [pname, "lib"] | Just pname' <- parsePackageNameFromString (T.unpack pname) -> Just $ RTPackageComponent pname' $ ResolvedComponent CLib [pname, cname] | Just pname' <- parsePackageNameFromString (T.unpack pname) -> Just $ RTPackageComponent pname' $ UnresolvedComponent cname [pname, typ, cname] | Just pname' <- parsePackageNameFromString (T.unpack pname) , Just wrapper <- parseCompType typ -> Just $ RTPackageComponent pname' $ ResolvedComponent $ wrapper cname _ -> Nothing parseCompType t' = case t' of "exe" -> Just CExe "test" -> Just CTest "bench" -> Just CBench _ -> Nothing -- | A view of a local package needed for resolving components data LocalPackageView = LocalPackageView { lpvVersion :: !Version , lpvRoot :: !(Path Abs Dir) , lpvCabalFP :: !(Path Abs File) , lpvComponents :: !(Set NamedComponent) , lpvExtraDep :: !TreatLikeExtraDep } -- | Same as @parseRawTarget@, but also takes directories into account. parseRawTargetDirs :: (MonadIO m, MonadCatch m) => Path Abs Dir -- ^ current directory -> Map PackageName LocalPackageView -> Text -> m (Either Text [(RawInput, RawTarget 'HasIdents)]) parseRawTargetDirs root locals t = case parseRawTarget t of Just rt -> return $ Right [(ri, rt)] Nothing -> do mdir <- forgivingAbsence (resolveDir root (T.unpack t)) >>= rejectMissingDir case mdir of Nothing -> return $ Left $ "Directory not found: " `T.append` t Just dir -> case mapMaybe (childOf dir) $ Map.toList locals of [] -> return $ Left $ "No local directories found as children of " `T.append` t names -> return $ Right $ map ((ri, ) . RTPackage) names where ri = RawInput t childOf dir (name, lpv) = if (dir == lpvRoot lpv || isParentOf dir (lpvRoot lpv)) && not (lpvExtraDep lpv) then Just name else Nothing data SimpleTarget = STUnknown | STNonLocal | STLocalComps !(Set NamedComponent) | STLocalAll deriving (Show, Eq, Ord) resolveIdents :: Map PackageName Version -- ^ snapshot -> Map PackageName Version -- ^ extra deps -> Map PackageName LocalPackageView -> (RawInput, RawTarget 'HasIdents) -> Either Text ((RawInput, RawTarget 'NoIdents), Map PackageName Version) resolveIdents _ _ _ (ri, RTPackageComponent x y) = Right ((ri, RTPackageComponent x y), Map.empty) resolveIdents _ _ _ (ri, RTComponent x) = Right ((ri, RTComponent x), Map.empty) resolveIdents _ _ _ (ri, RTPackage x) = Right ((ri, RTPackage x), Map.empty) resolveIdents snap extras locals (ri, RTPackageIdentifier (PackageIdentifier name version)) = fmap ((ri, RTPackage name), ) newExtras where newExtras = case (Map.lookup name locals, mfound) of -- Error if it matches a local package, pkg idents not -- supported for local. (Just _, _) -> Left $ T.concat [ packageNameText name , " target has a specific version number, but it is a local package." , "\nTo avoid confusion, we will not install the specified version or build the local one." , "\nTo build the local package, specify the target without an explicit version." ] -- If the found version matches, no need for an extra-dep. (_, Just foundVersion) | foundVersion == version -> Right Map.empty -- Otherwise, if there is no specified version or a -- mismatch, add an extra-dep. 
_ -> Right $ Map.singleton name version mfound = asum (map (Map.lookup name) [extras, snap]) resolveRawTarget :: Map PackageName Version -- ^ snapshot -> Map PackageName Version -- ^ extra deps -> Map PackageName LocalPackageView -> (RawInput, RawTarget 'NoIdents) -> Either Text (PackageName, (RawInput, SimpleTarget)) resolveRawTarget snap extras locals (ri, rt) = go rt where go (RTPackageComponent name ucomp) = case Map.lookup name locals of Nothing -> Left $ T.pack $ "Unknown local package: " ++ packageNameString name Just lpv -> case ucomp of ResolvedComponent comp | comp `Set.member` lpvComponents lpv -> Right (name, (ri, STLocalComps $ Set.singleton comp)) | otherwise -> Left $ T.pack $ concat [ "Component " , show comp , " does not exist in package " , packageNameString name ] UnresolvedComponent comp -> case filter (isCompNamed comp) $ Set.toList $ lpvComponents lpv of [] -> Left $ T.concat [ "Component " , comp , " does not exist in package " , T.pack $ packageNameString name ] [x] -> Right (name, (ri, STLocalComps $ Set.singleton x)) matches -> Left $ T.concat [ "Ambiguous component name " , comp , " for package " , T.pack $ packageNameString name , ": " , T.pack $ show matches ] go (RTComponent cname) = let allPairs = concatMap (\(name, lpv) -> map (name,) $ Set.toList $ lpvComponents lpv) (Map.toList locals) in case filter (isCompNamed cname . snd) allPairs of [] -> Left $ "Could not find a component named " `T.append` cname [(name, comp)] -> Right (name, (ri, STLocalComps $ Set.singleton comp)) matches -> Left $ T.concat [ "Ambiugous component name " , cname , ", matches: " , T.pack $ show matches ] go (RTPackage name) = case Map.lookup name locals of Just _lpv -> Right (name, (ri, STLocalAll)) Nothing -> case Map.lookup name extras of Just _ -> Right (name, (ri, STNonLocal)) Nothing -> case Map.lookup name snap of Just _ -> Right (name, (ri, STNonLocal)) Nothing -> Right (name, (ri, STUnknown)) isCompNamed :: Text -> NamedComponent -> Bool isCompNamed _ CLib = False isCompNamed t1 (CExe t2) = t1 == t2 isCompNamed t1 (CTest t2) = t1 == t2 isCompNamed t1 (CBench t2) = t1 == t2 simplifyTargets :: [(PackageName, (RawInput, SimpleTarget))] -> ([Text], Map PackageName SimpleTarget) simplifyTargets = foldMap go . collect where go :: (PackageName, NonEmpty (RawInput, SimpleTarget)) -> ([Text], Map PackageName SimpleTarget) go (name, (_, st) :| []) = ([], Map.singleton name st) go (name, pairs) = case partitionEithers $ map (getLocalComp . snd) (NonEmpty.toList pairs) of ([], comps) -> ([], Map.singleton name $ STLocalComps $ Set.unions comps) _ -> let err = T.pack $ concat [ "Overlapping targets provided for package " , packageNameString name , ": " , show $ map (unRawInput . fst) (NonEmpty.toList pairs) ] in ([err], Map.empty) collect :: Ord a => [(a, b)] -> [(a, NonEmpty b)] collect = map (second NonEmpty.fromList) . groupSort getLocalComp (STLocalComps comps) = Right comps getLocalComp _ = Left () -- | Need targets, e.g. `stack build` or allow none? data NeedTargets = NeedTargets | AllowNoTargets parseTargets :: (MonadCatch m, MonadIO m) => NeedTargets -- ^ need at least one target -> Bool -- ^ using implicit global project? 
-> Map PackageName Version -- ^ snapshot -> Map PackageName Version -- ^ extra deps -> Map PackageName LocalPackageView -> Path Abs Dir -- ^ current directory -> [Text] -- ^ command line targets -> m (Map PackageName Version, Map PackageName SimpleTarget) parseTargets needTargets implicitGlobal snap extras locals currDir textTargets' = do let nonExtraDeps = Map.keys $ Map.filter (not . lpvExtraDep) locals textTargets = if null textTargets' then map (T.pack . packageNameString) nonExtraDeps else textTargets' erawTargets <- mapM (parseRawTargetDirs currDir locals) textTargets let (errs1, rawTargets) = partitionEithers erawTargets (errs2, unzip -> (rawTargets', newExtras)) = partitionEithers $ map (resolveIdents snap extras locals) $ concat rawTargets (errs3, targetTypes) = partitionEithers $ map (resolveRawTarget snap extras locals) rawTargets' (errs4, targets) = simplifyTargets targetTypes errs = concat [errs1, errs2, errs3, errs4] if null errs then if Map.null targets then case needTargets of AllowNoTargets -> return (Map.empty, Map.empty) NeedTargets | null textTargets' && implicitGlobal -> throwM $ TargetParseException ["The specified targets matched no packages.\nPerhaps you need to run 'stack init'?"] | null textTargets' && null nonExtraDeps -> throwM $ TargetParseException ["The project contains no local packages (packages not marked with 'extra-dep')"] | otherwise -> throwM $ TargetParseException ["The specified targets matched no packages"] else return (Map.unions newExtras, targets) else throwM $ TargetParseException errs
AndrewRademacher/stack
src/Stack/Build/Target.hs
bsd-3-clause
13,848
0
21
4,894
3,316
1,758
1,558
290
12
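The parser in the row above is easiest to read next to a few concrete inputs. The sketch below is illustrative only: the target strings and the name `targetExamples` are not taken from the stack sources, and it leans on the module's own OverloadedStrings/DataKinds pragmas.

-- Illustrative sketch (not from the stack test suite): how parseRawTarget
-- classifies raw command-line words.
targetExamples :: [Maybe (RawTarget 'HasIdents)]
targetExamples = map parseRawTarget
    [ "text-1.2.2.1"  -- Just (RTPackageIdentifier ...): package name plus version
    , "text"          -- Just (RTPackage ...): bare package name
    , ":my-exe"       -- Just (RTComponent "my-exe"): component of any local package
    , "text:lib"      -- Just (RTPackageComponent ... (ResolvedComponent CLib))
    ]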
{-#LANGUAGE ForeignFunctionInterface #-}

module Cudd.GC (
    cuddEnableGarbageCollection,
    cuddDisableGarbageCollection,
    cuddGarbageCollectionEnabled,
    c_preGCHook_sample,
    c_postGCHook_sample,
    regPreGCHook,
    regPostGCHook
    ) where

import System.IO
import Foreign
import Foreign.Ptr
import Foreign.C.Types
import Foreign.C.String
import Foreign.ForeignPtr
import Control.Monad
import Control.Monad.ST
import Control.Monad.ST.Unsafe

import Cudd.Hook
import Cudd.C
import Cudd.Imperative

foreign import ccall safe "Cudd_EnableGarbageCollection"
    c_cuddEnableGarbageCollection :: Ptr CDDManager -> IO ()

cuddEnableGarbageCollection :: DDManager s u -> ST s ()
cuddEnableGarbageCollection (DDManager m) = unsafeIOToST $ c_cuddEnableGarbageCollection m

foreign import ccall safe "Cudd_DisableGarbageCollection"
    c_cuddDisableGarbageCollection :: Ptr CDDManager -> IO ()

cuddDisableGarbageCollection :: DDManager s u -> ST s ()
cuddDisableGarbageCollection (DDManager m) = unsafeIOToST $ c_cuddDisableGarbageCollection m

foreign import ccall safe "Cudd_GarbageCollectionEnabled"
    c_cuddGarbageCollectionEnabled :: Ptr CDDManager -> IO CInt

cuddGarbageCollectionEnabled :: DDManager s u -> ST s Int
cuddGarbageCollectionEnabled (DDManager m) = unsafeIOToST $ liftM fromIntegral $ c_cuddGarbageCollectionEnabled m

foreign import ccall safe "&preGCHook_sample" c_preGCHook_sample :: HookFP

foreign import ccall safe "&postGCHook_sample" c_postGCHook_sample :: HookFP

regPreGCHook :: DDManager s u -> HookFP -> ST s Int
regPreGCHook m func = cuddAddHook m func CuddPreGcHook

regPostGCHook :: DDManager s u -> HookFP -> ST s Int
regPostGCHook m func = cuddAddHook m func CuddPostGcHook
maweki/haskell_cudd
Cudd/GC.hs
bsd-3-clause
1,715
10
8
234
418
219
199
41
1
-- | Process utilities
module Haskus.System.Process
   ( threadDelaySec
   , threadDelayMilliSec
   , threadDelayMicroSec
   , threadWaitRead
   , threadWaitWrite
   , yield
   , sysFork
   ) where

import Haskus.System.Sys
import Haskus.System.Linux.Handle
import Haskus.Utils.Flow
import Haskus.Format.Text (Text)

import System.Posix.Types (Fd(..))
import qualified Control.Concurrent as CC

-- | Delay the thread (seconds)
threadDelaySec :: MonadIO m => Word -> m ()
threadDelaySec = threadDelayMicroSec . (*1000000)

-- | Delay the thread (milliseconds)
threadDelayMilliSec :: MonadIO m => Word -> m ()
threadDelayMilliSec = threadDelayMicroSec . (*1000)

-- | Delay the thread (microseconds)
threadDelayMicroSec :: MonadIO m => Word -> m ()
threadDelayMicroSec = liftIO . CC.threadDelay . fromIntegral

-- | Wait until a handle is readable
threadWaitRead :: MonadIO m => Handle -> m ()
threadWaitRead h = liftIO (CC.threadWaitRead (handleToFd h))

-- | Wait until a handle is writeable
threadWaitWrite :: MonadIO m => Handle -> m ()
threadWaitWrite h = liftIO (CC.threadWaitWrite (handleToFd h))

-- | Convert a handle into an Fd
handleToFd :: Handle -> Fd
handleToFd (Handle fd) = Fd (fromIntegral fd)

-- | Switch to another thread cooperatively
yield :: MonadIO m => m ()
yield = liftIO CC.yield

-- | Fork a thread
sysFork :: Text -> Sys () -> Sys ()
sysFork name f = do
   act <- forkSys name f
   void $ liftIO $ CC.forkIO act
hsyl20/ViperVM
haskus-system/src/lib/Haskus/System/Process.hs
bsd-3-clause
1,439
0
9
257
408
220
188
32
1
{-# LANGUAGE OverloadedStrings, CPP #-} -- | High-ish level bindings to the HTML5 audio tag and JS API. module Haste.Audio ( module Events, Audio, AudioSettings (..), AudioType (..), AudioSource (..), AudioPreload (..), AudioState (..), Seek (..), defaultAudioSettings, mkSource, newAudio, setSource, getState, setMute, isMute, toggleMute, setLooping, isLooping, toggleLooping, getVolume, setVolume, modVolume, play, pause, stop, togglePlaying, seek, getDuration, getCurrentTime ) where import Haste.Audio.Events as Events import Haste.DOM.JSString import Haste.Foreign import Haste.JSType import Haste.Prim #if __GLASGOW_HASKELL__ < 710 import Control.Applicative #endif import Control.Monad import Control.Monad.IO.Class import Data.String -- | Represents an audio player. data Audio = Audio Elem instance IsElem Audio where elemOf (Audio e) = e fromElem e = do tn <- getProp e "tagName" return $ case tn of "AUDIO" -> Just $ Audio e _ -> Nothing data AudioState = Playing | Paused | Ended deriving (Show, Eq) data AudioType = MP3 | OGG | WAV deriving (Show, Eq) data AudioSource = AudioSource !AudioType !JSString deriving (Show, Eq) data AudioPreload = None | Metadata | Auto deriving Eq data Seek = Start | End | Seconds Double deriving Eq instance JSType AudioPreload where toJSString None = "none" toJSString Metadata = "metadata" toJSString Auto = "auto" fromJSString "none" = Just None fromJSString "metadata" = Just Metadata fromJSString "auto" = Just Auto fromJSString _ = Nothing data AudioSettings = AudioSettings { -- | Show controls? -- Default: False audioControls :: !Bool, -- | Immediately start playing? -- Default: False audioAutoplay :: !Bool, -- | Initially looping? -- Default: False audioLooping :: !Bool, -- | How much audio to preload. -- Default: Auto audioPreload :: !AudioPreload, -- | Initially muted? -- Default: False audioMuted :: !Bool, -- | Initial volume -- Default: 0 audioVolume :: !Double } defaultAudioSettings :: AudioSettings defaultAudioSettings = AudioSettings { audioControls = False, audioAutoplay = False, audioLooping = False, audioPreload = Auto, audioMuted = False, audioVolume = 0 } -- | Create an audio source with automatically detected media type, based on -- the given URL's file extension. -- Returns Nothing if the given URL has an unrecognized media type. mkSource :: JSString -> Maybe AudioSource mkSource url = case take 3 $ reverse $ fromJSStr url of "3pm" -> Just $ AudioSource MP3 url "ggo" -> Just $ AudioSource OGG url "vaw" -> Just $ AudioSource WAV url _ -> Nothing instance IsString AudioSource where fromString s = case mkSource $ Data.String.fromString s of Just src -> src _ -> error $ "Not a valid audio source: " ++ s mimeStr :: AudioType -> JSString mimeStr MP3 = "audio/mpeg" mimeStr OGG = "audio/ogg" mimeStr WAV = "audio/wav" -- | Create a new audio element. newAudio :: MonadIO m => AudioSettings -> [AudioSource] -> m Audio newAudio cfg sources = liftIO $ do srcs <- forM sources $ \(AudioSource t url) -> do newElem "source" `with` ["type" =: mimeStr t, "src" =: toJSString url] Audio <$> newElem "audio" `with` [ "controls" =: falseAsEmpty (audioControls cfg), "autoplay" =: falseAsEmpty (audioAutoplay cfg), "loop" =: falseAsEmpty (audioLooping cfg), "muted" =: falseAsEmpty (audioMuted cfg), "volume" =: toJSString (audioVolume cfg), "preload" =: toJSString (audioPreload cfg), children srcs ] -- | Returns "true" or "", depending on the given boolean. falseAsEmpty :: Bool -> JSString falseAsEmpty True = "true" falseAsEmpty _ = "" -- | (Un)mute the given audio object. 
setMute :: MonadIO m => Audio -> Bool -> m () setMute (Audio e) = setAttr e "muted" . falseAsEmpty -- | Is the given audio object muted? isMute :: MonadIO m => Audio -> m Bool isMute (Audio e) = liftIO $ maybe False id . fromJSString <$> getProp e "muted" -- | Mute/unmute. toggleMute :: MonadIO m => Audio -> m () toggleMute a = isMute a >>= setMute a . not -- | Set whether the given sound should loop upon completion or not. setLooping :: MonadIO m => Audio -> Bool -> m () setLooping (Audio e) = setAttr e "loop" . falseAsEmpty -- | Is the given audio object looping? isLooping :: MonadIO m => Audio -> m Bool isLooping (Audio e) = liftIO $ maybe False id . fromJSString <$> getProp e "looping" -- | Toggle looping on/off. toggleLooping :: MonadIO m => Audio -> m () toggleLooping a = isLooping a >>= setLooping a . not -- | Starts playing audio from the given element. play :: MonadIO m => Audio -> m () play a@(Audio e) = do st <- getState a when (st == Ended) $ seek a Start liftIO $ play' e where play' :: Elem -> IO () play' = ffi "(function(x){x.play();})" -- | Get the current state of the given audio object. getState :: MonadIO m => Audio -> m AudioState getState (Audio e) = liftIO $ do ended <- maybe False id . fromJSString <$> getProp e "ended" if ended then return Ended else maybe Playing paused . fromJSString <$> getProp e "paused" where paused True = Paused paused _ = Playing -- | Pause the given audio element. pause :: MonadIO m => Audio -> m () pause (Audio e) = liftIO $ pause' e pause' :: Elem -> IO () pause' = ffi "(function(x){x.pause();})" -- | If playing, stop. Otherwise, start playing. togglePlaying :: MonadIO m => Audio -> m () togglePlaying a = do st <- getState a case st of Playing -> pause a Ended -> seek a Start >> play a Paused -> play a -- | Stop playing a track, and seek back to its beginning. stop :: MonadIO m => Audio -> m () stop a = pause a >> seek a Start -- | Get the volume for the given audio element as a value between 0 and 1. getVolume :: MonadIO m => Audio -> m Double getVolume (Audio e) = liftIO $ maybe 0 id . fromJSString <$> getProp e "volume" -- | Set the volume for the given audio element. The value will be clamped to -- [0, 1]. setVolume :: MonadIO m => Audio -> Double -> m () setVolume (Audio e) = setProp e "volume" . toJSString . clamp -- | Modify the volume for the given audio element. The resulting volume will -- be clamped to [0, 1]. modVolume :: MonadIO m => Audio -> Double -> m () modVolume a diff = getVolume a >>= setVolume a . (+ diff) -- | Clamp a value to [0, 1]. clamp :: Double -> Double clamp = max 0 . min 1 -- | Seek to the specified time. seek :: MonadIO m => Audio -> Seek -> m () seek a@(Audio e) st = liftIO $ do case st of Start -> seek' e 0 End -> getDuration a >>= seek' e Seconds s -> seek' e s where seek' :: Elem -> Double -> IO () seek' = ffi "(function(e,t) {e.currentTime = t;})" -- | Get the duration of the loaded sound, in seconds. getDuration :: MonadIO m => Audio -> m Double getDuration (Audio e) = do dur <- getProp e "duration" case fromJSString dur of Just d -> return d _ -> return 0 -- | Get the current play time of the loaded sound, in seconds. getCurrentTime :: MonadIO m => Audio -> m Double getCurrentTime (Audio e) = do dur <- getProp e "currentTime" case fromJSString dur of Just d -> return d _ -> return 0 -- | Set the source of the given audio element. setSource :: MonadIO m => Audio -> AudioSource -> m () setSource (Audio e) (AudioSource _ url) = setProp e "src" (toJSString url)
akru/haste-compiler
libraries/haste-lib/src/Haste/Audio.hs
bsd-3-clause
7,613
0
16
1,807
2,207
1,132
1,075
181
4
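A rough caller for the audio API in the row above. `startMusic` and the file names are invented for illustration; the string literals rely on the module's `IsString AudioSource` instance, so OverloadedStrings is assumed at the call site.

-- Hypothetical usage sketch (not part of haste-lib).
startMusic :: IO Audio
startMusic = do
    -- newAudio, defaultAudioSettings and play come from Haste.Audio above
    a <- newAudio defaultAudioSettings { audioLooping = True, audioVolume = 0.5 }
                  ["music.ogg", "music.mp3"]  -- assumed files; mkSource picks OGG/MP3 by extension
    play a
    return a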
{-# LANGUAGE QuasiQuotes #-} module Objc (objcTests) where import Test.Framework import Test.Framework.Providers.HUnit import Test.HUnit (Assertion, (@?=)) import Language.C.Quote.ObjC objcTests :: Test objcTests = testGroup "Objective-C" [ testCase "Objective-C params" objcProp , testCase "Objective-C property" objcDict , testCase "Objective-C method parameters" objcParam , testCase "Objective-C method definition" objcMethodDefinition , testCase "Objective-C classmethod" objcArgumentCls , testCase "Objective-C argument" objcArgument , testCase "Objective-C arguments" objcArguments , testCase "Objective-C varargument" objcVarArgument , testCase "Objective-C literals" objcLits ] where objcDict :: Assertion objcDict = [cexp| @{$dictelems:(elems [("a","b"),("c", "d")])} |] @?= [cexp| @{@"a" : @"b",@"c": @"d"}|] where elems = map (\(k,v) -> [objcdictelem|$exp:(objcLit k) : $exp:(objcLit v)|] ) objcProp :: Assertion objcProp = [cedecl| @interface Foo - (void) foo; $prop:propdec1 $props:propdec2 $prop:propdec3 @end |] @?= [cedecl| @interface Foo - (void) foo; @property (nonatomic, retain) int i; @property (nonatomic, retain) float j; @property (nonatomic, retain) char k; @property (nonatomic) double l; @end |] where propdec n typ = [objcprop|@property ($propattrs:r) $ty:typ $id:n;|] propdec' n typ = [objcprop|@property ($propattr:p) $ty:typ $id:n;|] p = [objcpropattr|nonatomic|] q = [objcpropattr|retain|] r = [p,q] propdec1 = propdec "i" [cty|int|] propdec2 = map (\(n,t) -> propdec n t) [("j", [cty|float|]), ("k", [cty|char|])] propdec3 = propdec' "l" [cty|double|] objcParam :: Assertion objcParam = [cedecl| @interface Foo - (void) $methparams:paramNew ; $methproto:val ; @end |] @?= [cedecl| @interface Foo - (void) foo:(int)str fo:(int)str1; + (int) test1:(int)str2; @end |] where paramNew1 = [objcmethparam|$id:("foo"):(int)str |] paramNew2 = [objcmethparam|fo:(int)str1 |] paramNew3 = [objcmethparam|test1:(int)str2 |] paramNew = [paramNew1, paramNew2] val = [objcmethproto|+ (int) $methparam:paramNew3|] objcMethodDefinition :: Assertion objcMethodDefinition = [cedecl| @implementation fooclass $methdefs:(val) $methdef:(val3) @end |] @?= [cedecl| @implementation fooclass + (int) test1:(int)foo { } - (char) test2:(char)bar { } + (float) test3:(double)baz { } @end |] where val3 = [objcmethdef|+ (float) $methparam:paramNew5 {} |] paramNew5 = [objcmethparam|test3:(double)baz |] val2 = [objcmethdef|+ (int) $methparam:paramNew3 {} |] paramNew3 = [objcmethparam|test1:(int)foo |] val1 = [objcmethdef|- (char) $methparam:paramNew4 {} |] paramNew4 = [objcmethparam|test2:(char)bar |] val = [val2, val1] objcArgumentCls :: Assertion objcArgumentCls = [citem|[somename test];|] @?= [citem|[$recv:(k) $id:("test")];|] where k = [objcmethrecv|somename|] objcArgument :: Assertion objcArgument = [citem|[$recv:(k) $kwarg:(p)];|] @?= [citem|[somename doSome:@"string"];|] where k = [objcmethrecv|somename|] p = [objcarg|doSome:@"string"|] objcArguments :: Assertion objcArguments = [citem|[$recv:(k) $kwargs:(r)];|] @?= [citem|[somename doSome:@"string" doSomeMore:@"moreStrings"];|] where k = [objcmethrecv|somename|] p = [objcarg|doSome:@"string"|] q = [objcarg|doSomeMore:@"moreStrings"|] r = [p,q] objcVarArgument :: Assertion objcVarArgument = [citem|[$recv:(k) $kwarg:(r) $args:(p)];|] @?= [citem|[NSString stringWithFormat:@"A string: %@, a float: %1.2f", @"string", 31415.9265];|] where k = [objcmethrecv|NSString|] r = [objcarg|stringWithFormat:@"A string: %@, a float: %1.2f"|] p = [a, b] a = [cexp|@"string"|] b = 
[cexp|31415.9265|] objcLits :: Assertion objcLits = [cexp|@[$(objcLit "foo"), $(objcLit True), $(objcLit False), $(objcLit 'a'), nil]|] @?= [cexp|@[@"foo", @YES, @NO, @'a', nil]|]
mwu-tow/language-c-quote
tests/unit/Objc.hs
bsd-3-clause
4,607
0
12
1,314
737
505
232
86
1
{-# LANGUAGE CPP #-} -- Vectorise a modules type and class declarations. -- -- This produces new type constructors and family instances top be included in the module toplevel -- as well as bindings for worker functions, dfuns, and the like. module Vectorise.Type.Env ( vectTypeEnv, ) where #include "HsVersions.h" import GhcPrelude import Vectorise.Env import Vectorise.Vect import Vectorise.Monad import Vectorise.Builtins import Vectorise.Type.TyConDecl import Vectorise.Type.Classify import Vectorise.Generic.PADict import Vectorise.Generic.PAMethods import Vectorise.Generic.PData import Vectorise.Generic.Description import Vectorise.Utils import CoreSyn import CoreUtils import CoreUnfold import DataCon import TyCon import CoAxiom import Type import FamInstEnv import Id import MkId import NameEnv import NameSet import UniqFM import OccName import Unique import Util import Outputable import DynFlags import FastString import MonadUtils import Control.Monad import Data.Maybe import Data.List -- Note [Pragmas to vectorise tycons] -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- -- All imported type constructors that are not mapped to a vectorised type in the vectorisation map -- (possibly because the defining module was not compiled with vectorisation) may be used in scalar -- code encapsulated in vectorised code. If a such a type constructor 'T' is a member of the -- 'Scalar' class (and hence also of 'PData' and 'PRepr'), it may also be used in vectorised code, -- where 'T' represents itself, but the representation of 'T' still remains opaque in vectorised -- code (i.e., it can only be used in scalar code). -- -- An example is the treatment of 'Int'. 'Int's can be used in vectorised code and remain unchanged -- by vectorisation. However, the representation of 'Int' by the 'I#' data constructor wrapping an -- 'Int#' is not exposed in vectorised code. Instead, computations involving the representation need -- to be confined to scalar code. -- -- VECTORISE pragmas for type constructors cover four different flavours of vectorising data type -- constructors: -- -- (1) Data type constructor 'T' that together with its constructors 'Cn' may be used in vectorised -- code, where 'T' and the 'Cn' are automatically vectorised in the same manner as data types -- declared in a vectorised module. This includes the case where the vectoriser determines that -- the original representation of 'T' may be used in vectorised code (as it does not embed any -- parallel arrays.) This case is for type constructors that are *imported* from a non- -- vectorised module, but that we want to use with full vectorisation support. -- -- An example is the treatment of 'Ordering' and '[]'. The former remains unchanged by -- vectorisation, whereas the latter is fully vectorised. -- -- 'PData' and 'PRepr' instances are automatically generated by the vectoriser. -- -- Type constructors declared with {-# VECTORISE type T #-} are treated in this manner. -- -- (2) Data type constructor 'T' that may be used in vectorised code, where 'T' is represented by an -- explicitly given 'Tv', but the representation of 'T' is opaque in vectorised code (i.e., the -- constructors of 'T' may not occur in vectorised code). -- -- An example is the treatment of '[::]'. The type '[::]' can be used in vectorised code and is -- vectorised to 'PArray'. However, the representation of '[::]' is not exposed in vectorised -- code. Instead, computations involving the representation need to be confined to scalar code. 
-- -- 'PData' and 'PRepr' instances need to be explicitly supplied for 'T' (they are not generated -- by the vectoriser). -- -- Type constructors declared with {-# VECTORISE type T = Tv #-} are treated in this manner -- manner. (The vectoriser never treats a type constructor automatically in this manner.) -- -- (3) Data type constructor 'T' that does not contain any parallel arrays and has explicitly -- provided 'PData' and 'PRepr' instances (and maybe also a 'Scalar' instance), which together -- with the type's constructors 'Cn' may be used in vectorised code. The type 'T' and its -- constructors 'Cn' are represented by themselves in vectorised code. -- -- An example is 'Bool', which is represented by itself in vectorised code (as it cannot embed -- any parallel arrays). However, we do not want any automatic generation of class and family -- instances, which is why Case (1) does not apply. -- -- 'PData' and 'PRepr' instances need to be explicitly supplied for 'T' (they are not generated -- by the vectoriser). -- -- Type constructors declared with {-# VECTORISE SCALAR type T #-} are treated in this manner. -- -- (4) Data type constructor 'T' that does not contain any parallel arrays and that, in vectorised -- code, is represented by an explicitly given 'Tv', but the representation of 'T' is opaque in -- vectorised code and 'T' is regarded to be scalar — i.e., it may be used in encapsulated -- scalar subcomputations. -- -- An example is the treatment of '(->)'. Types '(->)' can be used in vectorised code and are -- vectorised to '(:->)'. However, the representation of '(->)' is not exposed in vectorised -- code. Instead, computations involving the representation need to be confined to scalar code -- and may be part of encapsulated scalar computations. -- -- 'PData' and 'PRepr' instances need to be explicitly supplied for 'T' (they are not generated -- by the vectoriser). -- -- Type constructors declared with {-# VECTORISE SCALAR type T = Tv #-} are treated in this -- manner. (The vectoriser never treats a type constructor automatically in this manner.) -- -- In addition, we have also got a single pragma form for type classes: {-# VECTORISE class C #-}. -- It implies that the class type constructor may be used in vectorised code together with its data -- constructor. We generally produce a vectorised version of the data type and data constructor. -- We do not generate 'PData' and 'PRepr' instances for class type constructors. This pragma is the -- default for all type classes declared in a vectorised module, but the pragma can also be used -- explitly on imported classes. -- Note [Vectorising classes] -- ~~~~~~~~~~~~~~~~~~~~~~~~~~ -- -- We vectorise classes essentially by just vectorising their desugared Core representation, but we -- do generate a 'Class' structure along the way (see 'Vectorise.Type.TyConDecl.vectTyConDecl'). -- -- Here is an example illustrating the mapping — assume -- -- class Num a where -- (+) :: a -> a -> a -- -- It desugars to -- -- data Num a = D:Num { (+) :: a -> a -> a } -- -- which we vectorise to -- -- data V:Num a = D:V:Num { ($v+) :: PArray a :-> PArray a :-> PArray a } -- -- while adding the following entries to the vectorisation map: -- -- tycon : Num --> V:Num -- datacon: D:Num --> D:V:Num -- var : (+) --> ($v+) -- |Vectorise type constructor including class type constructors. 
-- vectTypeEnv :: [TyCon] -- Type constructors defined in this module -> [CoreVect] -- All 'VECTORISE [SCALAR] type' declarations in this module -> [CoreVect] -- All 'VECTORISE class' declarations in this module -> VM ( [TyCon] -- old TyCons ++ new TyCons , [FamInst] -- New type family instances. , [(Var, CoreExpr)]) -- New top level bindings. vectTypeEnv tycons vectTypeDecls vectClassDecls = do { traceVt "** vectTypeEnv" $ ppr tycons ; let -- {-# VECTORISE type T -#} (ONLY the imported tycons) impVectTyCons = ( [tycon | VectType False tycon Nothing <- vectTypeDecls] ++ [tycon | VectClass tycon <- vectClassDecls]) \\ tycons -- {-# VECTORISE type T = Tv -#} (imported & local tycons with an /RHS/) vectTyConsWithRHS = [ (tycon, rhs) | VectType False tycon (Just rhs) <- vectTypeDecls] -- {-# VECTORISE SCALAR type T = Tv -#} (imported & local tycons with an /RHS/) scalarTyConsWithRHS = [ (tycon, rhs) | VectType True tycon (Just rhs) <- vectTypeDecls] -- {-# VECTORISE SCALAR type T -#} (imported & local /scalar/ tycons without an RHS) scalarTyConsNoRHS = [tycon | VectType True tycon Nothing <- vectTypeDecls] -- Check that is not a VECTORISE SCALAR tycon nor VECTORISE tycons with explicit rhs? vectSpecialTyConNames = mkNameSet . map tyConName $ scalarTyConsNoRHS ++ map fst (vectTyConsWithRHS ++ scalarTyConsWithRHS) notVectSpecialTyCon tc = not $ (tyConName tc) `elemNameSet` vectSpecialTyConNames -- Build a map containing all vectorised type constructor. If the vectorised type -- constructor differs from the original one, then it is mapped to 'True'; if they are -- both the same, then it maps to 'False'. ; vectTyCons <- globalVectTyCons ; let vectTyConBase = mapUFM_Directly isDistinct vectTyCons -- 'True' iff tc /= V[[tc]] isDistinct u tc = u /= getUnique tc vectTyConFlavour = vectTyConBase `plusNameEnv` mkNameEnv [ (tyConName tycon, True) | (tycon, _) <- vectTyConsWithRHS ++ scalarTyConsWithRHS] `plusNameEnv` mkNameEnv [ (tyConName tycon, False) -- original representation | tycon <- scalarTyConsNoRHS] -- Split the list of 'TyCons' into the ones (1) that we must vectorise and those (2) -- that we could, but don't need to vectorise. Type constructors that are not data -- type constructors or use non-Haskell98 features are being dropped. They may not -- appear in vectorised code. (We also drop the local type constructors appearing in a -- VECTORISE SCALAR pragma or a VECTORISE pragma with an explicit right-hand side, as -- these are being handled separately. NB: Some type constructors may be marked SCALAR -- /and/ have an explicit right-hand side.) -- -- Furthermore, 'par_tcs' are those type constructors (converted or not) whose -- definition, directly or indirectly, depends on parallel arrays. Finally, 'drop_tcs' -- are all type constructors that cannot be vectorised. ; parallelTyCons <- (`extendNameSetList` map (tyConName . 
fst) vectTyConsWithRHS) <$> globalParallelTyCons ; let maybeVectoriseTyCons = filter notVectSpecialTyCon tycons ++ impVectTyCons (conv_tcs, keep_tcs, par_tcs, drop_tcs) = classifyTyCons vectTyConFlavour parallelTyCons maybeVectoriseTyCons ; traceVt " known parallel : " $ ppr parallelTyCons ; traceVt " VECT SCALAR : " $ ppr (scalarTyConsNoRHS ++ map fst scalarTyConsWithRHS) ; traceVt " VECT [class] : " $ ppr impVectTyCons ; traceVt " VECT with rhs : " $ ppr (map fst (vectTyConsWithRHS ++ scalarTyConsWithRHS)) ; traceVt " -- after classification (local and VECT [class] tycons) --" Outputable.empty ; traceVt " reuse : " $ ppr keep_tcs ; traceVt " convert : " $ ppr conv_tcs -- warn the user about unvectorised type constructors ; let explanation = text "(They use unsupported language extensions" $$ text "or depend on type constructors that are" <+> text "not vectorised)" drop_tcs_nosyn = filter (not . isTypeFamilyTyCon) . filter (not . isTypeSynonymTyCon) $ drop_tcs ; unless (null drop_tcs_nosyn) $ emitVt "Warning: cannot vectorise these type constructors:" $ pprQuotedList drop_tcs_nosyn $$ explanation ; mapM_ addParallelTyConAndCons $ par_tcs ++ map fst vectTyConsWithRHS ; let mapping = -- Type constructors that we found we don't need to vectorise and those -- declared VECTORISE SCALAR /without/ an explicit right-hand side, use the same -- representation in both unvectorised and vectorised code; they are not -- abstract. [(tycon, tycon, False) | tycon <- keep_tcs ++ scalarTyConsNoRHS] -- We do the same for type constructors declared VECTORISE SCALAR /without/ -- an explicit right-hand side ++ [(tycon, vTycon, True) | (tycon, vTycon) <- vectTyConsWithRHS ++ scalarTyConsWithRHS] ; syn_tcs <- catMaybes <$> mapM defTyConDataCons mapping -- Vectorise all the data type declarations that we can and must vectorise (enter the -- type and data constructors into the vectorisation map on-the-fly.) ; new_tcs <- vectTyConDecls conv_tcs ; let dumpTc tc vTc = traceVt "---" (ppr tc <+> text "::" <+> ppr (dataConSig tc) $$ ppr vTc <+> text "::" <+> ppr (dataConSig vTc)) dataConSig tc | Just dc <- tyConSingleDataCon_maybe tc = dataConRepType dc | otherwise = panic "dataConSig" ; zipWithM_ dumpTc (filter isClassTyCon conv_tcs) (filter isClassTyCon new_tcs) -- We don't need new representation types for dictionary constructors. The constructors -- are always fully applied, and we don't need to lift them to arrays as a dictionary -- of a particular type always has the same value. ; let orig_tcs = filter (not . isClassTyCon) $ keep_tcs ++ conv_tcs vect_tcs = filter (not . isClassTyCon) $ keep_tcs ++ new_tcs -- Build 'PRepr' and 'PData' instance type constructors and family instances for all -- type constructors with vectorised representations. ; reprs <- mapM tyConRepr vect_tcs ; repr_fis <- zipWith3M buildPReprTyCon orig_tcs vect_tcs reprs ; pdata_fis <- zipWith3M buildPDataTyCon orig_tcs vect_tcs reprs ; pdatas_fis <- zipWith3M buildPDatasTyCon orig_tcs vect_tcs reprs ; let fam_insts = repr_fis ++ pdata_fis ++ pdatas_fis repr_axs = map famInstAxiom repr_fis pdata_tcs = famInstsRepTyCons pdata_fis pdatas_tcs = famInstsRepTyCons pdatas_fis ; updGEnv $ extendFamEnv fam_insts -- Generate workers for the vectorised data constructors, dfuns for the 'PA' instances of -- the vectorised type constructors, and associate the type constructors with their dfuns -- in the global environment. We get back the dfun bindings (which we will subsequently -- inject into the modules toplevel). 
; (_, binds) <- fixV $ \ ~(dfuns, _) -> do { defTyConPAs (zipLazy vect_tcs dfuns) -- Query the 'PData' instance type constructors for type constructors that have a -- VECTORISE SCALAR type pragma without an explicit right-hand side (this is Item -- (3) of "Note [Pragmas to vectorise tycons]" above). ; pdata_scalar_tcs <- mapM pdataReprTyConExact scalarTyConsNoRHS -- Build workers for all vectorised data constructors (except abstract ones) ; sequence_ $ zipWith3 vectDataConWorkers (orig_tcs ++ scalarTyConsNoRHS) (vect_tcs ++ scalarTyConsNoRHS) (pdata_tcs ++ pdata_scalar_tcs) -- Build a 'PA' dictionary for all type constructors (except abstract ones & those -- defined with an explicit right-hand side where the dictionary is user-supplied) ; dfuns <- sequence $ zipWith4 buildTyConPADict vect_tcs repr_axs pdata_tcs pdatas_tcs ; binds <- takeHoisted ; return (dfuns, binds) } -- Return the vectorised variants of type constructors as well as the generated instance -- type constructors, family instances, and dfun bindings. ; return ( new_tcs ++ pdata_tcs ++ pdatas_tcs ++ syn_tcs , fam_insts, binds) } where addParallelTyConAndCons tycon = do { addGlobalParallelTyCon tycon ; mapM_ addGlobalParallelVar [ id | dc <- tyConDataCons tycon , AnId id <- dataConImplicitTyThings dc ] -- Ignoring the promoted tycon; hope that's ok } -- Add a mapping from the original to vectorised type constructor to the vectorisation map. -- Unless the type constructor is abstract, also mappings from the original's data constructors -- to the vectorised type's data constructors. -- -- We have three cases: (1) original and vectorised type constructor are the same, (2) the -- name of the vectorised type constructor is canonical (as prescribed by 'mkVectTyConOcc'), or -- (3) the name is not canonical. In the third case, we additionally introduce a type synonym -- with the canonical name that is set equal to the non-canonical name (so that we find the -- right type constructor when reading vectorisation information from interface files). -- defTyConDataCons (origTyCon, vectTyCon, isAbstract) = do { canonName <- mkLocalisedName mkVectTyConOcc origName ; if origName == vectName -- Case (1) || vectName == canonName -- Case (2) then do { defTyCon origTyCon vectTyCon -- T --> vT ; defDataCons -- Ci --> vCi ; return Nothing } else do -- Case (3) { let synTyCon = mkSyn canonName (mkTyConTy vectTyCon) -- type S = vT ; defTyCon origTyCon synTyCon -- T --> S ; defDataCons -- Ci --> vCi ; return $ Just synTyCon } } where origName = tyConName origTyCon vectName = tyConName vectTyCon mkSyn canonName ty = buildSynTyCon canonName [] (typeKind ty) [] ty defDataCons | isAbstract = return () | otherwise = do { MASSERT(tyConDataCons origTyCon `equalLength` tyConDataCons vectTyCon) ; zipWithM_ defDataCon (tyConDataCons origTyCon) (tyConDataCons vectTyCon) } -- Helpers -------------------------------------------------------------------- buildTyConPADict :: TyCon -> CoAxiom Unbranched -> TyCon -> TyCon -> VM Var buildTyConPADict vect_tc prepr_ax pdata_tc pdatas_tc = tyConRepr vect_tc >>= buildPADict vect_tc prepr_ax pdata_tc pdatas_tc -- Produce a custom-made worker for the data constructors of a vectorised data type. This includes -- all data constructors that may be used in vectorised code — i.e., all data constructors of data -- types with 'VECTORISE [SCALAR] type' pragmas with an explicit right-hand side. Also adds a mapping -- from the original to vectorised worker into the vectorisation map. 
-- -- FIXME: It's not nice that we need create a special worker after the data constructors has -- already been constructed. Also, I don't think the worker is properly added to the data -- constructor. Seems messy. vectDataConWorkers :: TyCon -> TyCon -> TyCon -> VM () vectDataConWorkers orig_tc vect_tc arr_tc = do { traceVt "Building vectorised worker for datatype" (ppr orig_tc) ; bs <- sequence . zipWith3 def_worker (tyConDataCons orig_tc) rep_tys $ zipWith4 mk_data_con (tyConDataCons vect_tc) rep_tys (inits rep_tys) (tail $ tails rep_tys) ; mapM_ (uncurry hoistBinding) bs } where tyvars = tyConTyVars vect_tc var_tys = mkTyVarTys tyvars ty_args = map Type var_tys res_ty = mkTyConApp vect_tc var_tys cons = tyConDataCons vect_tc arity = length cons [arr_dc] = tyConDataCons arr_tc rep_tys = map dataConRepArgTys $ tyConDataCons vect_tc mk_data_con con tys pre post = do dflags <- getDynFlags liftM2 (,) (vect_data_con con) (lift_data_con tys pre post (mkDataConTag dflags con)) sel_replicate len tag | arity > 1 = do rep <- builtin (selReplicate arity) return [rep `mkApps` [len, tag]] | otherwise = return [] vect_data_con con = return $ mkConApp con ty_args lift_data_con tys pre_tys post_tys tag = do len <- builtin liftingContext args <- mapM (newLocalVar (fsLit "xs")) =<< mapM mkPDataType tys sel <- sel_replicate (Var len) tag pre <- mapM emptyPD (concat pre_tys) post <- mapM emptyPD (concat post_tys) return . mkLams (len : args) . wrapFamInstBody arr_tc var_tys . mkConApp arr_dc $ ty_args ++ sel ++ pre ++ map Var args ++ post def_worker data_con arg_tys mk_body = do arity <- polyArity tyvars body <- closedV . inBind orig_worker . polyAbstract tyvars $ \args -> liftM (mkLams (tyvars ++ args) . vectorised) $ buildClosures tyvars [] [] arg_tys res_ty mk_body raw_worker <- mkVectId orig_worker (exprType body) let vect_worker = raw_worker `setIdUnfolding` mkInlineUnfoldingWithArity arity body defGlobalVar orig_worker vect_worker return (vect_worker, body) where orig_worker = dataConWorkId data_con
shlevy/ghc
compiler/vectorise/Vectorise/Type/Env.hs
bsd-3-clause
22,699
0
19
6,860
2,661
1,435
1,226
212
2
module Propellor.Gpg where import Control.Applicative import System.IO import System.FilePath import System.Directory import Data.Maybe import Data.List.Utils import Propellor.PrivData.Paths import Propellor.Message import Utility.SafeCommand import Utility.Process import Utility.Monad import Utility.Misc import Utility.Tmp type KeyId = String keyring :: FilePath keyring = privDataDir </> "keyring.gpg" -- Lists the keys in propellor's keyring. listPubKeys :: IO [KeyId] listPubKeys = parse . lines <$> readProcess "gpg" listopts where listopts = useKeyringOpts ++ ["--with-colons", "--list-public-keys"] parse = mapMaybe (keyIdField . split ":") keyIdField ("pub":_:_:_:f:_) = Just f keyIdField _ = Nothing useKeyringOpts :: [String] useKeyringOpts = [ "--options" , "/dev/null" , "--no-default-keyring" , "--keyring", keyring ] addKey :: KeyId -> IO () addKey keyid = exitBool =<< allM (uncurry actionMessage) [ ("adding key to propellor's keyring", addkeyring) , ("staging propellor's keyring", gitadd keyring) , ("updating encryption of any privdata", reencryptprivdata) , ("configuring git signing to use key", gitconfig) , ("committing changes", gitcommit) ] where addkeyring = do createDirectoryIfMissing True privDataDir boolSystem "sh" [ Param "-c" , Param $ "gpg --export " ++ keyid ++ " | gpg " ++ unwords (useKeyringOpts ++ ["--import"]) ] reencryptprivdata = ifM (doesFileExist privDataFile) ( do gpgEncrypt privDataFile =<< gpgDecrypt privDataFile gitadd privDataFile , return True ) gitadd f = boolSystem "git" [ Param "add" , File f ] gitconfig = ifM (snd <$> processTranscript "gpg" ["--list-secret-keys", keyid] Nothing) ( boolSystem "git" [ Param "config" , Param "user.signingkey" , Param keyid ] , do warningMessage $ "Cannot find a secret key for key " ++ keyid ++ ", so not configuring git user.signingkey to use this key." return True ) gitcommit = gitCommit [ File keyring , Param "-m" , Param "propellor addkey" ] -- Adds --gpg-sign if there's a keyring. gpgSignParams :: [CommandParam] -> IO [CommandParam] gpgSignParams ps = ifM (doesFileExist keyring) ( return (ps ++ [Param "--gpg-sign"]) , return ps ) -- Automatically sign the commit if there'a a keyring. gitCommit :: [CommandParam] -> IO Bool gitCommit ps = do ps' <- gpgSignParams ps boolSystem "git" (Param "commit" : ps') gpgDecrypt :: FilePath -> IO String gpgDecrypt f = ifM (doesFileExist f) ( readProcess "gpg" ["--decrypt", f] , return "" ) -- Encrypt file to all keys in propellor's keyring. gpgEncrypt :: FilePath -> String -> IO () gpgEncrypt f s = do keyids <- listPubKeys let opts = [ "--default-recipient-self" , "--armor" , "--encrypt" , "--trust-model", "always" ] ++ concatMap (\k -> ["--recipient", k]) keyids encrypted <- writeReadProcessEnv "gpg" opts Nothing (Just $ flip hPutStr s) Nothing viaTmp writeFile f encrypted
sjfloat/propellor
src/Propellor/Gpg.hs
bsd-2-clause
2,962
52
15
546
889
467
422
88
2
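The Propellor.Gpg module above is easiest to follow from the caller's side. Below is a minimal, hypothetical sketch added by the editor; it assumes gpg is on the PATH, that propellor's keyring already holds at least one public key, and the file path is illustrative only.

-- Editor's sketch, not part of propellor: round-trip a secret through the keyring.
import Propellor.Gpg

main :: IO ()
main = do
  keys <- listPubKeys                             -- key ids in privdata/keyring.gpg
  putStrLn ("known keys: " ++ show keys)
  gpgEncrypt "privdata/example.gpg" "hunter2\n"   -- encrypts to every listed key
  plain <- gpgDecrypt "privdata/example.gpg"      -- shells out to gpg --decrypt
  putStr plain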
module MergePullRequest where import qualified Github.PullRequests as Github import Github.Auth import Github.Data main :: IO () main = do mergeResult <- Github.updatePullRequest (GithubOAuth "authtoken") "repoOwner" "repoName" 22 (EditPullRequest { editPullRequestTitle = Just "Brand new title", editPullRequestBody = Nothing, editPullRequestState = Just EditPullRequestStateClosed }) case mergeResult of (Left err) -> putStrLn $ "Error: " ++ (show err) (Right dpr) -> putStrLn . show $ dpr
bitemyapp/github
samples/Pulls/UpdatePull.hs
bsd-3-clause
513
0
12
85
145
78
67
10
2
{-# OPTIONS_GHC -fwarn-unused-binds #-}
module ShouldCompile() where
-- Trac #2497; test should compile without language
-- pragmas to switch on the forall
{-# RULES "id" forall (x :: a). id x = x #-}
-- Trac #2213; eq should not be reported as unused
eq,beq :: Eq a => a -> a -> Bool
eq = (==) -- Used
beq = (==) -- Unused
{-# RULES "rule 1" forall x y. x == y = y `eq` x #-}
hvr/jhc
regress/tests/1_typecheck/2_pass/ghc/T2497.hs
mit
402
0
7
105
51
35
16
8
1
{-# LANGUAGE Trustworthy #-} {-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Control.Category -- Copyright : (c) Ashley Yakeley 2007 -- License : BSD-style (see the LICENSE file in the distribution) -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable -- http://hackage.haskell.org/trac/ghc/ticket/1773 module Control.Category where import qualified Prelude infixr 9 . infixr 1 >>>, <<< -- | A class for categories. -- id and (.) must form a monoid. class Category cat where -- | the identity morphism id :: cat a a -- | morphism composition (.) :: cat b c -> cat a b -> cat a c {-# RULES "identity/left" forall p . id . p = p "identity/right" forall p . p . id = p "association" forall p q r . (p . q) . r = p . (q . r) #-} instance Category (->) where id = Prelude.id (.) = (Prelude..) -- | Right-to-left composition (<<<) :: Category cat => cat b c -> cat a b -> cat a c (<<<) = (.) -- | Left-to-right composition (>>>) :: Category cat => cat a b -> cat b c -> cat a c f >>> g = g . f
szatkus/haste-compiler
libraries/ghc-7.8/base/Control/Category.hs
bsd-3-clause
1,211
0
9
319
218
126
92
23
1
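Because the Control.Category module above only states the laws and the (->) instance, a short usage example may help. This snippet is editor-added; it simply composes ordinary functions with (>>>) and (<<<).

import Prelude hiding ((.), id)
import Control.Category

addThenDouble :: Int -> Int
addThenDouble = (+ 1) >>> (* 2)   -- left-to-right: (5 + 1) * 2 == 12

doubleThenAdd :: Int -> Int
doubleThenAdd = (+ 1) <<< (* 2)   -- right-to-left: (5 * 2) + 1 == 11

main :: IO ()
main = print (addThenDouble 5, doubleThenAdd 5)   -- (12,11)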
{- | Module : System.JBI.Config Description : Run-time configuration settings Copyright : (c) Ivan Lazar Miljenovic License : MIT Maintainer : [email protected] -} module System.JBI.Config where -------------------------------------------------------------------------------- newtype Config = Config { debugMode :: Bool } deriving (Eq, Show, Read) defaultConfig :: Config defaultConfig = Config { debugMode = False }
ivan-m/jbi
lib/System/JBI/Config.hs
mit
467
0
6
93
55
35
20
7
1
import System.Environment import System.Exit import System.Process import Data.IORef import Control.Monad import System.Timeout import Data.Maybe import GetFullProgName import Syntax import NarrowingSearch import SearchControl import Check import PrintProof import TPTPSyntax hiding (App, Var, Implies, Forall) import ParseGlue import Parser import Translate import Transform import ProofExport import ProofExportTPTP -- --------------------------------- getProb :: String -> String -> IO (ParseResult [ThfAnnotated]) getProb axiomspath filename = do file <- readFile filename case parse file 1 of FailP err -> return $ FailP err OkP prob -> expand prob where expand [] = return $ OkP [] expand (Include axfile : xs) = do axprob <- getProb axiomspath (axiomspath ++ axfile) case axprob of FailP err -> return $ FailP $ "in " ++ axiomspath ++ axfile ++ ": " ++ err OkP axprob -> do xs <- expand xs case xs of FailP err -> return $ FailP err OkP xs -> return $ OkP (axprob ++ xs) expand (AnnotatedFormula x : xs) = do xs <- expand xs case xs of FailP err -> return $ FailP err OkP xs -> return $ OkP (x : xs) -- --------------------------------- itdeepSearch :: (a -> IO Bool) -> Int -> Int -> (Int -> IO a) -> IO () itdeepSearch stop depth step f = do res <- f depth b <- stop res when (not b) $ itdeepSearch stop (depth + step) step f solveProb :: Bool -> Maybe Int -> Maybe Int -> Problem -> IO (Maybe [(String, MetaProof)]) solveProb saveproofs inter mdepth prob = do prfs <- newIORef [] let doconjs [] = do prfs <- readIORef prfs return $ Just prfs doconjs ((name, conj) : conjs) = do ticks <- newIORef 0 nsol <- newIORef 1 prf <- initMeta let hsol = when saveproofs $ do prfcpy <- expandmetas prf modifyIORef prfs ((name, prfcpy) :) p d = andp (checkProof d [] (cl conj) prf) (sidecontrol prf (SCState {scsCtx = 0, scsHyps = [], scsNewHyp = NewGlobHyps})) stop res = do nsol' <- readIORef nsol return $ nsol' == 0 || res == False ss d di = topSearch (isJust inter) ticks nsol hsol (BIEnv (prGlobHyps prob)) (p d) d di case mdepth of Just depth -> ss depth (depth + 1) >> return () Nothing -> case isJust inter of True -> ss 100000000 (100000000 + 1) >> return () False -> itdeepSearch stop 999 1000 (\d -> ss d 1000) nsol <- readIORef nsol if nsol == 0 then doconjs conjs else return Nothing case inter of Just idx -> doconjs [prConjectures prob !! idx] Nothing -> doconjs (prConjectures prob) -- --------------------------------- data CLArgs = CLArgs { problemfile :: String, fproof :: Bool, finteractive :: Maybe Int, fnotransform :: Bool, fagdaproof :: Bool, ftptpproof :: Bool, ftimeout :: Maybe Int, fcheck :: Bool, fdepth :: Maybe Int, fincludedir :: String, fshowproblem :: Bool } doit args = do tptpprob <- getProb (fincludedir args) (problemfile args) case tptpprob of FailP err -> do szs_status args "Error" "parse error" putStrLn err OkP tptpprob -> do let probname = (reverse . takeWhile (/= '/') . drop 2 . 
reverse) (problemfile args) prob = translateProb probname tptpprob trprob = transformProb (not (fnotransform args)) prob when (fshowproblem args) $ do when (not (fnotransform args)) $ do pprob <- prProblem prob putStrLn $ "non-transformed problem:\n" ++ pprob ptrprob <- prProblem trprob putStrLn $ "problem:\n" ++ ptrprob case fcheck args of False -> case prConjectures trprob of [] -> szs_status args "Error" "no conjecture to prove" _ -> do res <- solveProb (fproof args || (fagdaproof args && isNothing (finteractive args)) || ftptpproof args) (finteractive args) (fdepth args) trprob case res of Just prfs -> do szs_status args "Theorem" "" -- solution found when (fproof args) $ do putStrLn $ "% SZS output start Proof for " ++ problemfile args putStrLn $ "The transformed problem consists of the following conjectures:" putStrLn $ concatMap (\x -> ' ' : fst x) (reverse prfs) mapM_ (\(name, prf) -> putStrLn ("\nProof for " ++ name ++ ":") >> prProof 0 prf >>= putStrLn ) $ reverse prfs putStrLn $ "% SZS output end Proof for " ++ problemfile args when (fagdaproof args && isNothing (finteractive args)) $ do putStrLn $ "The following top-level files were created:" mapM_ (\(name, prf) -> agdaProof trprob name prf ) $ reverse prfs when (ftptpproof args) $ tptpproof tptpprob prob trprob (reverse prfs) Nothing -> szs_status args "GaveUp" "" -- exhaustive search True -> do when (fshowproblem args) $ putStrLn "checking globhyps:" okhyps <- mapM (\gh -> do res <- runProp (checkForm [] typeBool $ ghForm gh) when (fshowproblem args) $ putStrLn $ ghName gh ++ pres res return $ noerrors res ) (prGlobHyps trprob) when (fshowproblem args) $ putStrLn "checking conjectures:" okconjs <- mapM (\(cname, form) -> do res <- runProp (checkForm [] typeBool form) when (fshowproblem args) $ putStrLn $ cname ++ pres res return $ noerrors res ) (prConjectures trprob) case and (okhyps ++ okconjs) of True -> putStrLn "check OK" False -> putStrLn "check FAILED" where noerrors res = not $ '\"' `elem` res pres res = if '\"' `elem` res then error $ " error: " ++ res else " ok" szs_status args status comment = putStrLn $ "% SZS status " ++ status ++ " for " ++ problemfile args ++ (if null comment then "" else (" : " ++ comment)) main = do args <- getArgs case consume "--safe-mode" args of Nothing -> case ["--help"] == args of False -> case parseargs args of Just args -> case ftimeout args of Nothing -> doit args Just seconds -> do res <- timeout (seconds * 1000000) $ doit args case res of Just () -> return () Nothing -> szs_status args "Timeout" "" Nothing -> do putStrLn "command argument error\n" printusage True -> printusage Just args -> case parseargs args of Nothing -> do putStrLn "command argument error\n" printusage Just pargs -> do prgname <- getFullProgName (exitcode, out, err) <- readProcessWithExitCode prgname args "" case exitcode of ExitSuccess -> return () ExitFailure code -> szs_status pargs "Error" ("program stopped abnormally, exitcode: " ++ show code) putStrLn out putStrLn err parseargs = g (CLArgs {problemfile = "", fproof = False, finteractive = Nothing, fnotransform = False, fagdaproof = False, ftptpproof = False, ftimeout = Nothing, fcheck = False, fdepth = Nothing, fincludedir = "", fshowproblem = False}) where g a [] = if null (problemfile a) then Nothing else Just a g a ("--proof" : xs) = g (a {fproof = True}) xs g a ("--interactive" : n : xs) = g (a {finteractive = Just (read n)}) xs g a ("--no-transform" : xs) = g (a {fnotransform = True}) xs g a ("--agda-proof" : xs) = g (a {fagdaproof = True}) xs g a 
("--tptp-proof" : xs) = g (a {ftptpproof = True}) xs g a ("--time-out" : n : xs) = g (a {ftimeout = Just (read n)}) xs g a ("--check" : xs) = g (a {fcheck = True}) xs g a ("--depth" : n : xs) = g (a {fdepth = Just (read n)}) xs g a ("--include-dir" : s : xs) = if null (fincludedir a) then g (a {fincludedir = s}) xs else Nothing g a ("--show-problem" : xs) = g (a {fshowproblem = True}) xs g a (x : xs) = if null (problemfile a) then g (a {problemfile = x}) xs else Nothing consume y [] = Nothing consume y (x : xs) | x == y = Just xs consume y (x : xs) | otherwise = case consume y xs of Just xs -> Just (x : xs) Nothing -> Nothing printusage = putStr $ "usage:\n" ++ "agsyHOL <flags> file <flags>\n" ++ " file is a TPTP THF problem file.\n" ++ " flags:\n" ++ " --safe-mode Catches unhandled errors.\n" ++ " --proof Output a proof (in internal format, one proof for each conjecture\n" ++ " of the transformed problem).\n" ++ " --interactive n Do interactive search for subproblem n (n=0,1,2..).\n" ++ " Use --show-problem to see the list of subproblems.\n" ++ " --no-transform Do not transform problem. (Normally the number of negations is minimized.)\n" ++ " --agda-proof Save agda proof files named Proof-<problem_name>-<conjecture_name>.agda and\n" ++ " Proof-<problem_name>-<conjecture_name>-<nn>.agda in current directory.\n" ++ " In order to check the proof with agda, run it on all files listed in the output.\n" ++ " Note that the agda files in the soundness directory must be in scope.\n" ++ " --time-out n Set timeout in seconds. (default: no time out)\n" ++ " --check Just check the problem for well formedness.\n" ++ " --depth n Set search depth. (default: iterated deepening for auto mode (start depth: 999, step: 1000),\n" ++ " unlimited depth for interactive mode)\n" ++ " --include-dir p Set path to axiom files.\n" ++ " --show-problem Display original and transformed problem in internal format.\n" ++ " Combined with --check also show check failure details.\n" ++ " +RTS .. -RTS Give general ghc run time system options, e.g. -t which outputs time and memory usage information.\n" ++ " --help Show this help.\n"
frelindb/agsyHOL
Main.hs
mit
9,758
0
37
2,659
3,171
1,564
1,607
241
15
import qualified System.Environment f s@(x:xs) = x:s sign x | x > 0 = 1 | x == 0 = 0 | x < 0 = -1 test (x:xs) | x == -1 = True | x > 0 = False test [] = False testc xs = case xs of (x:xs) -> x [] -> 0 foo xs = case xs of (x:xs) -> x [] -> [] bar :: Int -> Either [Char] Int bar x = if x > 2 then Right x else Left "Number less than two" -- getArgs usage main :: IO () -- main = do args <- System.Environment.getArgs -- putStrLn $ concat args -- main = foo2 foo2 = putStrLn "Enter the name: " >> getLine >>= \name -> putStrLn $ "You Entered: " ++ name class BasicEq a where isEqual :: a -> a -> Bool instance BasicEq Bool where isEqual True True = True isEqual False False = True isEqual _ _ = False data Date = Date Int Int Int deriving (Show) data Time = Time Int Int Int deriving (Show) data Reading = Reading Int deriving (Show) data Day = Day String String deriving (Show) data BloodReading = BloodReading Date Time Reading Day deriving (Show) getReading :: BloodReading -> Reading getReading (BloodReading _ _ x _) = x getInt :: Reading -> Int getInt (Reading x) = x class BasicEq3 a where isEqual1, isNotEqual1 :: a -> a -> Bool isEqual1 x y = not (isNotEqual1 x y ) isNotEqual1 x y = not (isEqual1 x y) instance BasicEq3 Bool where isEqual1 True True = True isEqual1 False False = True isEqual1 _ _ = False data Color = Red | Green | Blue deriving (Show, Read) instance BasicEq3 Color where isEqual1 Red Red = True isEqual1 Green Green = True isEqual1 Blue Blue = True isEqual1 _ _ = False -- main = do putStrLn "Enter the number: " -- inpStr <- getLine -- let inpDouble = (read inpStr)::Color -- putStrLn $ " square of number is " ++ show inpDouble -- Serialization using read and show typeclasses main = do putStrLn "Enter the data:" inpStr <- getLine putStrLn "Writing to File..." writeFile "test" inpStr putStrLn "Reading File..." fileString <- readFile "test" let x = (read fileString)::[Maybe Int] putStrLn (show x) data MyType = MyType (Int -> Bool) foo1 x = x < 5 data CannotShow = CannotShow deriving (Show) data CannotDeriveShow = CannotDeriveShow CannotShow deriving (Show) g :: a -> () -> a g x () = x sumList xs ys = map add $ zip xs ys where add (x, y) = x + y listComb xs ys = do x <- xs y <- ys return (x, y) mymap f = foldl (\x -> f x) 0 mi x = [(1, 2), (4, 5)]
gitrookie/functionalcode
code/Haskell/snippets/test.hs
mit
2,619
0
11
803
965
495
470
78
2
{-# LANGUAGE TemplateHaskell #-} {- Copyright (C) 2012-2017 Luke Brown <http://gsd.uwaterloo.ca> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -} import Language.Clafer.IG.ClaferIG import Language.Clafer.IG.ClaferModel import Language.Clafer.IG.CommandLine import Control.Monad import Control.Monad.IO.Class import Data.Maybe import Data.IORef import System.Directory import System.FilePath import System.Console.CmdArgs import Prelude hiding (all) import Test.Tasty import Test.Tasty.HUnit import Test.Tasty.TH tg_testsuite :: TestTree tg_testsuite = $(testGroupGenerator) main :: IO () main = defaultMain $ testGroup "Tests" [ tg_testsuite ] claferIGArgsDef :: IGArgs claferIGArgsDef = IGArgs { all = def, saveDir = def, claferModelFile = def, alloySolution = def, bitwidth = 4, maxInt = 7, useUids = False, addTypes = False, json = False, flatten_inheritance_comp = False, no_layout_comp = False, check_duplicates_comp = False, skip_resolver_comp = False, scope_strategy_comp = def } &= summary claferIGVersion defaultIGArgs :: FilePath -> IGArgs --defaultIGArgs fPath = IGArgs Nothing Nothing fPath False 4 False False False defaultIGArgs fPath = claferIGArgsDef{claferModelFile = fPath} --getModel :: MonadIO m => FilePath -> ClaferIGT m (Either Language.ClaferT.ClaferErrs Instance) getModel fPath = runClaferIGT (defaultIGArgs fPath) $ do setGlobalScope (fromMaybe 1 $ all $ defaultIGArgs fPath) solve counterRef <- liftIO $ newIORef 1 let saveDirectory = fromMaybe return $ underDirectory `liftM` saveDir (defaultIGArgs fPath) let nextFile = savePath fPath counterRef >>= saveDirectory file <- liftIO nextFile liftIO $ createDirectoryIfMissing True $ takeDirectory file next where savePath :: FilePath -> IORef Int -> IO FilePath savePath fPath' counterRef = do counter <- readIORef counterRef writeIORef counterRef (counter + 1) return $ fPath' ++ "." ++ (show counter) ++ ".data" underDirectory :: FilePath -> FilePath -> IO FilePath underDirectory dir file = do createDirectoryIfMissing True dir return $ joinPath [dir, file] fromRight (Right x) = x fromRight _ = error "fromRight received Left _. Should never happen." case_strMapCheck :: Assertion case_strMapCheck = do --let claferModel = Right $ Instance (ClaferModel [(Clafer (Id "" 0) (Just (StringValue "")) [])]) "" claferModel' <- getModel "test/positive/i220.cfr" (valueCheck $ c_value $ head $ c_topLevel $ modelInstance $ fromRight $ claferModel') @? "Mapping Int back to String Failed!" 
where valueCheck Nothing = False valueCheck (Just (AliasValue _)) = False valueCheck (Just (IntValue _)) = False valueCheck (Just (StringValue _)) = True case_pickLargerScope :: Assertion case_pickLargerScope = do let oldScopes = [ ("c0_1", 1), ("c1_b", 2), ("c0_x", 5) ] newScopes = [ ("c0_1", 2), ("c0_b", 2), ("c1_b", 1)] mergedScopes = map (pickLargerScope oldScopes) newScopes mergedScopes @?= [ ("c0_1", 2), ("c0_b", 2), ("c1_b", 2)]
gsdlab/claferIG
test/test-suite.hs
mit
4,322
0
14
992
808
439
369
75
4
module Job.Activation ( sendActivationMail ) where import Import import qualified Data.ByteString.Lazy.Char8 as C import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Network.HTTP.Simple as HTTP import Job.Common data MailchimpActivate = MailchimpActivate Text Text deriving Show instance ToJSON MailchimpActivate where toJSON (MailchimpActivate email activationLink) = object [ "email_address" .= email -- ^ Mailchimp user email address , "status" .= ("subscribed" :: Text) -- ^ Mailchimp user status (i.e. subscribed, pending etc) , "merge_fields" .= object [ "MMERGE15" .= activationLink -- ^ Activation link mailchimp merge field , "MMERGE18" .= ("Ja" :: Text) -- ^ Add the user to the "Signed Up" group ] ] -- | Add the user to mailchimp list. sendActivationMail :: Key Job -> JobValue -> HandlerT App IO () sendActivationMail jobId (JobValueUserMail mail') = do let mail = T.toLower mail' $logInfo $ "Running sendActivationMail job for " <> mail master <- getYesod maybeUser <- runDB . getBy $ UniqueEmail mail case maybeUser of Nothing -> return () Just (Entity _ signup) -> do now <- liftIO getCurrentTime render <- getUrlRender let lang = signupLanguage signup -- Add analytics tracking to the URL if it is set. let utms = case appAnalytics $ appSettings master of Nothing -> "" Just _ -> "?utm_medium=email&utm_campaign=activation" let activationUrl = render (ActivateSignupIR lang (signupActivationToken signup)) <> utms let subscriber = MailchimpActivate mail activationUrl let postRequest = mailchimpPostRequest master lang subscriber postResponse <- liftIO $ HTTP.httpLBS postRequest let postResp = T.decodeUtf8 . C.toStrict $ HTTP.getResponseBody postResponse -- Check if the API call was successful or not case HTTP.getResponseStatusCode postResponse of -- Status code 200 indicates the user was successfully added. 200 -> runDB $ update jobId [JobFinished =. True, JobUpdated =. now, JobResult =. Just postResp] -- If we get a status code 400, the user already exists and we need to -- send a PATCH request instead to update their information. 400 -> do let patchRequest = mailchimpPatchRequest master lang subscriber mail patchResponse <- liftIO $ HTTP.httpLBS patchRequest let patchResp = T.decodeUtf8 . C.toStrict $ HTTP.getResponseBody patchResponse -- Check if the API call was successful or not. case HTTP.getResponseStatusCode patchResponse of 200 -> runDB $ update jobId [JobFinished =. True, JobUpdated =. now, JobResult =. Just patchResp] _ -> runDB $ update jobId [JobUpdated =. now, JobResult =. Just patchResp] -- Any other status code and the job is marked as failed. _ -> runDB $ update jobId [JobUpdated =. now, JobResult =. Just "Failed"] return () sendActivationMail _ _ = return ()
Tehnix/campaigns
Job/Activation.hs
mit
3,248
0
24
907
706
356
350
-1
-1
-- https://howistart.org/posts/haskell/1 module Main where import qualified Data.ByteString.Lazy as BL import qualified Data.Vector as V -- from cassava import Data.Csv -- a simple type alias for data type BaseballStats = (BL.ByteString, Int, BL.ByteString, Int) fourth :: (a, b, c, d) -> d fourth (_, _, _, d) = d baseballStats :: BL.ByteString -> Either String (V.Vector BaseballStats) baseballStats = decode NoHeader main :: IO () main = do csvData <- BL.readFile "batting.csv" let summed = fmap (V.foldr summer 0) (baseballStats csvData) putStrLn $ "Total atBats was: " ++ (show summed) where summer = (+) . fourth
aitoroses/haskell-starter-repo
src/Main.hs
mit
633
0
13
110
211
120
91
15
1
{-# LANGUAGE MultiParamTypeClasses, TypeOperators , TypeFamilies, UndecidableInstances, CPP , FlexibleContexts #-} type Time = Float --data Time deriving (Ord) --data R = NegInf | R Float | PosInf deriving (Eq,Ord) --data Time = T R | AtLeast R -- semantically, a behavior is a time dependant function type Behavior a = Time -> a -- semantically, an event is a list of time value pairs type Event a = (Time,a) -- interpretation of a behavior at a point in time -- this gives the denotation (semantic function) at :: Behavior a -> Time -> a at b t = b t -- interpretation of an event occ :: Event a -> (Time,a) occ = id occs :: Event a -> [(Time,a)] occs e = undefined time :: Behavior Time time = at id fmap :: (a -> b) -> Behavior a -> Behavior b fmap f b t = f (b t) always :: a -> Behavior a always = const ap :: Behavior (a -> b) -> Behavior a -> Behavior b ap bf ba = \t -> (bf t) (ba t) lift :: (a -> b) -> Behavior a -> Behavior b lift f a = always f `ap` a lift2 :: (a -> b -> c) -> Behavior a -> Behavior b -> Behavior c lift2 f a b = always f `ap` a `ap` b lift3 :: (a -> b -> c -> d) -> Behavior a -> Behavior b -> Behavior c -> Behavior d lift3 f a b c = always f `ap` a `ap` b `ap` c -- time transformtion timeX :: Behavior a -> Behavior Time -> Behavior a timeX ba bt = ba . bt -- timeX b time = b -- timeX b (time/2) -- slows down animation by factor of 2 -- timeX b (time - 2) -- delays by 2 seconds class AdditiveGroup v where zeroV :: v (^+^) :: v -> v -> v negateV :: v -> v (^-^) :: v -> v -> v class AdditiveGroup v => VectorSpace v where type Scalar v :: * (*^) :: Scalar v -> v -> v -- ∫{t,t0} b -- can be used to specify velocity and acceleration -- creates a behavior which sums all values of the argument behavior starting with -- the initial time up to the argument time -- the vector space implements the summation integral :: VectorSpace a => Behavior a -> Time -> Behavior a integral b t0 t = b t ^+^ b t0 -- returns values of argument behavior until event occurs -- after which the values of the behavior produced by the event are returned untilB :: Behavior a -> Event (Behavior a) -> Behavior a untilB b e t = if t <= te then b t else b' t where (te,b') = occ e (+=>) :: Event a -> (Time -> a -> b) -> Event b e +=> f = (te,f te a) where (te,a) = occ e (==>) :: Event a -> (a -> b) -> Event b e ==> f = (te,f a) where (te,a) = occ e (*=>) :: Event a -> (Time -> b) -> Event b e *=> f = e +=> \t _ -> f t (-=>) :: Event a -> b -> Event b e -=> b = e +=> \_ _ -> b constEv :: Time -> a -> Event a constEv t a = (t,a) lbp :: Time -> Event (Event ()) lbp = undefined rbp :: Time -> Event (Event ()) rbp = undefined -- b1 untilB (lbp t0) ==> \e -> b2 untilB e -=> b3 -- b1 until left button pressed then b2 until left button is released then finally b3 predicate :: Behavior Bool -> Time -> Event () predicate b t = (undefined,()) -- choose the earlier of two events (*|*) :: Event a -> Event a -> Event a a *|* b = undefined snapshot :: Event a -> Behavior b -> Event (a,b) snapshot = undefined main :: IO () main = return ()
eulerfx/learnfp
functional_reactive_animation.hs
mit
3,136
0
9
763
1,160
609
551
63
2
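The behaviors-and-events file above is all definitions and no use sites. The following editor's sketch (the names wiggle, switched, and demo are invented) shows how the combinators compose if appended to that module.

-- A behavior that oscillates with time.
wiggle :: Behavior Float
wiggle = lift sin time

-- Follow wiggle until time 5, then switch to the constant behavior 0.
switched :: Behavior Float
switched = wiggle `untilB` constEv 5 (always 0)

-- Sample the switched behavior at a few times.
demo :: [Float]
demo = map (switched `at`) [0, 2, 4, 6, 8]   -- [sin 0, sin 2, sin 4, 0, 0]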
{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE FlexibleContexts #-} module Language.PureScript.Linter.Imports (findUnusedImports, Name(..), UsedImports()) where import Prelude () import Prelude.Compat import qualified Data.Map as M import Data.Maybe (mapMaybe) import Data.List ((\\), find) import Control.Monad.Error.Class (MonadError(..)) import Control.Monad.Writer.Class import Control.Monad(unless,when) import Data.Foldable (forM_) import Language.PureScript.AST.Declarations import Language.PureScript.AST.SourcePos import Language.PureScript.Names as P import Language.PureScript.Errors import Language.PureScript.Sugar.Names.Env import Language.PureScript.Sugar.Names.Imports import qualified Language.PureScript.Constants as C -- | Imported name used in some type or expression. data Name = IdentName (Qualified Ident) | IsProperName (Qualified ProperName) | DctorName (Qualified ProperName) -- | Map of module name to list of imported names from that module which have been used. type UsedImports = M.Map ModuleName [Name] -- | -- Find and warn on any unused import statements (qualified or unqualified) -- or references in an explicit import list. -- findUnusedImports :: forall m. (Applicative m, MonadError MultipleErrors m, MonadWriter MultipleErrors m) => Module -> Env -> UsedImports -> m () findUnusedImports (Module _ _ _ mdecls _) env usedImps = do imps <- findImports mdecls forM_ (M.toAscList imps) $ \(mni, decls) -> unless (mni `elem` autoIncludes) $ forM_ decls $ \(ss, declType, qualifierName) -> censor (onErrorMessages $ addModuleLocError ss) $ let usedNames = mapMaybe (matchName (typeForDCtor mni) qualifierName) $ sugarNames ++ M.findWithDefault [] mni usedImps in case declType of Implicit -> when (null usedNames) $ tell $ errorMessage $ UnusedImport mni Explicit declrefs -> do let idents = mapMaybe runDeclRef declrefs let diff = idents \\ usedNames unless (null diff) $ tell $ errorMessage $ UnusedExplicitImport mni diff _ -> return () where sugarNames :: [ Name ] sugarNames = [ IdentName $ Qualified Nothing (Ident C.bind) ] autoIncludes :: [ ModuleName ] autoIncludes = [ ModuleName [ProperName C.prim] ] typeForDCtor :: ModuleName -> ProperName -> Maybe ProperName typeForDCtor mn pn = getTy <$> find matches tys where matches ((_, ctors), _) = pn `elem` ctors getTy ((ty, _), _) = ty tys :: [((ProperName, [ProperName]), ModuleName)] tys = maybe [] exportedTypes $ envModuleExports <$> mn `M.lookup` env matchName :: (ProperName -> Maybe ProperName) -> Maybe ModuleName -> Name -> Maybe String matchName _ qual (IdentName (Qualified q x)) | q == qual = Just $ showIdent x matchName _ qual (IsProperName (Qualified q x)) | q == qual = Just $ runProperName x matchName lookupDc qual (DctorName (Qualified q x)) | q == qual = runProperName <$> lookupDc x matchName _ _ _ = Nothing runDeclRef :: DeclarationRef -> Maybe String runDeclRef (PositionedDeclarationRef _ _ ref) = runDeclRef ref runDeclRef (ValueRef ident) = Just $ showIdent ident runDeclRef (TypeRef pn _) = Just $ runProperName pn runDeclRef _ = Nothing addModuleLocError :: Maybe SourceSpan -> ErrorMessage -> ErrorMessage addModuleLocError sp err = case sp of Just pos -> withPosition pos err _ -> err
michaelficarra/purescript
src/Language/PureScript/Linter/Imports.hs
mit
3,353
0
24
601
1,063
570
493
63
3
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-} module Y2017.M12.D12.Exercise where {-- Okay, I came across an error when I was filtering articles with indexed keywords: time cold_filtered 6495 trump cold_filtered: UnexpectedNull {errSQLType = "text", errSQLTableOid = Just (Oid 50231), errSQLField = "summary", errHaskellType = "Text", errMessage = ""} What this is saying is that there are articles that do not have summaries, and so the type for summary, String, is not appropriate. I changed the type to Maybe String, and that fixes the error, but now we are recommending articles that do not have summaries. That's not good. So, today's Haskell problem is to filter out articles that do not have summaries... shoot! Which I already did, since a brief, intrinsic to its type needs a summary. So, instead we have a different problem. Filtering by keyword is slow, not because the filtering is slow, but when we go back to assign a ranking we fetch the key-phrase strength for each of the articles, ... one-by-one. See: --} import Y2017.M11.D20.Exercise hiding (keyphrasesStmt, keyphrase4) {-- We do not want to make separate SQL calls for each article id. Instead, we want to batch all our article ids into one query. So, let's rewrite the query so that we send all the SQL in one shot. --} import Database.PostgreSQL.Simple import Database.PostgreSQL.Simple.SqlQQ -- below imports available via 1HaskellADay git repository import Store.SQL.Connection import Store.SQL.Util.Indexed import Y2017.M11.D03.Exercise -- for Keyphrase import Y2017.M11.D07.Exercise -- for Recommendation import Y2017.M11.D13.Exercise -- for KeyWord import Y2017.M11.D17.Exercise -- for KWtable keyphrasesStmt :: Query keyphrasesStmt = [sql|SELECT a.article_id,a.keyphrase_id,k.strength,k.keyphrase FROM keyphrase k LEFT JOIN article_keyphrase a ON a.keyphrase_id = k.id WHERE a.article_id IN ?|] keyphrase4 :: Connection -> [Integer] -> IO [(Integer, Integer, Double, String)] keyphrase4 conn artids = undefined {-- BONUS ----------------------------------------------------------------- So, with the new one-query keyphrase-fetch, let's rebuild the app using that instead. That is to say: rebuild recs' --} recs' :: Connection -> [KeyWord] -> KWtable -> IO [Recommendation] recs' conn kws table = undefined main' :: [String] -> IO () main' (artid:keywords) = undefined -- hints: -- look at Y2017.M11.D20 for definition of recs' -- look at Y2017.M12.D06 for filtered on article id keyword searches -- time the old filtered approach (see Y2017.M12.D06) verses this approach for -- article_id 6495 and the sole keyword: mars -- now time both approaches for article_id 6495 and the sole keyword: trump
geophf/1HaskellADay
exercises/HAD/Y2017/M12/D12/Exercise.hs
mit
2,742
0
9
453
221
142
79
20
1
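The exercise above leaves keyphrase4 undefined. One plausible way to batch every article id into a single round trip is postgresql-simple's In wrapper, which expands a list for the "IN ?" placeholder in keyphrasesStmt. This is an editor's sketch of that idea, not the official solution; it is named keyphrase4' to avoid clashing with the stub and assumes it is added alongside the definitions in the exercise module, where query, Only, and In are already in scope.

-- One query for all article ids; In [6495, ...] renders as "IN (6495, ...)".
keyphrase4' :: Connection -> [Integer] -> IO [(Integer, Integer, Double, String)]
keyphrase4' conn artids = query conn keyphrasesStmt (Only (In artids))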
{-# LANGUAGE ViewPatterns #-} {-# LANGUAGE OverloadedStrings #-} module Yesod.Devel.Capture ( Capture , startCapture , logMessage , outputChunk , waitCaptured ) where import Data.Text (Text) import Data.ByteString (ByteString) import Control.Concurrent.STM import Data.Time (UTCTime, getCurrentTime) import Control.Applicative ((<$>), (<|>)) import qualified Data.ByteString as S data Capture = Capture { logMessage :: Text -> IO () , outputChunk :: ByteString -> IO () , waitCaptured :: STM (Either (UTCTime, Text) ByteString) } startCapture :: IO Capture startCapture = do logMessages <- atomically newTChan chunks <- atomically newTChan return Capture { logMessage = \msg -> do now <- getCurrentTime atomically $ writeTChan logMessages (now, msg) , outputChunk = atomically . writeTChan chunks . stripColorCodes , waitCaptured = (Left <$> readTChan logMessages) <|> (Right <$> readTChan chunks) } where wdel = 27 wm = 109 stripColorCodes bs = case S.breakByte wdel bs of (_, "") -> bs (x, S.drop 1 . snd . S.breakByte wm -> y) | S.null y -> bs | otherwise -> S.append x $ stripColorCodes y
snoyberg/yesod-devel-beta
Yesod/Devel/Capture.hs
mit
1,351
0
15
424
385
209
176
39
2
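A small, hypothetical driver for the Capture API above; the wiring is invented, not taken from yesod-devel, and OverloadedStrings is only needed for the Text and ByteString literals.

{-# LANGUAGE OverloadedStrings #-}
import Control.Concurrent.STM (atomically)
import Yesod.Devel.Capture

main :: IO ()
main = do
  cap <- startCapture
  logMessage cap "rebuilding"            -- timestamped Text log line
  outputChunk cap "ghc says hi\n"        -- raw output chunk; colour codes get stripped
  ev1 <- atomically (waitCaptured cap)   -- Left (time, "rebuilding"); pending log lines are delivered first
  ev2 <- atomically (waitCaptured cap)   -- Right "ghc says hi\n"
  print ev1
  print ev2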
-- | Chan with size module Control.Concurrent.SizedChan (SizedChan, newSizedChan, writeSizedChan, readSizedChan, tryReadSizedChan, peekSizedChan, tryPeekSizedChan, isEmptySizedChan) where import Control.Concurrent.Chan import Data.IORef data SizedChan a = SizedChan (Chan a) -- ^ The channel (IORef Int) -- ^ Its size (IORef (Maybe a)) -- ^ Peeked payload -- | Build and returns a new instance of 'SizedChan'. newSizedChan :: IO (SizedChan a) newSizedChan = SizedChan <$> newChan <*> newIORef 0 <*> newIORef Nothing -- | Write a value to a 'SizedChan'. writeSizedChan :: SizedChan a -> a -> IO () writeSizedChan (SizedChan chan sizeIORef _) val = do writeChan chan val modifyIORef' sizeIORef succ -- | Read the next value from the 'SizedChan'. Blocks when the channel is empty. readSizedChan :: SizedChan a -> IO a readSizedChan (SizedChan chan sizeIORef peekedIORef) = do peeked <- readIORef peekedIORef case peeked of -- return and remove the peeked value Just val -> do writeIORef peekedIORef Nothing modifyIORef' sizeIORef pred return val -- else read from the channel Nothing -> do val <- readChan chan modifyIORef' sizeIORef pred return val -- | A version of `readSizedChan` which does not block. Instead it returns Nothing if no value is available. tryReadSizedChan :: SizedChan a -> IO (Maybe a) tryReadSizedChan (SizedChan chan sizeIORef peekedIORef) = do peeked <- readIORef peekedIORef case peeked of -- return and remove the peeked value Just val -> do writeIORef peekedIORef Nothing modifyIORef' sizeIORef pred return $ Just val -- check the size before reading from the channel, to prevent blocking Nothing -> do size <- readIORef sizeIORef if size == 0 then return Nothing else do val <- readChan chan modifyIORef' sizeIORef pred return $ Just val -- | Peek the next value from the 'SizedChan' without removing it. Blocks when the channel is empty. peekSizedChan :: SizedChan a -> IO a peekSizedChan (SizedChan chan _ peekedIORef) = do peeked <- readIORef peekedIORef case peeked of -- return the peeked value Just val -> return val -- read from the channel instead Nothing -> do val <- readChan chan writeIORef peekedIORef (Just val) return val -- | A version of `peekSizedChan` which does not block. Instead it returns Nothing if no value is available. tryPeekSizedChan :: SizedChan a -> IO (Maybe a) tryPeekSizedChan (SizedChan chan sizeIORef peekedIORef) = do peeked <- readIORef peekedIORef case peeked of -- return the peeked value Just val -> return $ Just val -- check the size before reading from the channel, to prevent blocking Nothing -> do size <- readIORef sizeIORef if size == 0 then return Nothing else do val <- readChan chan writeIORef peekedIORef (Just val) return $ Just val measureSizedChan :: SizedChan a -> IO Int measureSizedChan (SizedChan _ sizeIORef _) = readIORef sizeIORef isEmptySizedChan :: SizedChan a -> IO Bool isEmptySizedChan chan = do size <- measureSizedChan chan return $ size == 0
banacorn/agda-language-server
src/Control/Concurrent/SizedChan.hs
mit
3,254
0
17
789
776
368
408
74
3
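Finally, a short editor-added walkthrough of the SizedChan API above, showing how a peeked value stays available to the next read while reads and failed tryReads track the size.

import Control.Concurrent.SizedChan

main :: IO ()
main = do
  chan <- newSizedChan
  writeSizedChan chan (1 :: Int)
  writeSizedChan chan 2
  peeked <- peekSizedChan chan       -- 1, left in the channel
  first  <- readSizedChan chan       -- 1, now removed
  second <- tryReadSizedChan chan    -- Just 2
  third  <- tryReadSizedChan chan    -- Nothing, channel drained
  done   <- isEmptySizedChan chan    -- True
  print (peeked, first, second, third, done)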