code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE FlexibleInstances #-}
import Control.Applicative
import Control.DeepSeq
import Data.List (sort,unfoldr)
import Data.Word
import System.Process
import System.Vacuum.Cairo (viewFile)
import Test.QuickCheck
view x = rnf x `seq` do
viewFile "temp.svg" x
system "opera temp.svg"
data Heap a = E | T a (Heap a) (Heap a) deriving Show
-- We actually require that the root has an empty right child. It irks me.
--data Heap a = EmptyHeap | Heap a (HeapTree a)
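-- A small sketch of the invariant, worked out from the insert/merge definitions
-- below: insert 1 (insert 2 E) builds T 1 (T 2 E E) E -- the root's right child
-- stays E, and further elements hang off the left ("down") spine.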
instance NFData a => NFData (Heap a) where
rnf E = ()
rnf (T x d r) = rnf (x,d,r)
size E = 0
size (T x a b) = 1 + size a + size b
isEmpty E = True
isEmpty _ = False
isProperHeap :: Word -> Heap Word -> Bool
isProperHeap x (T y d r) = x <= y && isProperHeap y d && isProperHeap x r
isProperHeap x E = True
isProperRoot E = True
isProperRoot (T x hs E) = isProperHeap x hs
isProperRoot _ = False
findMin (T x _ _) = x
insert x h = merge (T x E E) h
insertList xs h = foldl (flip insert) h xs
merge h E = h
merge E h = h
merge h1@(T x hs1 E) h2@(T y hs2 E)
| x <= y = T x (T y hs2 hs1) E
| otherwise = T y (T x hs1 hs2) E
mergePairs E = E
mergePairs h@(T x _ E) = h
mergePairs (T x hs1 (T y hs2 hs)) =
merge (merge (T x hs1 E) (T y hs2 E)) (mergePairs hs)
deleteMin (T _ hs E) = mergePairs hs
delete x = go
where
go E = E
go (T y d r)
| x == y = merge d r
| otherwise = T y (go d) (go r)
heapsort xs = unfoldr f (insertList xs E)
where
f E = Nothing
f h = Just (findMin h, deleteMin h)
deepCheck p = quickCheckWith (stdArgs { maxSuccess = 10000}) p
instance (Ord a, Arbitrary a) => Arbitrary (Heap a) where
arbitrary = frequency [(1, return E), (10, insert <$> arbitrary <*> arbitrary)]
{-shrink E = [E]
shrink x@(T _ E E) = [E]
shrink (T x h1 h2) = let xs = (T x <$> shrink h1 <*> shrink h2) in xs ++ concatMap shrink xs-}
newtype NonEmptyHeap a = NonEmptyHeap a
instance (Ord a, Arbitrary a) => Arbitrary (NonEmptyHeap (Heap a)) where
arbitrary = NonEmptyHeap <$> (insert <$> arbitrary <*> arbitrary)
instance Show a => Show (NonEmptyHeap a) where
show (NonEmptyHeap x) = show x
showsPrec n (NonEmptyHeap x) = showsPrec n x
prop_merge_keeps_proper = (\x y -> isProperRoot x && isProperRoot y ==> isProperRoot (merge x y))
prop_merge_size = (\x y -> isProperRoot x && isProperRoot y ==> size (merge x y) == size x + size y)
prop_insert_keeps_proper = (\x y -> isProperRoot x ==> isProperRoot (insert y x))
prop_insert_size = (\x y -> isProperRoot x ==> size (insert y x) == size x + 1)
prop_insert_min (NonEmptyHeap x) y =
isProperRoot x ==> min oldMin newMin == min oldMin y
where
newMin = findMin (insert y x)
oldMin = findMin x
prop_insert_list_findMin (NonEmpty ys) = isProperRoot x && findMin x == minimum ys
where
x = insertList ys E
prop_deleteMin_keeps_proper = (\(NonEmptyHeap x) -> isProperRoot x ==> isProperRoot (deleteMin x))
prop_deleteMin_size = \(NonEmptyHeap x) -> isProperRoot x ==> size (deleteMin x) == size x - 1
prop_deleteMin_insert_min (NonEmptyHeap x) = isProperRoot x ==>
findMin x'' == findMin x && isProperRoot x' && isProperRoot x''
where
x' = insert (findMin x) x
x'' = deleteMin x'
prop_deleteMin_list_second (NonEmpty ys) = not (null (tail ys)) ==>
findMin x' == head ys' && findMin x'' == head (tail ys') && isProperRoot x' && isProperRoot x''
where
x' = insertList ys E
x'' = deleteMin x'
ys' = sort ys
prop_insert1_delete_proper x y = isProperRoot x ==> isProperRoot (delete y (insert y x))
prop_insert_delete_proper x (NonEmpty ys) = isProperRoot x ==> isProperRoot (deleteAll ys (insertAll ys x))
deleteAll [] x = x
deleteAll (y:ys) x = deleteAll ys (delete y x)
insertAll [] x = x
insertAll (y:ys) x = insertAll ys (insert y x)
prop_heapsort xs = heapsort (xs :: [Int]) == sort xs
main = do
deepCheck prop_insert1_delete_proper
deepCheck prop_insert_delete_proper
deepCheck prop_merge_keeps_proper
deepCheck prop_merge_size
deepCheck prop_insert_keeps_proper
deepCheck prop_insert_size
deepCheck prop_insert_min
deepCheck prop_insert_list_findMin
deepCheck prop_deleteMin_keeps_proper
deepCheck prop_deleteMin_size
deepCheck prop_deleteMin_insert_min
deepCheck prop_deleteMin_list_second
deepCheck prop_heapsort
| olsner/sbmalloc | PairingHeap.hs | mit | 4,311 | 2 | 15 | 937 | 1,787 | 872 | 915 | 97 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
module Template (
render
) where
import Data.Data
import Data.FileEmbed (embedDir)
import Data.ByteString (ByteString)
import Data.ByteString.UTF8 (toString)
import Text.Hastache.Context (mkGenericContext)
import Text.Hastache (
hastacheStr,
defaultConfig,
encodeStr,
decodeStrLT)
files :: [(FilePath, ByteString)]
files = $(embedDir "templates")
content :: FilePath -> String
content path = toString . snd . head $ filter (\(p, _) -> p == path) files
render :: Data a => FilePath -> a -> IO String
render templatePath values = do
res <- hastacheStr
defaultConfig
(encodeStr $ content templatePath)
(mkGenericContext values)
return $ decodeStrLT res
| prasmussen/magmod | Template.hs | mit | 774 | 0 | 11 | 152 | 228 | 126 | 102 | 25 | 1 |
---------------------------------------------------------------------
--
-- | Ascetic
--
-- @Text\/Ascetic.hs@
--
-- Data structure, combinators, and functions for assembling
-- data and emitting files in any XML-like or HTML-like
-- markup language (consisting of tags, elements, attributes,
-- declarations, and ASCII text content). Trade-offs are made
-- in favor of simplicity and concision of constructors and
-- combinators.
---------------------------------------------------------------------
--
module Text.Ascetic
where
import Data.String.Utils (join)
---------------------------------------------------------------------
-- | Data type for simple markup trees and class for data types
-- that can be converted into it.
type Content = String
type Tag = String
type Attribute = String
type Value = String
data Ascetic =
C Content -- Content.
| E Tag [Ascetic] -- Element.
| A Tag [(Attribute, Value)] [Ascetic] -- Element with attributes.
| L [Ascetic] -- Undelimited list.
| D Tag [(Attribute, Value)] Ascetic -- Declaration.
deriving (Eq)
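-- A small illustration, with the rendering worked out from the 'minified'
-- definition below:
--
-- > minified (E "ul" [E "li" [C "one"], E "li" [C "two"]])
-- >     == "<ul><li>one</li><li>two</li></ul>"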
---------------------------------------------------------------------
-- | Type class for data structures that can be converted into the
-- Ascetic representation.
class ToAscetic a where
ascetic :: a -> Ascetic
---------------------------------------------------------------------
-- | Conversion to ASCII string (with indentation for legibility).
ascii x = to "" x where
showAVs avs = [a ++ "=\"" ++ v ++ "\"" | (a,v) <- avs]
to ind x = case x of
C c -> c
E t [] -> "<" ++ t ++ ">" ++ "</" ++ t ++ ">"
E t [C c] -> ind ++ "<" ++ t ++ ">" ++ c ++ "</" ++ t ++ ">"
E t xs ->
ind
++ "<" ++ t ++ ">\n"
++ join "\n" [to (ind ++ " ") x | x <- xs]
++ "\n" ++ ind ++ "</" ++ t ++ ">"
A t avs [] -> ind ++ "<" ++ t ++ " " ++ join " " (showAVs avs) ++ ">" ++ "</" ++ t ++ ">"
A t avs [C c] -> ind ++ "<" ++ t ++ " " ++ join " " (showAVs avs) ++ ">" ++ c ++ "</" ++ t ++ ">"
A t avs xs ->
ind
++ "<" ++ t ++ " " ++ join " " (showAVs avs) ++ ">\n"
++ join "\n" [to (ind ++ " ") x | x <- xs]
++ "\n" ++ ind ++ "</" ++ t ++ ">"
L xs -> join "\n" [to ind x | x <- xs]
D t avs x ->
ind
++ "<?" ++ t ++ " " ++ join " " (showAVs avs) ++ "?>\n"
++ (to ind x)
---------------------------------------------------------------------
-- | Conversion to an ASCII string that has no extra indentation or
-- newlines for legibility.
minified x = to x where
showAVs avs = [a ++ "=\"" ++ v ++ "\"" | (a,v) <- avs]
to x = case x of
C c -> c
E t [] -> "<" ++ t ++ ">" ++ "</" ++ t ++ ">"
E t [C c] -> "<" ++ t ++ ">" ++ c ++ "</" ++ t ++ ">"
E t xs ->
"<" ++ t ++ ">"
++ join "" [to x | x <- xs]
++ "" ++ "</" ++ t ++ ">"
A t avs [] -> "<" ++ t ++ " " ++ join " " (showAVs avs) ++ ">" ++ "</" ++ t ++ ">"
A t avs [C c] -> "<" ++ t ++ " " ++ join " " (showAVs avs) ++ ">" ++ c ++ "</" ++ t ++ ">"
A t avs xs ->
"<" ++ t ++ " " ++ join " " (showAVs avs) ++ ">"
++ join "" [to x | x <- xs]
++ "" ++ "</" ++ t ++ ">"
L xs -> join "" [to x | x <- xs]
D t avs x ->
"<?" ++ t ++ " " ++ join " " (showAVs avs) ++ "?>\n"
++ (to x)
---------------------------------------------------------------------
-- | Default rendering uses 'minified' for HTML whitespace fidelity.
instance Show Ascetic where
show = minified
--eof | lapets/ascetic | Text/Ascetic.hs | mit | 3,749 | 0 | 20 | 1,186 | 1,188 | 612 | 576 | 60 | 9 |
-- Pretty.hs ---
--
-- Filename: Pretty.hs
-- Description:
-- Author: Manuel Schneckenreither
-- Maintainer:
-- Created: Thu Sep 4 10:42:24 2014 (+0200)
-- Version:
-- Package-Requires: ()
-- Last-Updated: Mon Jul 23 10:23:54 2018 (+0200)
-- By: Manuel Schneckenreither
-- Update #: 62
-- URL:
-- Doc URL:
-- Keywords:
-- Compatibility:
--
--
-- Commentary:
--
--
--
--
-- Change Log:
--
--
--
--
--
-- Code:
module Data.Rewriting.ARA.Exception.Pretty
( prettyProgException
) where
import Data.Rewriting.ARA.Constants
import Data.Rewriting.ARA.Exception.Type
import Prelude hiding ((<>))
import Text.PrettyPrint
prettyProgException :: ProgException -> Doc
prettyProgException ex = text (prefix ex) <> text (getElem ex)
where
prefix ShowTextOnly {} = ""
prefix SemanticException{} = exceptionPrefixSemantic ++ " "
prefix WarningException{} = exceptionPrefixWarning ++ " "
prefix FatalException{} = exceptionPrefixFatal ++ " "
prefix ParseException{} = exceptionPrefixParse ++ " "
prefix UnsolveableException{} = exceptionPrefixUnsolveable ++ " "
getElem (ShowTextOnly x) = x
getElem (SemanticException x) = x
getElem (WarningException x) = x
getElem (FatalException x) = x
getElem (ParseException x) = x
getElem (UnsolveableException x) = x
--
-- Pretty.hs ends here
| ComputationWithBoundedResources/ara-inference | src/Data/Rewriting/ARA/Exception/Pretty.hs | mit | 1,503 | 0 | 9 | 415 | 294 | 172 | 122 | 20 | 11 |
--------------------------------------------------------------------------------
-- |
-- Module : AI.Clustering.KMeans.Types
-- Copyright : (c) 2015 Kai Zhang
-- License : MIT
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- <module description starting at first column>
--------------------------------------------------------------------------------
module AI.Clustering.KMeans.Types
( KMeansOpts(..)
, defaultKMeansOpts
, KMeans(..)
, Method(..)
) where
import qualified Data.Matrix.Unboxed as MU
import qualified Data.Vector.Unboxed as U
import Data.Word (Word32)
data KMeansOpts = KMeansOpts
{ kmeansMethod :: Method
, kmeansSeed :: (U.Vector Word32) -- ^ Seed for random number generation
, kmeansClusters :: Bool -- ^ Whether to return clusters; may use a lot of memory
, kmeansMaxIter :: Int -- ^ Maximum iteration
}
-- | Default options.
-- > defaultKMeansOpts = KMeansOpts
-- >   { kmeansMethod = KMeansPP
-- >   , kmeansSeed = U.fromList [2341,2342,3934,425,2345,80006,2343,234491,124,729]
-- >   , kmeansClusters = True
-- >   , kmeansMaxIter = 10000
-- >   }
defaultKMeansOpts :: KMeansOpts
defaultKMeansOpts = KMeansOpts
{ kmeansMethod = KMeansPP
, kmeansSeed = U.fromList [2341,2342,3934,425,2345,80006,2343,234491,124,729]
, kmeansClusters = True
, kmeansMaxIter = 10000
}
-- | Results from running kmeans
data KMeans a = KMeans
{ membership :: U.Vector Int -- ^ A vector of integers (0 ~ k-1)
-- indicating the cluster to which each
-- point is allocated.
, centers :: MU.Matrix Double -- ^ A matrix of cluster centers.
, clusters :: Maybe [[a]]
, sse :: Double -- ^ the sum of squared error (SSE)
} deriving (Show)
-- | Different initialization methods
data Method = Forgy -- ^ The Forgy method randomly chooses k unique
-- observations from the data set and uses these
-- as the initial means.
| KMeansPP -- ^ K-means++ algorithm.
| Centers (MU.Matrix Double) -- ^ Provide a set of k centroids
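-- A minimal usage sketch (the field values below are only an illustration):
--
-- > forgyOpts :: KMeansOpts
-- > forgyOpts = defaultKMeansOpts { kmeansMethod = Forgy, kmeansMaxIter = 200 }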
| kaizhang/clustering | src/AI/Clustering/KMeans/Types.hs | mit | 2,205 | 0 | 11 | 588 | 291 | 196 | 95 | 28 | 1 |
{- |
Description : Visualisation of taxonomies
Copyright : (c) Otto-von-Guericke University of Magdeburg
License : GPLv2 or higher, see LICENSE.txt
This folder provides visualisation of taxonomies in the same style as the
MMiSSOntology tool written by Achim Mahnke. In fact his modules where only
slightly changed. The visualisation is done through uDraw(Graph)
<http://www.informatik.uni-bremen.de/uDrawGraph/en/index.html>
and its Haskell encapsulation as provided by the UniForM Workbench
<http://www.informatik.uni-bremen.de/uniform/wb> (see
module "GraphDisp" and "GUI.AbstractGraphView").
Module "Taxonomy.OntoParser" provides some parsing function for
MMiSSOntology LaTeX files (maybe outdated).
Module "Taxonomy.MMiSSOntologyGraph" provides the display function and
uDraw(Graph) construction functions.
Module "Taxonomy.MMiSSOntology" provides storage classes for MMiSSOntology
graphs and some build and access functions.
Module Taxonomy\/taxonomyTool.hs provides a test program
for the parser provided by "Taxonomy.OntoParser" (maybe outdated).
-}
module Taxonomy where
| spechub/Hets | Taxonomy.hs | gpl-2.0 | 1,096 | 0 | 2 | 141 | 5 | 4 | 1 | 1 | 0 |
--import Ch_02
--import Ch_03
import Ch_06
import Ch_07
import Ch_09
e :: Double
e = exp 1
ode :: Double ->Double -> Double
ode t y = t+y
odeSol :: Double -> Double
odeSol t = (-t) + 4*e**(t-1) -1
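-- odeSol appears to be the exact solution of the IVP y' = t + y, y(1) = 2:
-- d/dt ((-t) + 4*e**(t-1) - 1) = -1 + 4*e**(t-1) = t + odeSol t.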
strip4y :: [(Double, Double)] -> [Double]
strip4y = map snd
strip4t :: [(Double, Double)] -> [Double]
strip4t = map fst
listError :: [Double] -> [Double] -> [Double]
listError = zipWith (-)
-- Infer the step sizes between consecutive abscissae.
inferH :: [Double] -> [Double]
inferH (f:s:xs) = (s-f) : inferH (s:xs)
inferH _ = []
main :: IO ()
main = do
let f x = (-2) * x * sin(x**2)
let g x = (x ** 2) * sin (x+1) + x **2
let antiF x = cos(x**2)
-- let fp x = e**(sin x + x) * (cos x + 1)
let err = 2**(-8)
let a_0 = -100
let b_0 = 0
let c_0 = (-1000)
let d_0 = 1000
let real = antiF b_0 - antiF a_0
let gq = adaptiveQuad gaussQuad f a_0 b_0 err
let simps = adaptiveQuad simpsons f a_0 b_0 err
let trap = adaptiveQuad trapazoid f a_0 b_0 err
print real
print gq
print simps
print simps
print trap
print (integrate g c_0 d_0)
| Marcus-Rosti/numerical-methods | src/Main.hs | gpl-2.0 | 1,155 | 1 | 12 | 333 | 556 | 296 | 260 | 40 | 1 |
module Types
where
import Import
import Utils
import Text.Parsec
data ShellTest = ShellTest {
comments :: [String] -- # COMMENTS OR BLANK LINES before test
,trailingComments :: [String] -- # COMMENTS OR BLANK LINES after the last test
,command :: TestCommand
,stdin :: Maybe String
,stdoutExpected :: Maybe Matcher
,stderrExpected :: Maybe Matcher
,exitCodeExpected :: Matcher
,testname :: String
,lineNumber :: Int
}
instance Show ShellTest where
show ShellTest{testname=n,command=c,lineNumber=ln,stdin=i,stdoutExpected=o,stderrExpected=e,exitCodeExpected=x} =
printf "ShellTest {command = %s, stdin = %s, stdoutExpected = %s, stderrExpected = %s, exitCodeExpected = %s, testname = %s, lineNumber = %s}"
(show c)
(maybe "Nothing" (show.trim) i)
(show $ show <$> o)
(show $ show <$> e)
(show x)
(show $ trim n)
(show ln)
data TestCommand = ReplaceableCommand String
| FixedCommand String
deriving Show
type Regexp = String
data Matcher = Lines Int String -- ^ 0 or more lines of text, also the starting line number ?
| Numeric String -- ^ numeric exit code as a string
| NegativeNumeric String -- ^ numeric exit code as a string, matched negatively
| PositiveRegex Regexp -- ^ regular expression
| NegativeRegex Regexp -- ^ regular expression, matched negatively
instance Show Matcher where show = showMatcherTrimmed
showMatcherTrimmed :: Matcher -> String
showMatcherTrimmed (PositiveRegex r) = "/"++(trim r)++"/"
showMatcherTrimmed (NegativeRegex r) = "!/"++(trim r)++"/"
showMatcherTrimmed (Numeric s) = trim s
showMatcherTrimmed (NegativeNumeric s) = "!"++ trim s
showMatcherTrimmed (Lines _ s) = trim s
showMatcher :: Matcher -> String
showMatcher (PositiveRegex r) = "/"++r++"/"
showMatcher (NegativeRegex r) = "!/"++r++"/"
showMatcher (Numeric s) = s
showMatcher (NegativeNumeric s) = "!"++ s
showMatcher (Lines _ s) = s
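-- A few illustrative values, derived from the definitions above:
--
-- > showMatcher (PositiveRegex "foo") == "/foo/"
-- > showMatcher (NegativeNumeric "1") == "!1"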
type Macro = (String, String)
data PreProcessor = NoPreprocess |
PreProcessor [(String -> Either ParseError String)]
| simonmichael/shelltestrunner | src/Types.hs | gpl-3.0 | 2,355 | 0 | 10 | 693 | 572 | 317 | 255 | 49 | 1 |
import System.Environment
import Pone.Test
import Pone.Parser.Type
import Pone.Utils
import System.IO
suite = All
main = do
hSetEncoding stdout utf8
args <- getArgs
case args of
(path:rest) -> do
results <- runTestsAndPrint (TestSpec 8 path suite)
putStrLn $ unlines $ map (++ "\n") results
_ -> error "failed to specify path to pone_src/ for testing"
| deweyvm/pone | src/Main.hs | gpl-3.0 | 406 | 0 | 15 | 108 | 121 | 61 | 60 | 14 | 2 |
-- | Top-level wrapper for Sugar.Convert, Sugar.Parens, Sugar.AddNames
module Lamdu.Sugar
( sugarWorkArea
, Sugar.WorkArea, Sugar.Payload, Sugar.ParenInfo, Sugar.EntityId, Name
) where
import qualified Control.Lens as Lens
import Control.Monad.Once (OnceT, Typeable)
import Control.Monad.Reader (ReaderT(..))
import Control.Monad.Transaction (MonadTransaction)
import Data.CurAndPrev (CurAndPrev(..))
import qualified Data.Map as Map
import Data.Tuple (swap)
import Data.UUID.Types (UUID)
import qualified Lamdu.Annotations as Annotations
import qualified Lamdu.Cache as Cache
import qualified Lamdu.Data.Anchors as Anchors
import Lamdu.Data.Tag (Tag, IsOperator, TextsInLang)
import qualified Lamdu.Debug as Debug
import Lamdu.Eval.Results (EvalResults, erExprValues, erAppliesOfLam)
import qualified Lamdu.Expr.IRef as ExprIRef
import qualified Lamdu.I18N.Code as Texts
import qualified Lamdu.I18N.Name as Texts
import Lamdu.Name (Name)
import Lamdu.Sugar.Annotations
import qualified Lamdu.Sugar.Config as SugarConfig
import qualified Lamdu.Sugar.Convert as SugarConvert
import Lamdu.Sugar.Convert.Expression.Actions (makeTypeAnnotation)
import qualified Lamdu.Sugar.Convert.Input as Input
import Lamdu.Sugar.Eval (addEvaluationResults)
import Lamdu.Sugar.Internal
import qualified Lamdu.Sugar.Lens as SugarLens
import qualified Lamdu.Sugar.Names.Add as AddNames
import qualified Lamdu.Sugar.Parens as AddParens
import qualified Lamdu.Sugar.Types as Sugar
import Revision.Deltum.Transaction (Transaction)
import Lamdu.Prelude
type T = Transaction
markAnnotations ::
Functor m =>
Sugar.WorkArea v n i o (ConvertPayload m a) ->
Sugar.WorkArea (ShowAnnotation, v) n i o (ConvertPayload m (ShowAnnotation, a))
markAnnotations workArea =
workArea
{ Sugar._waPanes = workArea ^. Sugar.waPanes <&> SugarLens.paneBinder %~ markNodeAnnotations
, Sugar._waRepl = workArea ^. Sugar.waRepl & Sugar.replExpr %~ markNodeAnnotations
}
typeAnnotationFromEvalRes ::
MonadTransaction n f => EvalPrep -> f (Sugar.Annotation v AddNames.InternalName)
typeAnnotationFromEvalRes x =
makeTypeAnnotation (x ^. eEvalId) (x ^. eType) <&> Sugar.AnnotationType
makeAnnotation ::
MonadTransaction n m =>
Annotations.Mode ->
(ShowAnnotation, EvalPrep) ->
m (Sugar.Annotation EvalPrep AddNames.InternalName)
makeAnnotation annMode (showAnn, x) =
case annMode of
_ | showAnn ^. showTypeAlways -> typeAnnotationFromEvalRes x
Annotations.Types | showAnn ^. showInTypeMode -> typeAnnotationFromEvalRes x
Annotations.Evaluation | showAnn ^. showInEvalMode -> Sugar.AnnotationVal x & pure
_ -> pure Sugar.AnnotationNone
redirectLams :: [UUID] -> EvalResults -> EvalResults
redirectLams lams results =
results
& erExprValues . Lens.mapped %~ Map.mapKeys mapScopeId
& erAppliesOfLam . Lens.mapped %~ Map.mapKeys mapScopeId
where
mapScopeId x = mapping ^. Lens.at x & maybe x mapScopeId
mapping =
lams
>>= (\lamId -> results ^@.. erAppliesOfLam . Lens.ix lamId . Lens.ifolded <. traverse . _1)
<&> swap
& Map.fromList
sugarWorkArea ::
( HasCallStack
, Has Debug.Monitors env0
, Has SugarConfig.Config env0
, Has Cache.Functions env0
, Anchors.HasCodeAnchors env0 m
, Has Annotations.Mode env1
, Has (Texts.Name Text) env1
, Has (Texts.Code Text) env1
, Has (CurAndPrev EvalResults) env1
, Monad m, Typeable m
) =>
env0 ->
OnceT (T m)
( (Tag -> (IsOperator, TextsInLang)) -> env1 ->
OnceT (T m) (Sugar.WorkArea (Sugar.Annotation (Sugar.EvaluationScopes Name (OnceT (T m))) Name) Name (OnceT (T m)) (T m)
(Sugar.Payload (Sugar.Annotation (Sugar.EvaluationScopes Name (OnceT (T m))) Name) (T m)))
)
sugarWorkArea env0 =
SugarConvert.loadWorkArea env0
<&>
\workArea getTagName env1 ->
let strippedLams = workArea ^.. traverse . pLambdas . traverse
in
markAnnotations workArea
<&> initAnnotationEvalPrep
& SugarLens.annotations (makeAnnotation (env1 ^. has))
& (`runReaderT` env0)
>>= lift . addEvaluationResults (env0 ^. Anchors.codeAnchors) (env1 ^. has <&> redirectLams strippedLams)
>>= report . AddNames.addToWorkArea env1 (fmap getTagName . lift . ExprIRef.readTagData)
<&> AddParens.addToWorkArea
<&> Lens.mapped %~
\(paren, pl) ->
Sugar.Payload
{ Sugar._plAnnotation = pl ^. pInput . Input.userData . _1
, Sugar._plActions = pl ^. pActions
, Sugar._plEntityId = pl ^. pInput . Input.entityId
, Sugar._plParenInfo = paren
, Sugar._plHiddenEntityIds = pl ^. pInput . Input.userData . _2
}
where
Debug.EvaluatorM report = env0 ^. has . Debug.naming . Debug.mAction
initAnnotationEvalPrep pl =
pl & pInput . Input.userData %~ \(showAnn, x) -> ((showAnn, mkEvalPrep pl), x)
mkEvalPrep :: ConvertPayload m a -> EvalPrep
mkEvalPrep pl =
EvalPrep
{ _eType = pl ^. pInput . Input.inferredType
, _eEvalId = pl ^. pInput . Input.entityId
}
| Peaker/lamdu | src/Lamdu/Sugar.hs | gpl-3.0 | 5,230 | 0 | 23 | 1,121 | 1,521 | 842 | 679 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DoubleClickBidManager.Queries.Listqueries
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves stored queries.
--
-- /See:/ <https://developers.google.com/bid-manager/ DoubleClick Bid Manager API Reference> for @doubleclickbidmanager.queries.listqueries@.
module Network.Google.Resource.DoubleClickBidManager.Queries.Listqueries
(
-- * REST Resource
QueriesListqueriesResource
-- * Creating a Request
, queriesListqueries
, QueriesListqueries
) where
import Network.Google.DoubleClickBids.Types
import Network.Google.Prelude
-- | A resource alias for @doubleclickbidmanager.queries.listqueries@ method which the
-- 'QueriesListqueries' request conforms to.
type QueriesListqueriesResource =
"doubleclickbidmanager" :>
"v1" :>
"queries" :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListQueriesResponse
-- | Retrieves stored queries.
--
-- /See:/ 'queriesListqueries' smart constructor.
data QueriesListqueries =
QueriesListqueries'
deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'QueriesListqueries' with the minimum fields required to make a request.
--
queriesListqueries
:: QueriesListqueries
queriesListqueries = QueriesListqueries'
instance GoogleRequest QueriesListqueries where
type Rs QueriesListqueries = ListQueriesResponse
type Scopes QueriesListqueries = '[]
requestClient QueriesListqueries'{}
= go (Just AltJSON) doubleClickBidsService
where go
= buildClient
(Proxy :: Proxy QueriesListqueriesResource)
mempty
| rueshyna/gogol | gogol-doubleclick-bids/gen/Network/Google/Resource/DoubleClickBidManager/Queries/Listqueries.hs | mpl-2.0 | 2,364 | 0 | 11 | 500 | 217 | 135 | 82 | 40 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ToolResults.Projects.Histories.Executions.Steps.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists Steps for a given Execution. The steps are sorted by creation_time
-- in descending order. The step_id key will be used to order the steps
-- with the same creation_time. May return any of the following canonical
-- error codes: - PERMISSION_DENIED - if the user is not authorized to read
-- project - INVALID_ARGUMENT - if the request is malformed -
-- FAILED_PRECONDITION - if an argument in the request happens to be
-- invalid; e.g. if an attempt is made to list the children of a
-- nonexistent Step - NOT_FOUND - if the containing Execution does not
-- exist
--
-- /See:/ <https://firebase.google.com/docs/test-lab/ Cloud Tool Results API Reference> for @toolresults.projects.histories.executions.steps.list@.
module Network.Google.Resource.ToolResults.Projects.Histories.Executions.Steps.List
(
-- * REST Resource
ProjectsHistoriesExecutionsStepsListResource
-- * Creating a Request
, projectsHistoriesExecutionsStepsList
, ProjectsHistoriesExecutionsStepsList
-- * Request Lenses
, pheslExecutionId
, pheslHistoryId
, pheslPageToken
, pheslProjectId
, pheslPageSize
) where
import Network.Google.Prelude
import Network.Google.ToolResults.Types
-- | A resource alias for @toolresults.projects.histories.executions.steps.list@ method which the
-- 'ProjectsHistoriesExecutionsStepsList' request conforms to.
type ProjectsHistoriesExecutionsStepsListResource =
"toolresults" :>
"v1beta3" :>
"projects" :>
Capture "projectId" Text :>
"histories" :>
Capture "historyId" Text :>
"executions" :>
Capture "executionId" Text :>
"steps" :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListStepsResponse
-- | Lists Steps for a given Execution. The steps are sorted by creation_time
-- in descending order. The step_id key will be used to order the steps
-- with the same creation_time. May return any of the following canonical
-- error codes: - PERMISSION_DENIED - if the user is not authorized to read
-- project - INVALID_ARGUMENT - if the request is malformed -
-- FAILED_PRECONDITION - if an argument in the request happens to be
-- invalid; e.g. if an attempt is made to list the children of a
-- nonexistent Step - NOT_FOUND - if the containing Execution does not
-- exist
--
-- /See:/ 'projectsHistoriesExecutionsStepsList' smart constructor.
data ProjectsHistoriesExecutionsStepsList =
ProjectsHistoriesExecutionsStepsList'
{ _pheslExecutionId :: !Text
, _pheslHistoryId :: !Text
, _pheslPageToken :: !(Maybe Text)
, _pheslProjectId :: !Text
, _pheslPageSize :: !(Maybe (Textual Int32))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsHistoriesExecutionsStepsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pheslExecutionId'
--
-- * 'pheslHistoryId'
--
-- * 'pheslPageToken'
--
-- * 'pheslProjectId'
--
-- * 'pheslPageSize'
projectsHistoriesExecutionsStepsList
:: Text -- ^ 'pheslExecutionId'
-> Text -- ^ 'pheslHistoryId'
-> Text -- ^ 'pheslProjectId'
-> ProjectsHistoriesExecutionsStepsList
projectsHistoriesExecutionsStepsList pPheslExecutionId_ pPheslHistoryId_ pPheslProjectId_ =
ProjectsHistoriesExecutionsStepsList'
{ _pheslExecutionId = pPheslExecutionId_
, _pheslHistoryId = pPheslHistoryId_
, _pheslPageToken = Nothing
, _pheslProjectId = pPheslProjectId_
, _pheslPageSize = Nothing
}
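-- A hypothetical request (the ids below are placeholders, and the lens
-- operators (&) and (?~) are assumed to be in scope, e.g. from Control.Lens):
--
-- > projectsHistoriesExecutionsStepsList "exec-1" "hist-1" "my-project"
-- >     & pheslPageSize ?~ 50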
-- | A Execution id. Required.
pheslExecutionId :: Lens' ProjectsHistoriesExecutionsStepsList Text
pheslExecutionId
= lens _pheslExecutionId
(\ s a -> s{_pheslExecutionId = a})
-- | A History id. Required.
pheslHistoryId :: Lens' ProjectsHistoriesExecutionsStepsList Text
pheslHistoryId
= lens _pheslHistoryId
(\ s a -> s{_pheslHistoryId = a})
-- | A continuation token to resume the query at the next item. Optional.
pheslPageToken :: Lens' ProjectsHistoriesExecutionsStepsList (Maybe Text)
pheslPageToken
= lens _pheslPageToken
(\ s a -> s{_pheslPageToken = a})
-- | A Project id. Required.
pheslProjectId :: Lens' ProjectsHistoriesExecutionsStepsList Text
pheslProjectId
= lens _pheslProjectId
(\ s a -> s{_pheslProjectId = a})
-- | The maximum number of Steps to fetch. Default value: 25. The server will
-- use this default if the field is not set or has a value of 0. Optional.
pheslPageSize :: Lens' ProjectsHistoriesExecutionsStepsList (Maybe Int32)
pheslPageSize
= lens _pheslPageSize
(\ s a -> s{_pheslPageSize = a})
. mapping _Coerce
instance GoogleRequest
ProjectsHistoriesExecutionsStepsList
where
type Rs ProjectsHistoriesExecutionsStepsList =
ListStepsResponse
type Scopes ProjectsHistoriesExecutionsStepsList =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsHistoriesExecutionsStepsList'{..}
= go _pheslProjectId _pheslHistoryId
_pheslExecutionId
_pheslPageToken
_pheslPageSize
(Just AltJSON)
toolResultsService
where go
= buildClient
(Proxy ::
Proxy ProjectsHistoriesExecutionsStepsListResource)
mempty
| brendanhay/gogol | gogol-toolresults/gen/Network/Google/Resource/ToolResults/Projects/Histories/Executions/Steps/List.hs | mpl-2.0 | 6,436 | 0 | 19 | 1,466 | 663 | 395 | 268 | 108 | 1 |
--Zaoqilc
--Copyright (C) 2017 Zaoqi
--This program is free software: you can redistribute it and/or modify
--it under the terms of the GNU Affero General Public License as published
--by the Free Software Foundation, either version 3 of the License, or
--(at your option) any later version.
--This program is distributed in the hope that it will be useful,
--but WITHOUT ANY WARRANTY; without even the implied warranty of
--MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
--GNU Affero General Public License for more details.
--You should have received a copy of the GNU Affero General Public License
--along with this program. If not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
module Control.Concurrent.Signal (
newSignal,
newStreamSignal,
runSignal,
scanp,
sampleOn,
slift,
sliftinit
) where
import Control.Concurrent
import Control.Concurrent.Chan
import Control.Monad
import Data.IORef
import Control.Exception
catch_ :: IO () -> IO ()
catch_ x =
x
`catch` \(SomeException _) -> return ()
data Signal a = Signal ((a -> IO ()) -> IO ()) | -- the (a -> IO ()) callback is invoked for every new value; it must not be invoked concurrently, and the returned IO () performs the registration
Stream (IO (IO a)) -- the returned IO a yields the next value each time it is run
newSignal :: ((a -> IO ()) -> IO ()) -> Signal a
newSignal s = Signal $ \f -> s $ catch_ . f
newStreamSignal :: IO (IO a) -> Signal a
newStreamSignal = Stream
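-- A small sketch of a Stream-backed signal (the one-second ticker below is
-- only an illustration):
--
-- > ticker :: Signal Int
-- > ticker = newStreamSignal $ do
-- >     r <- newIORef 0
-- >     return $ do
-- >         threadDelay 1000000
-- >         atomicModifyIORef' r (\n -> (n + 1, n))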
stream2Signal (Stream x) = newSignal $ \f -> do
s <- x
forkIO $ forever $ do
i <- s
f i
return ()
runSignal :: Signal a -> (a -> IO ()) -> IO ()
runSignal (Signal x) = x
runSignal x@(Stream _) = runSignal . stream2Signal $ x
instance Functor Signal where
fmap f (Signal s) = Signal $ \n -> s $ n . f
fmap f (Stream s) = Stream $ fmap (fmap f) s
--fmap f (Stream s) = Stream $ do
--g <- s
--return $ do
--x <- g
--return $ f x
splus (Signal a) (Signal b) =
let
call ra rb f r i = do
atomicWriteIORef r (Just i)
ia <- readIORef ra
ib <- readIORef rb
case (,) <$> ia <*> ib of
Just x -> f x
Nothing -> return ()
in Signal $ \f -> do
ra <- newIORef Nothing
rb <- newIORef Nothing
b $ call ra rb f rb
a $ call ra rb f ra
splus (Stream a) (Stream b) = Stream $ do
fa <- a
fb <- b
return $ (,) <$> fa <*> fb
splus (Signal a) (Stream b) = Signal $ \f -> do
fb <- b
a $ \ia -> do
ib <- fb
f (ia, ib)
splus (Stream a) (Signal b) = Signal $ \f -> do
fa <- a
b $ \ib -> do
ia <- fa
f (ia, ib)
instance Applicative Signal where
pure = Stream . return . return
x <*> y = fmap (\(f, x) -> f x) $ splus x y
scanp :: (b -> a -> b) -> b -> Signal a -> Signal b
scanp f x (Signal s) = Signal $ \n -> do
r <- newIORef x
s $ \i -> do
p <- readIORef r
let ns = f p i
writeIORef r ns
n ns
scanp f x (Stream s) = Stream $ do
fi <- s
r <- newMVar x
return $ do
i <- fi
uninterruptibleMask $ \restore -> do
p <- takeMVar r
let ns = f p i
onException (restore $ do
putMVar r ns
return ns) (putMVar r p)
sampleOn :: Signal b -> Signal a -> Signal a
sampleOn (Stream _) x = x
sampleOn (Signal c) (Stream v) = Signal $ \n -> do
fv <- v
c $ \_ -> do
i <- fv
n i
sampleOn (Signal c) (Signal v) = Signal $ \n -> do
r <- newIORef Nothing
v $ \i -> atomicWriteIORef r (Just i)
c $ \_ -> do
i <- readIORef r
case i of Just x -> n x
Nothing -> return ()
slift :: Signal (IO a) -> Signal a
slift (Signal s) = Signal $ \n -> s $ \f -> do
x <- f
n x
slift (Stream s) = Stream $ fmap join s
sliftinit :: IO a -> Signal a
sliftinit f = Stream $ do
x <- f
return . return $ x
| zaoqi/zaoqilc | featuring/Control/Concurrent/Signal.hs | agpl-3.0 | 4,169 | 0 | 20 | 1,320 | 1,509 | 743 | 766 | 114 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# OPTIONS_GHC -Wall -fno-prof-auto #-}
{- Use this internally because the transformers RWST leaks in the W part -}
module Reactive.Impulse.Internal.RWST (
RWST(..)
, runRWST
, execRWST
, evalRWST
, mapRWST
, withRWST
, reader
, ask
, local
, asks
, writer
, tell
, listen
, listens
, pass
, censor
, state
, get
, put
, modify
, gets
, module Data.Monoid
, module Control.Monad.Trans
) where
import Control.Applicative
import Control.Monad
import qualified Control.Monad.Reader.Class as RWS
import qualified Control.Monad.Writer.Class as RWS
import qualified Control.Monad.State.Class as RWS
import Control.Monad.Trans
import Control.Monad.Fix
import Data.Monoid
newtype RWST r w s m a = RWST { runRWST' :: w -> r -> s -> m (a, s, w) }
runRWST :: Monoid w => RWST r w s m a -> r -> s -> m (a, s, w)
runRWST rws = runRWST' rws mempty
evalRWST :: (Monad m, Monoid w)
=> RWST r w s m a
-> r
-> s
-> m (a, w)
evalRWST m r s = do
(a, _, w) <- runRWST m r s
return (a, w)
execRWST :: (Monad m, Monoid w)
=> RWST r w s m a
-> r
-> s
-> m (s, w)
execRWST m r s = do
(_, s', w) <- runRWST m r s
return (s', w)
mapRWST :: (m (a, s, w) -> n (b, s, w)) -> RWST r w s m a -> RWST r w s n b
mapRWST f m = RWST $ \w r s -> f (runRWST' m w r s)
withRWST :: (r' -> s -> (r, s)) -> RWST r w s m a -> RWST r' w s m a
withRWST f m = RWST $ \w r s -> uncurry (runRWST' m w) (f r s)
instance (Functor m) => Functor (RWST r w s m) where
fmap f m = RWST $ \w0 r s ->
fmap (\ (a, s', w) -> (f a, s', w)) $ runRWST' m w0 r s
instance (Functor m, Monad m) => Applicative (RWST r w s m) where
pure = return
(<*>) = ap
instance (Functor m, MonadPlus m) => Alternative (RWST r w s m) where
empty = mzero
(<|>) = mplus
instance (Monad m) => Monad (RWST r w s m) where
{-# INLINE return #-}
return a = RWST $ \w _ s -> return (a, s, w)
{-# INLINE (>>=) #-}
m >>= k = RWST $ \w0 r s -> do
(!a, !s', !w) <- runRWST' m w0 r s
(!b, !s'',!w') <- runRWST' (k a) w r s'
return (b, s'', w')
fail msg = RWST $ \_ _ _ -> fail msg
instance (MonadPlus m) => MonadPlus (RWST r w s m) where
mzero = RWST $ \_ _ _ -> mzero
m `mplus` n = RWST $ \w r s -> runRWST' m w r s `mplus` runRWST' n w r s
instance (MonadFix m) => MonadFix (RWST r w s m) where
mfix f = RWST $ \w r s -> mfix $ \ (a, _, _) -> runRWST' (f a) w r s
instance MonadTrans (RWST r w s) where
lift m = RWST $ \w _ s -> do
a <- m
return (a, s, w)
instance (MonadIO m) => MonadIO (RWST r w s m) where
liftIO = lift . liftIO
-- ---------------------------------------------------------------------------
-- Reader operations
reader :: (Monad m) => (r -> a) -> RWST r w s m a
reader = asks
ask :: (Monad m) => RWST r w s m r
ask = RWST $ \w r s -> return (r, s, w)
local :: (Monad m) => (r -> r) -> RWST r w s m a -> RWST r w s m a
local f m = RWST $ \w r s -> runRWST' m w (f r) s
asks :: (Monad m) => (r -> a) -> RWST r w s m a
asks f = RWST $ \w r s -> return (f r, s, w)
-- ---------------------------------------------------------------------------
-- Writer operations
writer :: (Monad m, Monoid w) => (a, w) -> RWST r w s m a
writer (a, w) = RWST $ \w' _ s -> let !w'2 = w' `mappend` w in return (a, s, w'2)
tell :: (Monoid w, Monad m) => w -> RWST r w s m ()
tell w = RWST $ \w' _ s -> let !w'2 = w' `mappend` w in return ((),s,w'2)
listen :: (Monad m) => RWST r w s m a -> RWST r w s m (a, w)
listen m = RWST $ \w r s -> do
(a, s', w') <- runRWST' m w r s
return ((a, w'), s', w')
listens :: (Monad m) => (w -> b) -> RWST r w s m a -> RWST r w s m (a, b)
listens f m = RWST $ \w r s -> do
(a, s', w') <- runRWST' m w r s
return ((a, f w'), s', w')
pass :: (Monad m) => RWST r w s m (a, w -> w) -> RWST r w s m a
pass m = RWST $ \w r s -> do
((a, f), s', w') <- runRWST' m w r s
return (a, s', f w')
censor :: (Monad m) => (w -> w) -> RWST r w s m a -> RWST r w s m a
censor f m = RWST $ \w r s -> do
(a, s', w') <- runRWST' m w r s
return (a, s', f w')
-- ---------------------------------------------------------------------------
-- State operations
state :: (Monad m) => (s -> (a,s)) -> RWST r w s m a
state f = RWST $ \w _ s -> let (a,s') = f s in return (a, s', w)
get :: (Monad m) => RWST r w s m s
get = RWST $ \w _ s -> return (s, s, w)
put :: (Monad m) => s -> RWST r w s m ()
put s = RWST $ \w _ _ -> return ((), s, w)
modify :: (Monad m) => (s -> s) -> RWST r w s m ()
modify f = RWST $ \w _ s -> return ((), f s, w)
gets :: (Monad m) => (s -> a) -> RWST r w s m a
gets f = RWST $ \w _ s -> return (f s, s, w)
-- ---------------------------------------------------------------------------
-- MTL stuff
instance (Monad m) => RWS.MonadReader r (RWST r w s m) where
ask = ask
local = local
reader = reader
instance (Monad m) => RWS.MonadState s (RWST r w s m) where
get = get
put = put
state = state
instance (Monoid w, Monad m) => RWS.MonadWriter w (RWST r w s m) where
writer = writer
tell = tell
listen = listen
pass = pass
| JohnLato/impulse | src/Reactive/Impulse/Internal/RWST.hs | lgpl-3.0 | 5,285 | 0 | 13 | 1,491 | 2,763 | 1,486 | 1,277 | 139 | 1 |
-- Haskell implementation of the Steinhaus-Johnson-Trotter algorithm
-- I find it hard to implement this in Haskell...
| seckcoder/lang-learn | algo/permutation/steinhaus-johnson-trotter-seck.hs | unlicense | 119 | 0 | 2 | 19 | 4 | 3 | 1 | 1 | 0 |
-- Copyright (C) 2009-2012 John Millikin <[email protected]>
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
-- | Basic types, useful to every D-Bus application.
--
-- Authors of client applications should import "DBus.Client", which provides
-- an easy RPC-oriented interface to D-Bus methods and signals.
module DBus
(
-- * Messages
Message
-- ** Method calls
, MethodCall
, methodCall
, methodCallPath
, methodCallInterface
, methodCallMember
, methodCallSender
, methodCallDestination
, methodCallAutoStart
, methodCallReplyExpected
, methodCallBody
-- ** Method returns
, MethodReturn
, methodReturn
, methodReturnSerial
, methodReturnSender
, methodReturnDestination
, methodReturnBody
-- ** Method errors
, MethodError
, methodError
, methodErrorName
, methodErrorSerial
, methodErrorSender
, methodErrorDestination
, methodErrorBody
, methodErrorMessage
-- ** Signals
, Signal
, signal
, signalPath
, signalMember
, signalInterface
, signalSender
, signalDestination
, signalBody
-- ** Received messages
, ReceivedMessage(ReceivedMethodCall, ReceivedMethodReturn, ReceivedMethodError, ReceivedSignal)
, receivedMessageSerial
, receivedMessageSender
, receivedMessageBody
-- * Variants
, Variant
, IsVariant(..)
, variantType
, IsAtom
, IsValue
, typeOf
, typeOf'
-- * Signatures
, Signature
, Type(..)
, signature
, signature_
, signatureTypes
, formatSignature
, parseSignature
-- * Object paths
, ObjectPath
, objectPath_
, formatObjectPath
, parseObjectPath
-- * Names
-- ** Interface names
, InterfaceName
, interfaceName_
, formatInterfaceName
, parseInterfaceName
-- ** Member names
, MemberName
, memberName_
, formatMemberName
, parseMemberName
-- ** Error names
, ErrorName
, errorName_
, formatErrorName
, parseErrorName
-- ** Bus names
, BusName
, busName_
, formatBusName
, parseBusName
-- * Non-native containers
-- ** Structures
, Structure
, structureItems
-- ** Arrays
, Array
, arrayItems
-- ** Dictionaries
, Dictionary
, dictionaryItems
-- * Addresses
, Address
, addressMethod
, addressParameters
, address
, formatAddress
, formatAddresses
, parseAddress
, parseAddresses
, getSystemAddress
, getSessionAddress
, getStarterAddress
-- * Message marshaling
, Endianness (..)
-- ** Marshal
, marshal
, MarshalError
, marshalErrorMessage
-- ** Unmarshal
, unmarshal
, UnmarshalError
, unmarshalErrorMessage
-- ** Message serials
, Serial
, serialValue
, firstSerial
, nextSerial
-- * D-Bus UUIDs
, UUID
, formatUUID
, randomUUID
) where
import Control.Monad (replicateM)
import qualified Data.ByteString.Char8 as Char8
import Data.Proxy (Proxy(..))
import Data.Word (Word16)
import System.Random (randomRIO)
import Text.Printf (printf)
import DBus.Internal.Address
import DBus.Internal.Message
import qualified DBus.Internal.Types
import DBus.Internal.Types hiding (typeOf)
import DBus.Internal.Wire
-- | Deprecated. Get the D-Bus type corresponding to the given Haskell value. The value
-- may be @undefined@.
typeOf :: IsValue a => a -> Type
typeOf = DBus.Internal.Types.typeOf
-- | Get the D-Bus type corresponding to the given Haskell type 'a'.
typeOf' :: IsValue a => Proxy a -> Type
typeOf' = DBus.Internal.Types.typeOf_
-- | Construct a new 'MethodCall' for the given object, interface, and method.
--
-- Use fields such as 'methodCallDestination' and 'methodCallBody' to populate
-- a 'MethodCall'.
--
-- @
--{-\# LANGUAGE OverloadedStrings \#-}
--
--methodCall \"/\" \"org.example.Math\" \"Add\"
-- { 'methodCallDestination' = Just \"org.example.Calculator\"
-- , 'methodCallBody' = ['toVariant' (1 :: Int32), 'toVariant' (2 :: Int32)]
-- }
-- @
methodCall :: ObjectPath -> InterfaceName -> MemberName -> MethodCall
methodCall path iface member = MethodCall path (Just iface) member Nothing Nothing True True []
-- | Construct a new 'MethodReturn', in reply to a method call with the given
-- serial.
--
-- Use fields such as 'methodReturnBody' to populate a 'MethodReturn'.
methodReturn :: Serial -> MethodReturn
methodReturn s = MethodReturn s Nothing Nothing []
-- | Construct a new 'MethodError', in reply to a method call with the given
-- serial.
--
-- Use fields such as 'methodErrorBody' to populate a 'MethodError'.
methodError :: Serial -> ErrorName -> MethodError
methodError s name = MethodError name s Nothing Nothing []
-- | Construct a new 'Signal' for the given object, interface, and signal name.
--
-- Use fields such as 'signalBody' to populate a 'Signal'.
signal :: ObjectPath -> InterfaceName -> MemberName -> Signal
signal path iface member = Signal path iface member Nothing Nothing []
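-- A hypothetical example in the same style as 'methodCall' (the object path,
-- interface, and member names are made up):
--
-- @
--(signal \"/org/example/Emitter\" \"org.example.Events\" \"Ping\")
--    { 'signalBody' = ['toVariant' (\"hello\" :: String)] }
-- @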
-- | No matter what sort of message was received, get its serial.
receivedMessageSerial :: ReceivedMessage -> Serial
receivedMessageSerial (ReceivedMethodCall s _) = s
receivedMessageSerial (ReceivedMethodReturn s _) = s
receivedMessageSerial (ReceivedMethodError s _) = s
receivedMessageSerial (ReceivedSignal s _) = s
receivedMessageSerial (ReceivedUnknown s _) = s
-- | No matter what sort of message was received, get its sender (if provided).
receivedMessageSender :: ReceivedMessage -> Maybe BusName
receivedMessageSender (ReceivedMethodCall _ msg) = methodCallSender msg
receivedMessageSender (ReceivedMethodReturn _ msg) = methodReturnSender msg
receivedMessageSender (ReceivedMethodError _ msg) = methodErrorSender msg
receivedMessageSender (ReceivedSignal _ msg) = signalSender msg
receivedMessageSender (ReceivedUnknown _ msg) = unknownMessageSender msg
-- | No matter what sort of message was received, get its body (if provided).
receivedMessageBody :: ReceivedMessage -> [Variant]
receivedMessageBody (ReceivedMethodCall _ msg) = methodCallBody msg
receivedMessageBody (ReceivedMethodReturn _ msg) = methodReturnBody msg
receivedMessageBody (ReceivedMethodError _ msg) = methodErrorBody msg
receivedMessageBody (ReceivedSignal _ msg) = signalBody msg
receivedMessageBody (ReceivedUnknown _ msg) = unknownMessageBody msg
-- | Convert a 'Message' into a 'Char8.ByteString'. Although unusual, it is
-- possible for marshaling to fail; if this occurs, an error will be
-- returned instead.
marshal :: Message msg => Endianness -> Serial -> msg -> Either MarshalError Char8.ByteString
marshal = marshalMessage
-- | Parse a 'Char8.ByteString' into a 'ReceivedMessage'. The result can be
-- inspected to see what type of message was parsed. Unknown message types
-- can still be parsed successfully, as long as they otherwise conform to
-- the D-Bus standard.
unmarshal :: Char8.ByteString -> Either UnmarshalError ReceivedMessage
unmarshal = unmarshalMessage
-- | A D-Bus UUID is 128 bits of data, usually randomly generated. They are
-- used for identifying unique server instances to clients.
--
-- Older versions of the D-Bus spec also called these values /GUIDs/.
--
-- D-Bus UUIDs are not the same as the RFC-standardized UUIDs or GUIDs.
newtype UUID = UUID Char8.ByteString
deriving (Eq, Ord, Show)
-- | Format a D-Bus UUID as hex-encoded ASCII.
formatUUID :: UUID -> String
formatUUID (UUID bytes) = Char8.unpack bytes
-- | Generate a random D-Bus UUID. This value is suitable for use in a
-- randomly-allocated address, or as a listener's socket address
-- @\"guid\"@ parameter.
randomUUID :: IO UUID
randomUUID = do
-- The version of System.Random bundled with ghc < 7.2 doesn't define
-- instances for any of the fixed-length word types, so we imitate
-- them using the instance for Int.
--
-- 128 bits is 8 16-bit integers. We use chunks of 16 instead of 32
-- because Int is not guaranteed to be able to store a Word32.
let hexInt16 i = printf "%04x" (i :: Int)
int16s <- replicateM 8 (randomRIO (0, fromIntegral (maxBound :: Word16)))
return (UUID (Char8.pack (concatMap hexInt16 int16s)))
| rblaze/haskell-dbus | lib/DBus.hs | apache-2.0 | 8,863 | 0 | 13 | 1,882 | 1,253 | 748 | 505 | 160 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : Geography.VectorTile.Util
-- Copyright : (c) Colin Woodbury 2016 - 2018
-- License : BSD3
-- Maintainer: Colin Woodbury <[email protected]>
module Data.Geometry.VectorTile.Util where
import Data.Geometry.VectorTile.Geometry (Point (..))
import qualified Data.Sequence as Seq
import qualified Data.Text as Text
---
-- | A strict pair of Ints.
data Pair = Pair !Int !Int
-- | A sort of "self-zip", forming pairs from every two elements in a list.
-- Fails if there is an uneven number of elements.
safePairsWith :: (a -> Int) -> Seq.Seq a -> Either Text.Text (Seq.Seq Point)
safePairsWith f list = if null err then Right pts else Left "Uneven number of parameters given."
where
(pts, err) = go list
go Seq.Empty = (Seq.empty, Seq.empty)
go (a Seq.:<| Seq.Empty) = (Seq.empty, Seq.singleton a)
go (a Seq.:<| b Seq.:<| rest) = (Point (f a) (f b) Seq.<| (fst . go $ rest), snd . go $ rest)
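-- For example, with a simple rounding projection,
-- safePairsWith round (Seq.fromList [1.1, 2.9, 3.0, 4.2]) yields Right with the
-- points (1,3) and (3,4), while an odd-length input yields a Left error.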
-- | Flatten a list of pairs. Equivalent to:
--
-- > ps ^.. each . both
unpairs :: [(a,a)] -> [a]
unpairs = foldr (\(a,b) acc -> a : b : acc) []
{-# INLINE unpairs #-}
| sitewisely/zellige | src/Data/Geometry/VectorTile/Util.hs | apache-2.0 | 1,189 | 0 | 11 | 278 | 331 | 189 | 142 | 19 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE FlexibleContexts #-}
module FormatHandler.Html
( htmlFormatHandler
, YesodAloha (..)
, splitTitle
, titleForm
) where
import FormatHandler
import Text.Julius (julius)
import Text.HTML.SanitizeXSS (sanitizeBalance)
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Yesod.Core
import Yesod.Form
import Yesod.Form.Jquery
import Text.Lucius (lucius)
import Control.Monad.Trans.Class (lift)
import Control.Monad.IO.Class (liftIO)
import Text.Hamlet (shamlet)
import Data.Maybe (listToMaybe, mapMaybe)
import Text.Blaze (preEscapedText)
import qualified Data.Set as Set
import qualified Data.Text.Lazy.Encoding as TLE
import Data.Text.Encoding.Error (lenientDecode)
import qualified Data.ByteString.Lazy as L
import Data.Enumerator (enumList)
import Text.HTML.TagSoup
import Control.Arrow ((***))
import Control.Applicative ((<$>), (<*>))
import Text.Blaze (Html)
splitTitle :: T.Text -> (Maybe T.Text, T.Text)
splitTitle t =
case T.stripPrefix "Title: " t of
Just rest ->
let (title, rest') = T.break (== '\n') rest
in (Just title, T.drop 1 rest')
Nothing -> (Nothing, t)
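-- For example, worked out from the definition above:
--
-- > splitTitle "Title: Intro\nBody text" == (Just "Intro", "Body text")
-- > splitTitle "Body only"               == (Nothing, "Body only")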
joinTitle :: (a -> T.Text) -> Maybe T.Text -> a -> T.Text
joinTitle unwrap Nothing t = unwrap t
joinTitle unwrap (Just a) t = T.concat ["Title: ", a, "\n", unwrap t]
titleForm :: RenderMessage master FormMessage
=> Field sub master a
-> (T.Text -> a)
-> (a -> T.Text)
-> GWidget sub master ()
-> Maybe T.Text
-> Html
-> Form sub master (FormResult T.Text, GWidget sub master ())
titleForm field wrap unwrap extraWidget mt =
(fmap . fmap) (\(a, b) -> (a, b >> extraWidget))
$ renderTable $ joinTitle unwrap
<$> aopt textField "Title" (mtitle :: Maybe (Maybe T.Text))
<*> areq field "Content" (fmap wrap content)
where
(mtitle, content) = maybe (Nothing, Nothing) ((Just *** Just) . splitTitle) mt
htmlFormatHandler :: (YesodAloha master, YesodJquery master) => FormatHandler master
htmlFormatHandler = FormatHandler
{ fhExts = Set.singleton "html"
, fhName = "HTML"
, fhForm = titleForm alohaHtmlField id id (return ())
, fhWidget = widget
, fhFilter = Just . enumList 8 . L.toChunks . TLE.encodeUtf8 . TL.fromStrict . sanitizeBalance . TL.toStrict . TLE.decodeUtf8With lenientDecode
, fhRefersTo = const $ const $ return []
, fhTitle = \sm uri -> fmap (fst . splitTitle) $ liftIO $ uriToText sm uri
, fhFlatWidget = widget
, fhToText = \sm uri -> fmap (Just . plain) $ liftIO $ uriToText sm uri
, fhExtraParents = \_ _ -> return []
}
where
widget sm uri = do
t <- fmap (snd . splitTitle) $ liftIO $ uriToText sm uri
toWidget $ preEscapedText t
plain = T.concat . mapMaybe plain' . parseTags
plain' (TagText t) = Just t
plain' _ = Nothing
class YesodAloha a where
urlAloha :: a -> Either (Route a) T.Text
urlAlohaPlugins :: a -> [Either (Route a) T.Text]
alohaHtmlField :: (YesodAloha master, YesodJquery master) => Field sub master T.Text
alohaHtmlField = Field
{ fieldParse = return . Right . fmap sanitizeBalance . listToMaybe
, fieldView = \theId name val _isReq -> do
y <- lift getYesod
addScriptEither $ urlJqueryJs y
addScriptEither $ urlAloha y
mapM_ addScriptEither $ urlAlohaPlugins y
toWidget [shamlet|
<div ##{theId}-container>
<textarea ##{theId} name=#{name}>#{showVal val}
|]
toWidget [julius|$(function(){$("##{theId}").aloha();})|]
toWidget [lucius|##{theId}-container { width: 800px; height: 400px; overflow: auto }|]
}
where
showVal = either id id
| snoyberg/yesodcms | FormatHandler/Html.hs | bsd-2-clause | 3,767 | 0 | 15 | 836 | 1,244 | 677 | 567 | 88 | 2 |
module Propellor.Property.Network where
import Propellor
import Propellor.Property.File
type Interface = String
ifUp :: Interface -> Property NoInfo
ifUp iface = cmdProperty "ifup" [iface]
-- | Resets /etc/network/interfaces to a clean and empty state,
-- containing just the standard loopback interface, and with
-- interfacesD enabled.
--
-- This can be used as a starting point to defining other interfaces.
--
-- No interfaces are brought up or down by this property.
cleanInterfacesFile :: Property NoInfo
cleanInterfacesFile = hasContent interfacesFile
[ "# Deployed by propellor, do not edit."
, ""
, "source-directory interfaces.d"
, ""
, "# The loopback network interface"
, "auto lo"
, "iface lo inet loopback"
]
`describe` ("clean " ++ interfacesFile)
-- | Configures an interface to get its address via dhcp.
dhcp :: Interface -> Property NoInfo
dhcp iface = hasContent (interfaceDFile iface)
[ "auto " ++ iface
, "iface " ++ iface ++ " inet dhcp"
]
`describe` ("dhcp " ++ iface)
`requires` interfacesDEnabled
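-- A hypothetical host definition using these properties (the host name and
-- interface name are made up):
--
-- > mybox = host "mybox.example.com"
-- >     & cleanInterfacesFile
-- >     & dhcp "eth0"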
-- | Writes a static interface file for the specified interface.
--
-- The interface has to be up already. It could have been brought up by
-- DHCP, or by other means. The current ipv4 addresses
-- and routing configuration of the interface are written into the file.
--
-- If the interface file already exists, this property does nothing,
-- no matter its content.
--
-- (ipv6 addresses are not included because it's assumed they come up
-- automatically in most situations.)
static :: Interface -> Property NoInfo
static iface = check (not <$> doesFileExist f) setup
`describe` desc
`requires` interfacesDEnabled
where
f = interfaceDFile iface
desc = "static " ++ iface
setup = property desc $ do
ls <- liftIO $ lines <$> readProcess "ip"
["-o", "addr", "show", iface, "scope", "global"]
stanzas <- liftIO $ concat <$> mapM mkstanza ls
ensureProperty $ hasContent f $ ("auto " ++ iface) : stanzas
mkstanza ipline = case words ipline of
-- Note that the IP address is written CIDR style, so
-- the netmask does not need to be specified separately.
(_:iface':"inet":addr:_) | iface' == iface -> do
gw <- getgateway
return $ catMaybes
[ Just $ "iface " ++ iface ++ " inet static"
, Just $ "\taddress " ++ addr
, ("\tgateway " ++) <$> gw
]
_ -> return []
getgateway = do
rs <- lines <$> readProcess "ip"
["route", "show", "scope", "global", "dev", iface]
return $ case words <$> headMaybe rs of
Just ("default":"via":gw:_) -> Just gw
_ -> Nothing
-- | 6to4 ipv6 connection, should work anywhere
ipv6to4 :: Property NoInfo
ipv6to4 = hasContent (interfaceDFile "sit0")
[ "# Deployed by propellor, do not edit."
, "iface sit0 inet6 static"
, "\taddress 2002:5044:5531::1"
, "\tnetmask 64"
, "\tgateway ::192.88.99.1"
, "auto sit0"
]
`describe` "ipv6to4"
`requires` interfacesDEnabled
`onChange` ifUp "sit0"
interfacesFile :: FilePath
interfacesFile = "/etc/network/interfaces"
-- | A file in the interfaces.d directory.
interfaceDFile :: Interface -> FilePath
interfaceDFile iface = "/etc/network/interfaces.d" </> iface
-- | Ensures that files in the interfaces.d directory are used.
interfacesDEnabled :: Property NoInfo
interfacesDEnabled = containsLine interfacesFile "source-directory interfaces.d"
`describe` "interfaces.d directory enabled"
| shosti/propellor | src/Propellor/Property/Network.hs | bsd-2-clause | 3,366 | 38 | 18 | 612 | 716 | 393 | 323 | 65 | 3 |
module WebToInk.Converter.ConverterService ( prepareKindleGeneration
, getTitle
, getMobi
) where
import System.Directory (createDirectoryIfMissing, getDirectoryContents)
import System.IO (writeFile)
import System.IO.Temp (createTempDirectory)
import System.Cmd (rawSystem)
import System.Exit (ExitCode (..))
import System.Posix.Files (setFileMode, unionFileModes, ownerModes, otherExecuteMode)
import System.FilePath(combine, takeExtension, (<.>))
import Data.Char (isAscii)
import Data.List (isPrefixOf, nub)
import Data.Functor ((<$>))
import Control.Applicative((<*>))
import Control.Exception (throwIO, try, Exception)
import qualified Data.ByteString.Char8 as C
import WebToInk.Converter.HtmlPages
import WebToInk.Converter.Images (getImages)
import WebToInk.Converter.Download (downloadPage, savePage, downloadAndSaveImages, getSrcFilePath)
import WebToInk.Converter.OpfGeneration (generateOpf)
import WebToInk.Converter.TocGeneration (generateToc)
import WebToInk.Converter.Types
import WebToInk.Converter.Constants
import WebToInk.Converter.Exceptions
import WebToInk.Converter.Utils
import WebToInk.Converter.Logger
-- | Tries to download the page at the given url and resolve its title.
-- If the page cannot be parsed, an empty title is returned; other failures are reported as a Left error message.
getTitle :: Url -> IO (Either String String)
getTitle url = do
logd $ "Getting title for: " ++ url
result <- try go :: (Exception a) => IO (Either a String)
case result of
Right title -> return $ Right title
Left exception -> handleException exception
where
go = do
maybeToc <- downloadPage url
logt "Downloaded page, resolving title"
return $ case maybeToc of
Just toc -> resolveTitle Nothing toc
Nothing -> ""
-- | Resolves page at url and all direct children.
-- Downloads all the pages and their images.
-- Then generates a .mobi file from it using the kindlegen tool
-- Finally it returns the path to the generated mobi file from which it can be downloaded.
getMobi :: Url -> String -> String -> FilePath -> IO (Either String FilePath)
getMobi url title author targetFolder = do
logd $ "Preparing " ++ title ++ " by " ++ author
result <- try go :: (Exception a) => IO (Either a FilePath)
case result of
Right fullFilePath -> return $ Right fullFilePath
Left exception -> handleException exception
where
go = do
path <- prepareKindleGeneration (Just title) (Just author) "en-us" url targetFolder
-- Allow all users to enter path and read from it since we want to make this available
-- TODO: handle the case where current user is not permitted to change permissions
-- setFileMode path $ unionFileModes ownerModes otherExecuteMode
let targetFile = filter isAscii title<.>"mobi"
runKindlegen targetFile path True
runKindlegen targetFile path firstTime = do
result <- rawSystem "kindlegen" [ "-o", targetFile, combine path "book.opf" ]
case result of
ExitSuccess -> return (combine path targetFile)
-- In case of warnings (1) we are ok
ExitFailure 1 -> return (combine path targetFile)
-- In case of problems related to javascript (2) remove it from all pages and try again
ExitFailure 2 -> if firstTime
then removeJavaScriptsAndTryAgain targetFile path
else throwIO $ KindlegenException 2
-- All others are problematic and need to be raised
ExitFailure code -> throwIO $ KindlegenException code
removeJavaScriptsAndTryAgain targetFile path = do
htmlFiles <- fmap getHtmlFilePaths . getDirectoryContents $ pagesFullPath
mapM_ removeScriptsFromFileAndSave htmlFiles
runKindlegen targetFile path False
where
removeScriptsFromFileAndSave fullPath = removeScriptsFromFile fullPath >>= saveContentsToFile fullPath
removeScriptsFromFile = fmap (removeScripts . C.unpack) . C.readFile
saveContentsToFile fullPath = C.writeFile fullPath . C.pack
getHtmlFilePaths = map (combine pagesFullPath) . filter isHtmlFile
pagesFullPath = combine path pagesFolder
isHtmlFile file = let extension = takeExtension file
in extension == ".html" || extension == ".htm"
main = testConverter
testLogger = do
initLogger "debug" (Just "./debug.log")
logi "hello world"
logd "hello world"
logt "hello world"
loge "hello world"
logw "hello world"
testConverter = do
initLogger "debug" (Just "./debug.log")
result <- getMobi url title author targetFolder
case result of
Right filePath -> logi $ "Success: " ++ filePath
Left error -> loge $ "Error: " ++ error
return ()
where
url = "http://static.springsource.org/spring/docs/current/spring-framework-reference/html/overview.html"
title = "Spring"
author = "Team"
targetFolder = "../books"
prepareKindleGeneration :: Maybe String -> Maybe String -> String -> Url -> FilePath -> IO FilePath
prepareKindleGeneration maybeTitle maybeAuthor language tocUrl folder = do
logd $ "Getting pages from: " ++ tocUrl
maybeGetHtmlPagesResult <- getHtmlPages tocUrl
case maybeGetHtmlPagesResult of
Just result -> logd ("Got pages, creating webtoink temp directory at: " ++ folder)
>> createTempDirectory folder "webtoink" >>= prepare result
Nothing -> loge "Could not download table of contents and processed no html pages"
>> throwIO TableOfContentsCouldNotBeDownloadedException
where
prepare (GetHtmlPagesResult tocContent pagesDic) targetFolder = do
let author = resolveAuthor maybeAuthor tocContent
let title = resolveTitle maybeTitle tocContent
let topPagesDic = filter (isTopLink . fst) pagesDic
let topPages = map fst topPagesDic
logd $ "Preparing for kindlegen " ++ "(Author: " ++ show author ++ "Title: " ++ show title ++ ")"
logt $ prettifyList ", " topPagesDic
createKindleStructure title author topPagesDic topPages targetFolder
where
correctFolder targetFolder (filePath, url) = (combine targetFolder filePath, url)
createKindleStructure title author topPagesDic topPages targetFolder = do
logd $ "created temp folder" ++ show targetFolder
logd "Starting to download pages"
result <- downloadPages tocUrl topPagesDic targetFolder
let failedFileNames = map piFileName $ failedPages result
let goodTopPages = filter (`notElem` failedFileNames) topPages
logt $ "Successfully downloaded: " ++ (prettifyList ", " goodTopPages)
logt $ "Failed to download: " ++ (prettifyList ", " failedFileNames)
logd "Generating book.opf"
let opfString = generateOpf goodTopPages (allImageUrls result) title language author
writeFile (combine targetFolder "book.opf") opfString
logd "Generating toc.ncx"
let tocString = generateToc goodTopPages title language author
writeFile (combine targetFolder "toc.ncx") tocString
return targetFolder
downloadPages :: Url -> [(FilePath, Url)] -> FilePath -> IO DownloadPagesResult
downloadPages tocUrl topPagesDic targetFolder = do
let rootUrl = getRootUrl tocUrl
downloadResults <- mapM (\(fileName, pageUrl) ->
tryProcessPage (PageInfo rootUrl pageUrl fileName) targetFolder) topPagesDic
let uniqueImageUrls =
map (getSrcFilePath "") . nub . concatMap allImageUrls $ downloadResults
let allFailedPages = concatMap failedPages downloadResults
return $ DownloadPagesResult uniqueImageUrls allFailedPages
tryProcessPage :: PageInfo -> FilePath -> IO DownloadPagesResult
tryProcessPage pi targetFolder = do
maybePageContents <- downloadPage (piPageUrl pi)
case maybePageContents of
Just pageContents -> do
imageUrls <- processPage pi pageContents targetFolder
return $ DownloadPagesResult imageUrls []
Nothing -> return $ DownloadPagesResult [] [pi]
processPage :: PageInfo -> PageContents -> FilePath -> IO [String]
processPage pi pageContents targetFolder = do
let imageUrls = (filter (not . ("https:" `isPrefixOf`)) . getImages) pageContents
downloadAndSaveImages targetFolder (piRootUrl pi) (piPageUrl pi) imageUrls
let adaptedPageContents = cleanAndLocalize imageUrls pageContents
savePage targetFolder (piFileName pi) adaptedPageContents
return imageUrls
cleanAndLocalize :: [Url] -> PageContents -> PageContents
cleanAndLocalize imageUrls pageContents =
removeBaseHref . localizeSrcUrls ("../" ++ imagesFolder) imageUrls $ pageContents
prettifyList :: Show a => String -> [a] -> String
prettifyList delim = foldr ((++) . (++) delim . show) ""
handleException exception = do
let exceptionInfo = getExceptionInfo exception
loge (fst exceptionInfo)
return $ Left (snd exceptionInfo)
where
getExceptionInfo exception =
case exception of
TableOfContentsCouldNotBeDownloadedException -> ( "TableOfContentsCouldNotBeDownloadedException."
, "Could not download page. Please check the url and/or make sure that the server is available.")
ex@(KindlegenException code) -> ( show ex
, "The kindlegen tool was unable to convert the page. Please try another format.")
ex -> ( "Unknown Exception: " ++ show ex
, "An unexcpected error occured. Please try again later.")
| thlorenz/WebToInk | webtoink-converter/WebToInk/Converter/ConverterService.hs | bsd-2-clause | 10,328 | 0 | 16 | 2,905 | 2,233 | 1,106 | 1,127 | 163 | 6 |
{-# LANGUAGE OverloadedStrings, DuplicateRecordFields #-}
{- Example "data-point" from a "daily" result:
"time":1475564400,
"summary":"Partly cloudy until evening.",
"icon":"partly-cloudy-day",
"sunriseTime":1475590177,
"sunsetTime":1475632150,
"moonPhase":0.12,
"precipIntensity":0,
"precipIntensityMax":0,
"precipProbability":0,
"temperatureMin":55.22,
"temperatureMinTime":1475647200,
"temperatureMax":68.38,
"temperatureMaxTime":1475622000,
"apparentTemperatureMin":55.22,
"apparentTemperatureMinTime":1475647200,
"apparentTemperatureMax":68.38,
"apparentTemperatureMaxTime":1475622000,
"dewPoint":51.8,
"humidity":0.75,
"windSpeed":6.3,
"windBearing":311,
"visibility":10,
"cloudCover":0.42,
"pressure":1016,
"ozone":290.35
-}
module WeatherPoint (
CurrentWeather(..),
WeatherPoint(..)
) where
import Data.Text (Text)
import Data.Aeson
import Data.ByteString.Lazy
data WeatherPoint = WeatherPoint {
summary :: Text,
icon :: Text,
temperatureMin :: Double, -- C or F
temperatureMax :: Double, -- C or F
humidity :: Double, -- 0-1, percentage
precipProbability :: Double -- 0-1, percentage
} deriving (Eq, Show)
instance FromJSON WeatherPoint where
parseJSON (Object v) =
WeatherPoint
<$> v .: "summary"
<*> v .: "icon"
<*> v .: "temperatureMin"
<*> v .: "temperatureMax"
<*> v .: "humidity"
<*> v .: "precipProbability"
parseJSON _ = mempty
data CurrentWeather = CurrentWeather {
summary :: Text,
icon :: Text,
temperature :: Double, -- C or F
humidity :: Double, -- 0-1, percentage
precipProbability :: Double -- 0-1, percentage
} deriving (Eq, Show)
instance FromJSON CurrentWeather where
parseJSON (Object v) =
CurrentWeather
<$> v .: "summary"
<*> v .: "icon"
<*> v .: "temperature"
<*> v .: "humidity"
<*> v .: "precipProbability"
parseJSON _ = mempty
sample :: ByteString
sample = "{\"time\":1475650800,\"summary\":\"Clear throughout the day.\",\"icon\":\"clear-day\",\"sunriseTime\":1475676631,\"sunsetTime\":1475718459,\"moonPhase\":0.15,\"precipIntensity\":0,\"precipIntensityMax\":0,\"precipProbability\":0,\"temperatureMin\":54.79,\"temperatureMinTime\":1475676000,\"temperatureMax\":70.74,\"temperatureMaxTime\":1475704800,\"apparentTemperatureMin\":54.79,\"apparentTemperatureMinTime\":1475676000,\"apparentTemperatureMax\":70.74,\"apparentTemperatureMaxTime\":1475704800,\"dewPoint\":49.47,\"humidity\":0.68,\"windSpeed\":8.4,\"windBearing\":315,\"visibility\":9.36,\"cloudCover\":0.07,\"pressure\":1017.49,\"ozone\":310.41}"
| jasonkuhrt/weather | source/WeatherPoint.hs | bsd-3-clause | 2,782 | 0 | 17 | 547 | 314 | 182 | 132 | 43 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, FlexibleContexts #-}--, OverlappingInstances #-}
module Language.SPL.Analyzer where
import Language.SPL.Program
import Language.SPL.Position
--import Language.SPL.Environment
type Error = (Position,String)
type Errors = [Error]
--report :: (Show a, MonadWriter Errors m) => a -> String -> m ()
--report p m = tell $ [show p ++ ": " ++ m]
--warn
--error
--inform
ask :: Name -> Type
ask = undefined
info :: (MonadReader Environment m) => Name -> m Info
info n = asks (M.lookup n)
class Analysable a where
-- | Deeply analyse types and usage of object
analyse :: a -> Bool
-- | Checks if Type matches the type of the object
match :: Type -> a -> Bool
instance (Analysable a) => Analysable [a] where --Holds for Blocks and Programs
--
analyse = and . map analyse
-- A Block matches a certain type if all the Return statements evaluate to the same type.
match t = and . map (match t)
instance Analysable Construct where
  -- For a Declaration we only have to check that the given type matches the initialization expression.
analyse (Declaration t n e) = match t e
-- For a Definition we have to check if the return type matches the types of all the Return expressions in the Block
-- After that continue analyzing the Block.
analyse (Definition t n ps cs bs) = match t bs && analyse bs
-- Constructs match any type.
match _ _ = True
instance Analysable Statement where
-- We analyse all the components of a given Statement.
analyse (Return _) = True
analyse (Assign n e) = match (ask n) e
analyse (If c ts es) = match BOOL c && analyse ts && analyse es
analyse (While c ls) = match BOOL c && analyse ls
analyse (Execute n as) = length ps == length as && and (zipWith match ps as) -- Warning if return value not used?
where ps = [ask n] --parameters $ ask n
-- Statement matches are only used to ensure the types of the Return statements.
-- We go on recursively checking the Blocks of If and While statements.
match VOID (Return Nothing) = True
match t (Return (Just e)) = match t e
match t (If _ ts es) = match t ts && match t es
match t (While _ ls) = match t ls
match _ _ = True
instance Analysable Expression where
-- Not implemented, probably not needed.
analyse (Call n as) = length ps == length as && and (zipWith match ps as)
where ps = [ask n]
analyse _ = True
--match (Poly a) _ = True
match (INT) (Integer _) = True
match (BOOL) (Boolean _) = True
match (LIST _) (Nil) = True
match (PAIR t s) (Pair x y) = match t x && match s y
match t (Value n) = t == ask n
match t (Call n as) = t == ask n && analyse (Call n as)
match t (Infix o l r) = match t o && match t l && match t r
match t (Prefix o e) = match t o && match t e
match t _ = False
instance Analysable BinaryOperator where
analyse = undefined
match (INT) o = o `elem` [Add, Sub, Mul, Div, Mod]
match (BOOL) o = o `elem` [Eq, Ne, Lt, Gt, Le, Ge, And, Or]
match (LIST _) o = o `elem` [Cons]
instance Analysable UnaryOperator where
analyse = undefined
match (BOOL) o = o `elem` [Not, Neg]
parameters :: Construct -> [Type]
parameters (Definition _ _ ps _ _) = map extract ps
where extract (Parameter t _) = t
parameters _ = [] -- error!
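-- A hedged example of how 'match' is meant to be used (constructor shapes are
-- assumed from the pattern matches above, not checked against
-- Language.SPL.Program):
--
-- > match INT (Infix Add (Integer 1) (Integer 2)) -- True
-- > match BOOL (Prefix Not (Boolean True)) -- True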
| timjs/spl-compiler | Old/Analyzer.hs | bsd-3-clause | 3,466 | 0 | 10 | 937 | 1,097 | 573 | 524 | 57 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
data Binary = Bin Binary Binary | Tip
deriving Show
louds :: Binary -> [Bool]
louds binary = True : encode binary
where
encode :: Binary -> [Bool]
encode (Bin l r) = True : (encode l) ++ (encode r)
encode Tip = [False]
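-- A hedged example (not part of the original module): a single internal node
-- with two leaf children is encoded as
--
-- >>> louds (Bin Tip Tip)
-- [True,True,False,False]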
rank :: Eq a => a -> [a] -> Int -> Int
rank _ _ 0 = 0
rank a (x:xs) i | a == x = 1 + (rank a xs (i - 1))
rank a (_:xs) i = 0 + (rank a xs (i - 1))
rank _ [] _ = 0
select :: Eq a => a -> [a] -> Int -> Int
select _ _ 0 = -1
select a (x:xs) i | a == x = 1 + (select a xs (i - 1))
select a (_:xs) i = 1 + (select a xs i)
select _ [] _ = 0
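-- Hedged examples (not part of the original module): 'rank' counts the
-- occurrences among the first i elements, 'select' returns the 0-based index
-- of the i-th occurrence.
--
-- >>> rank True [True,False,True,True] 3
-- 2
-- >>> select True [False,True,False,True] 2
-- 3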
-- Jacobson encoding
main :: IO ()
main = return ()
| haskell-works/succinct-playground | app/Main.hs | bsd-3-clause | 836 | 0 | 10 | 282 | 421 | 220 | 201 | 23 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-|
Module : Text.XML.Xleb
Description : The Xleb XML-parsing monad
Copyright : (c) Getty Ritter, 2017
License : BSD
Maintainer : Getty Ritter <[email protected]>
Stability : experimental
The 'Xleb' monad (and the corresponding 'XlebT' monad transformer) is
a monadic sublanguage for easily parsing XML structures.
This module is intended to be imported qualified, to avoid name
clashes with 'Prelude' functions. e.g.
> import qualified Text.XML.Xleb as X
-}
module Text.XML.Xleb
( -- * How To Use 'Xleb'
-- $use
-- * The 'Xleb' monad
Xleb
, runXleb
-- ** The 'XlebT' monad transformer
, XlebT
, runXlebT
-- * Errors
, XlebError(..)
, errorString
-- * Element Structure
, elem
, attr
, contents
, rawElement
, child
, children
-- * Parsing contained string data
, Parse
, string
, number
, reader
-- * Selecting Elements
, Selector
, byTag
, any
) where
import Prelude hiding (any, elem)
import Control.Applicative (Alternative(..))
import qualified Control.Monad.Fail as M
import qualified Control.Monad.Except as M
import qualified Control.Monad.Reader as M
import qualified GHC.Exts as GHC
import qualified Data.Functor.Identity as M
import qualified Text.XML.Light as XML
-- | The 'XlebT' monad transformer describes a computation used to
-- parse a fragment of XML from a particular element of an XML
-- structure. This may fail with an error, or it may produce a value.
newtype XlebT m a =
Xleb (M.ReaderT XML.Element (M.ExceptT XlebError m) a)
deriving (Functor, Applicative, Monad, Alternative)
-- | The 'Xleb' monad describes a computation used to parse a fragment
-- of XML from a particular element of an XML structure. This may fail
-- with an error, or it may produce a value.
type Xleb a = XlebT M.Identity a
-- | The 'XlebError' type describes the various errors that can occur
-- in the course of parsing an XML structure. If you simply want the
-- human-readable string that corresponds to your error, then use the
-- 'errorString' function.
data XlebError
= XEInElem String XlebError
-- ^ Describes the element context in which an error occurred
| XEInAttr String XlebError
-- ^ Describes the attribute context in which an error occurred
| XEParseFailure String
-- ^ Some parser function was unable to produce a value from the
-- string embedded in an XML element
| XENoSuchAttribute String
-- ^ A 'XlebT' computation required an attribute that wasn't
-- found in the specified element.
| XEUnexpectedElement String String
-- ^ A 'XlebT' computation expected one element but found another
| XENoMatchingElement Selector
-- ^ A 'XlebT' computation used a selector which did not
-- successfully describe any child elements
| XEAmbiguousElement Selector
-- ^ A 'XlebT' computation used a selector as though it would
-- unambiguously name a single child, but instead multiple child
-- elements matched the selector
| XEBadXML
-- ^ The "xml" library was unable to parse the document as XML.
| XOtherError String
-- ^ Another error occurred which was not described by the above
-- constructors
deriving (Eq, Show)
instance Monoid XlebError where
mappend x _ = x
mempty = XOtherError "unknown error"
-- | Convert a 'XlebError' value to the corresponding human-readable
-- string.
errorString :: XlebError -> String
errorString = gatherContext ""
where gatherContext ctx (XEInElem el err) =
gatherContext (ctx ++ el ++ "/") err
gatherContext ctx (XEInAttr at err) =
gatherContext (ctx ++ "[@" ++ at ++ "]") err
gatherContext ctx err =
ctx ++ ": " ++ showError err
showError (XEParseFailure err) = err
showError XEBadXML =
"Unable to parse input string as XML"
showError (XENoSuchAttribute str) =
"No attribute called '" ++ str ++ "'"
showError (XEUnexpectedElement e1 e2) =
"Unexpected element " ++ e1 ++ "; expected " ++ e2
showError (XENoMatchingElement sel) =
"No elements were found maching selector " ++ show sel
showError (XEAmbiguousElement sel) =
"Multiple elements matched the selector " ++ show sel
showError (XOtherError str) = str
showError (XEInElem _ _) = error "[unexpected]"
showError (XEInAttr _ _) = error "[unexpected]"
instance Monad m => M.MonadFail (XlebT m) where
fail = Xleb . M.throwError . XOtherError
-- | A value of type @'Parse' t@ is a function that can either produce
-- a value of type @t@ or fail with a string message.
type Parse t = String -> Either String t
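-- For example, a hedged sketch of a 'Parse' function for simple yes or no
-- flags (this helper is illustrative only and not part of the library):
--
-- > yesNo :: Parse Bool
-- > yesNo "yes" = Right True
-- > yesNo "no"  = Right False
-- > yesNo s     = Left ("expected yes or no, got " ++ show s)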
-- | A 'Selector' represents some criteria by which child elements are
-- matched.
data Selector
= SelByName String
| SelByNS String
| SelBoth Selector Selector
| SelAny
deriving (Eq, Show)
instance Monoid Selector where
mempty = SelAny
mappend = SelBoth
instance GHC.IsString Selector where
fromString = SelByName
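-- An illustrative sketch: selectors combine monoidally via 'SelBoth', and
-- (with OverloadedStrings) a bare string literal acts as 'byTag'. For
-- example, the selector below matches @atom:entry@ elements only:
--
-- > byTag "atom:entry" `mappend` byNamespace "atom"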
toPred :: Selector -> XML.Element -> Bool
toPred SelAny _ = True
toPred (SelByName n) el =
XML.showQName (XML.elName el) == n
toPred (SelByNS n) el =
case XML.qPrefix (XML.elName el) of
Nothing -> False
Just p -> p == n
toPred (SelBoth s1 s2) el =
toPred s1 el && toPred s2 el
-- | Find an attribute on the current focus element and parse it to a
-- value of type @t@. If the parse function fails, then this will fail
-- with 'XEParseFailure'.
attr :: Monad m => String -> Parse t -> XlebT m t
attr name parser = Xleb $ do
el <- M.ask
case XML.findAttr (XML.unqual name) el of
Nothing -> M.throwError (XENoSuchAttribute name)
Just a -> case parser a of
Left err -> M.throwError (XEInAttr name (XEParseFailure err))
Right x -> return x
-- | Take the string content of the current element and parse it to a
-- value of type @t@. If the parse function fails, then this will fail
-- with 'XEParseFailure'.
contents :: Monad m => Parse t -> XlebT m t
contents parser = Xleb $ do
cnt <- XML.strContent `fmap` M.ask
case parser cnt of
Left err -> M.throwError (XEParseFailure err)
Right x -> return x
-- | Access the raw underlying XML element that we are
-- processing. This is sometimes necessary for working with free-form
-- XML data.
rawElement :: Monad m => XlebT m XML.Element
rawElement = Xleb M.ask
-- | Use a 'Selector' that unambiguously identifies a single child
-- element of the current element and then parse it according to a
-- given 'XlebT' computation focused on that element. If no child
-- matches the provided 'Selector', then this will fail with
-- 'XENoMatchingElement'. If multiple children match the provided
-- 'Selector', then this will fail with 'XEAmbiguousElement'.
child :: Monad m => Selector -> XlebT m t -> XlebT m t
child sel (Xleb mote) = Xleb $ do
cld <- XML.filterChildren (toPred sel) `fmap` M.ask
case cld of
[] -> M.throwError (XENoMatchingElement sel)
[x] -> M.local (const x) mote
_ -> M.throwError (XEAmbiguousElement sel)
-- | Use a 'Selector' that identifies some child elements of the
-- current element and parse each according to a given 'XlebT'
-- computation, which will be repeated with focus on each child
-- element, and returning the resulting values as a list. If no child
-- elements match the 'Selector', then this will return an empty list.
children :: Monad m => Selector -> XlebT m t -> XlebT m [t]
children sel (Xleb mote) = Xleb $ do
cld <- XML.filterChildren (toPred sel) `fmap` M.ask
sequence [ M.local (const x) mote | x <- cld ]
-- | A 'Parse' function that parses numeric values according to their
-- Haskell 'Read' instance.
number :: (Read n, Num n) => Parse n
number = Right . read
-- | A 'Parse' function that accepts arbitrary string input without
-- failing.
string :: Parse String
string = Right
-- | A 'Parse' function that parses Haskell values according to their
-- 'Read' instance.
reader :: Read a => Parse a
reader = Right . read
-- | Creates a 'Selector' which expects an exact tag name.
byTag :: String -> Selector
byTag = SelByName
-- | Creates a 'Selector' which expects a specific namespace
byNamespace :: String -> Selector
byNamespace = SelByNS
-- | Creates a 'Selector' which matches any possible child element.
any :: Selector
any = SelAny
-- | @'elem' n t@ will ensure that the currently focused element is a
-- tag named @n@ and will then evaluate it using the computation
-- @t@. This will fail with 'XEUnexpectedElement' if the tag is named
-- something else.
elem :: Monad m => String -> XlebT m t -> XlebT m t
elem name (Xleb mote) = Xleb $ do
el <- M.ask
case el of
XML.Element { XML.elName = qname }
| XML.showQName qname == name -> mote
| otherwise -> M.throwError
(XEUnexpectedElement (XML.showQName qname) name)
doXleb :: XML.Element -> XlebT m t -> m (Either XlebError t)
doXleb el (Xleb mote) =
M.runExceptT (M.runReaderT mote el)
-- | Run a 'Xleb' computation over a string containing XML data,
-- producing either the resulting value or an error. If the XML data
-- contained in the argument string is invalid, then this will fail
-- with 'XEBadXML'.
runXleb :: String -> Xleb t -> Either XlebError t
runXleb raw xleb = case XML.parseXMLDoc raw of
Nothing -> Left XEBadXML
Just x -> M.runIdentity (doXleb x xleb)
-- | Run a 'XlebT' computation over a string containing XML data,
-- producing either the resulting monadic value or an error. If the
-- XML data contained in the argument string is invalid, then this
-- will fail with 'XEBadXML'.
runXlebT :: Monad m => String -> XlebT m t -> m (Either XlebError t)
runXlebT raw xleb = case XML.parseXMLDoc raw of
Nothing -> return (Left XEBadXML)
Just x -> doXleb x xleb
{- $use
The 'Xleb' monad describes both parsing /and/ traversing a given XML
structure: several of the functions to produce 'Xleb' computations
take other 'Xleb' computations, which are run on various sub-parts of
the XML tree. Consequently, instead of decomposing an XML structure
and passing it around to various functions, the 'Xleb' language treats
"the current location in the tree" as an implicit piece of data in the
'Xleb' monad.
You will generally want to identify your root note with the 'elem'
function to ensure that your root note has the tag you
expect. Children of that node can be accessed using the 'child' or
'children' function to either unambiguously find a specific child
element, or to find all child elements that match a given selector and
apply a 'Xleb' computation to each of them.
@
a <- X.child (X.byTag "a") parseA
b <- X.children (X.byTag "b") parseB
@
Leaf data tends to come in two forms in XML: attribute values (like
@\<tag attr="value"\>@) or tag content (like
@\<tag\>value\<\/tag\>@). In both cases, the 'Xleb' functions allow
you to parse that content however you'd like by providing an arbitrary
function of type @'String' -> 'Either' 'String' a@. The "xleb" library
provides several built-in functions of this type for common
situations.
@
c <- X.attr "index" X.number
d <- X.contents X.string
@
Finally, the `Xleb` monad has `Alternative` instances which allow for
concise expression of optional values or multiple possibilities.
@
e \<- X.children X.any (parseA \<|\> parseB)
f \<- optional (X.attr "total" X.number)
@
Consequently, for an XML structure like the following:
@
\<feed\>
\<title\>Feed Name\<\/title\>
\<author\>Pierre Menard\<\/author\>
\<entry title="Entry 01"\>First Post\<\/entry\>
\<entry title="Entry 02"\>Second Post Post\<\/entry\>
\<\/feed\>
@
We can write a 'Xleb' computation which is capable of parsing this
structure in a handful of lines:
@
import Control.Applicative (optional)
import qualified Text.XML.Xleb as X
feed :: X.Xleb (String, Maybe String, [(String, String)])
feed = X.elem "feed" $ do
feedTitle <- X.child (X.byTag "title") $
X.contents X.string
feedAuthor <- optional $ X.child (X.byTag "author") $
X.contents X.string
feedEntries <- X.children (X.byTag "entry") entry
return (feedTitle, feedAuthor, feedEntries)
entry :: X.Xleb (String, String)
entry = (,) \<$\> X.attr "title" X.string \<*\> X.contents X.string
@
-}
| aisamanra/xleb | src/Text/XML/Xleb.hs | bsd-3-clause | 12,279 | 0 | 18 | 2,547 | 1,905 | 1,014 | 891 | 152 | 11 |
{-# LANGUAGE DeriveDataTypeable, RecordWildCards, TemplateHaskell, MagicHash #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
module System.Console.CmdArgs.Test.Implicit.Diffy where
import System.Console.CmdArgs
import System.Console.CmdArgs.Quote
import System.Console.CmdArgs.Test.Implicit.Util
data Diffy = Create {src :: Maybe FilePath, out :: FilePath}
| Diff {old :: FilePath, new :: FilePath, out :: FilePath}
deriving (Data,Typeable,Show,Eq)
outFlags x = x &= help "Output file" &= typFile
create = Create
{src = def &= help "Source directory" &= typDir
,out = outFlags "ls.txt"
} &= help "Create a fingerprint"
diff = Diff
{old = def &= typ "OLDFILE" &= argPos 0
,new = def &= typ "NEWFILE" &= argPos 1
,out = outFlags "diff.txt"
} &= help "Perform a diff"
mode = cmdArgsMode $ modes [create,diff] &= help "Create and compare differences" &= program "diffy" &= summary "Diffy v1.0"
$(cmdArgsQuote
[d|
outFlags_ x = x &=# help "Output file" &=# typFile
create_ = Create
{src = Nothing &=# help "Source directory" &=# typDir
,out = outFlags_ "ls.txt"
} &=# help "Create a fingerprint"
diff_ = Diff
{old = "" &=# typ "OLDFILE" &=# argPos 0
,new = "" &=# typ "NEWFILE" &=# argPos 1
,out = outFlags_ "diff.txt"
} &=# help "Perform a diff"
mode_ = cmdArgsMode# $ modes# [create_,diff_] &=# help "Create and compare differences" &=# program "diffy" &=# summary "Diffy v1.0"
|])
-- STOP MANUAL
test = do
let Tester{..} = testers "Diffy" [mode,mode_]
fails []
isHelp ["--help"] ["diffy [COMMAND] ... [OPTIONS]"] -- FIXME: Should know that root is not valid, thus no brackets on [COMMAND]
isHelp ["create","--help"] []
isHelp ["diff","--help"] []
isHelpNot ["--help"] ["diffy"]
isVersion ["--version"] "Diffy v1.0"
isVersion ["--numeric-version"] "1.0"
["create"] === create
fails ["create","file1"]
fails ["create","--quiet"]
fails ["create","--verbose"]
isVerbosity ["create"] Normal
["create","--src","x"] === create{src=Just "x"}
["create","--src","x","--src","y"] === create{src=Just "y"}
fails ["diff","--src","x"]
fails ["create","foo"]
["diff","foo1","foo2"] === diff{old="foo1",new="foo2"}
fails ["diff","foo1"]
fails ["diff","foo1","foo2","foo3"]
completion [] (0,0) [CompleteValue "create",CompleteValue "diff",CompleteValue "--out",CompleteValue "--help",CompleteValue "--version",CompleteValue "--numeric-version"]
completion ["d"] (0,1) [CompleteValue "diff"]
completion ["dd"] (0,2) []
| ndmitchell/cmdargs | System/Console/CmdArgs/Test/Implicit/Diffy.hs | bsd-3-clause | 2,681 | 0 | 11 | 592 | 703 | 376 | 327 | 57 | 1 |
-- -----------------------------------------------------------------------------
-- Alex wrapper code.
--
-- This code is in the PUBLIC DOMAIN; you may copy it freely and use
-- it for any purpose whatsoever.
import Control.Applicative (Applicative (..))
import Data.Word (Word8)
#if defined(ALEX_BASIC_BYTESTRING) || defined(ALEX_POSN_BYTESTRING) || defined(ALEX_MONAD_BYTESTRING)
import qualified Data.Char
import qualified Data.ByteString.Lazy as ByteString
import qualified Data.ByteString.Internal as ByteString (w2c)
#elif defined(ALEX_STRICT_BYTESTRING)
import qualified Data.Char
import qualified Data.ByteString as ByteString
import qualified Data.ByteString.Internal as ByteString
import qualified Data.ByteString.Unsafe as ByteString
#else
import qualified Data.Bits
-- | Encode a Haskell Char to a list of Word8 values, in UTF8 format.
utf8Encode :: Char -> [Word8]
utf8Encode = map fromIntegral . go . ord
where
go oc
| oc <= 0x7f = [oc]
| oc <= 0x7ff = [ 0xc0 + (oc `Data.Bits.shiftR` 6)
, 0x80 + oc Data.Bits..&. 0x3f
]
| oc <= 0xffff = [ 0xe0 + (oc `Data.Bits.shiftR` 12)
, 0x80 + ((oc `Data.Bits.shiftR` 6) Data.Bits..&. 0x3f)
, 0x80 + oc Data.Bits..&. 0x3f
]
| otherwise = [ 0xf0 + (oc `Data.Bits.shiftR` 18)
, 0x80 + ((oc `Data.Bits.shiftR` 12) Data.Bits..&. 0x3f)
, 0x80 + ((oc `Data.Bits.shiftR` 6) Data.Bits..&. 0x3f)
, 0x80 + oc Data.Bits..&. 0x3f
]
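-- A hedged example (not part of the original wrapper code):
--
-- >>> utf8Encode 'λ'
-- [206,187]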
#endif
type Byte = Word8
-- -----------------------------------------------------------------------------
-- The input type
#if defined(ALEX_POSN) || defined(ALEX_MONAD) || defined(ALEX_GSCAN)
type AlexInput = (AlexPosn, -- current position,
Char, -- previous char
[Byte], -- pending bytes on current char
String) -- current input string
ignorePendingBytes :: AlexInput -> AlexInput
ignorePendingBytes (p,c,ps,s) = (p,c,[],s)
alexInputPrevChar :: AlexInput -> Char
alexInputPrevChar (p,c,bs,s) = c
alexGetByte :: AlexInput -> Maybe (Byte,AlexInput)
alexGetByte (p,c,(b:bs),s) = Just (b,(p,c,bs,s))
alexGetByte (p,c,[],[]) = Nothing
alexGetByte (p,_,[],(c:s)) = let p' = alexMove p c
(b:bs) = utf8Encode c
in p' `seq` Just (b, (p', c, bs, s))
#endif
#if defined(ALEX_POSN_BYTESTRING) || defined(ALEX_MONAD_BYTESTRING)
type AlexInput = (AlexPosn, -- current position,
Char, -- previous char
ByteString.ByteString) -- current input string
ignorePendingBytes :: AlexInput -> AlexInput
ignorePendingBytes i = i -- no pending bytes when lexing bytestrings
alexInputPrevChar :: AlexInput -> Char
alexInputPrevChar (p,c,s) = c
alexGetByte :: AlexInput -> Maybe (Byte,AlexInput)
alexGetByte (p,_,cs) | ByteString.null cs = Nothing
| otherwise = let b = ByteString.head cs
cs' = ByteString.tail cs
c = ByteString.w2c b
p' = alexMove p c
in p' `seq` cs' `seq` Just (b, (p', c, cs'))
#endif
#ifdef ALEX_BASIC_BYTESTRING
type AlexInput = (Char,
ByteString.ByteString)
alexInputPrevChar :: AlexInput -> Char
alexInputPrevChar (c,_) = c
alexGetByte (_, cs)
| ByteString.null cs = Nothing
| otherwise = Just (ByteString.head cs,
(ByteString.w2c $ ByteString.head cs,
ByteString.tail cs))
#endif
#ifdef ALEX_STRICT_BYTESTRING
data AlexInput = AlexInput { alexChar :: {-# UNPACK #-}!Char
, alexStr :: {-# UNPACK #-}!ByteString.ByteString }
alexInputPrevChar :: AlexInput -> Char
alexInputPrevChar = alexChar
alexGetByte (AlexInput _ cs)
| ByteString.null cs = Nothing
| otherwise = Just $! (ByteString.head cs, AlexInput c cs')
where
(c,cs') = (ByteString.w2c (ByteString.unsafeHead cs)
, ByteString.unsafeTail cs)
#endif
-- -----------------------------------------------------------------------------
-- Token positions
-- `Posn' records the location of a token in the input text. It has three
-- fields: the address (number of characters preceding the token), line number
-- and column of a token within the file. `start_pos' gives the position of the
-- start of the file and `eof_pos' a standard encoding for the end of file.
-- `move_pos' calculates the new position after traversing a given character,
-- assuming the usual eight character tab stops.
#if defined(ALEX_POSN) || defined(ALEX_MONAD) || defined(ALEX_POSN_BYTESTRING) || defined(ALEX_MONAD_BYTESTRING) || defined(ALEX_GSCAN)
data AlexPosn = AlexPn !Int !Int !Int
deriving (Eq,Show)
alexStartPos :: AlexPosn
alexStartPos = AlexPn 0 1 1
alexMove :: AlexPosn -> Char -> AlexPosn
alexMove (AlexPn a l c) '\t' = AlexPn (a+1) l (((c+7) `div` 8)*8+1)
alexMove (AlexPn a l c) '\n' = AlexPn (a+1) (l+1) 1
alexMove (AlexPn a l c) _ = AlexPn (a+1) l (c+1)
#endif
-- -----------------------------------------------------------------------------
-- Default monad
#ifdef ALEX_MONAD
data AlexState = AlexState {
alex_pos :: !AlexPosn, -- position at current input location
alex_inp :: String, -- the current input
alex_chr :: !Char, -- the character before the input
alex_bytes :: [Byte],
alex_scd :: !Int -- the current startcode
#ifdef ALEX_MONAD_USER_STATE
, alex_ust :: AlexUserState -- AlexUserState will be defined in the user program
#endif
}
-- Compile with -funbox-strict-fields for best results!
runAlex :: String -> Alex a -> Either String a
runAlex input (Alex f)
= case f (AlexState {alex_pos = alexStartPos,
alex_inp = input,
alex_chr = '\n',
alex_bytes = [],
#ifdef ALEX_MONAD_USER_STATE
alex_ust = alexInitUserState,
#endif
alex_scd = 0}) of Left msg -> Left msg
Right ( _, a ) -> Right a
newtype Alex a = Alex { unAlex :: AlexState -> Either String (AlexState, a) }
instance Functor Alex where
fmap f a = Alex $ \s -> case unAlex a s of
Left msg -> Left msg
Right (s', a') -> Right (s', f a')
instance Applicative Alex where
pure a = Alex $ \s -> Right (s, a)
fa <*> a = Alex $ \s -> case unAlex fa s of
Left msg -> Left msg
Right (s', f) -> case unAlex a s' of
Left msg -> Left msg
Right (s'', b) -> Right (s'', f b)
instance Monad Alex where
m >>= k = Alex $ \s -> case unAlex m s of
Left msg -> Left msg
Right (s',a) -> unAlex (k a) s'
return a = Alex $ \s -> Right (s,a)
alexGetInput :: Alex AlexInput
alexGetInput
= Alex $ \s@AlexState{alex_pos=pos,alex_chr=c,alex_bytes=bs,alex_inp=inp} ->
Right (s, (pos,c,bs,inp))
alexSetInput :: AlexInput -> Alex ()
alexSetInput (pos,c,bs,inp)
= Alex $ \s -> case s{alex_pos=pos,alex_chr=c,alex_bytes=bs,alex_inp=inp} of
s@(AlexState{}) -> Right (s, ())
alexError :: String -> Alex a
alexError message = Alex $ \s -> Left message
alexGetStartCode :: Alex Int
alexGetStartCode = Alex $ \s@AlexState{alex_scd=sc} -> Right (s, sc)
alexSetStartCode :: Int -> Alex ()
alexSetStartCode sc = Alex $ \s -> Right (s{alex_scd=sc}, ())
#ifdef ALEX_MONAD_USER_STATE
alexGetUserState :: Alex AlexUserState
alexGetUserState = Alex $ \s@AlexState{alex_ust=ust} -> Right (s,ust)
alexSetUserState :: AlexUserState -> Alex ()
alexSetUserState ss = Alex $ \s -> Right (s{alex_ust=ss}, ())
#endif
alexMonadScan = do
inp <- alexGetInput
sc <- alexGetStartCode
case alexScan inp sc of
AlexEOF -> alexEOF
AlexError ((AlexPn _ line column),_,_,_) -> alexError $ "lexical error at line " ++ (show line) ++ ", column " ++ (show column)
AlexSkip inp' len -> do
alexSetInput inp'
alexMonadScan
AlexToken inp' len action -> do
alexSetInput inp'
action (ignorePendingBytes inp) len
-- -----------------------------------------------------------------------------
-- Useful token actions
type AlexAction result = AlexInput -> Int -> Alex result
-- just ignore this token and scan another one
-- skip :: AlexAction result
skip input len = alexMonadScan
-- ignore this token, but set the start code to a new value
-- begin :: Int -> AlexAction result
begin code input len = do alexSetStartCode code; alexMonadScan
-- perform an action for this token, and set the start code to a new value
andBegin :: AlexAction result -> Int -> AlexAction result
(action `andBegin` code) input len = do alexSetStartCode code; action input len
token :: (AlexInput -> Int -> token) -> AlexAction token
token t input len = return (t input len)
#endif /* ALEX_MONAD */
-- -----------------------------------------------------------------------------
-- Monad (with ByteString input)
#ifdef ALEX_MONAD_BYTESTRING
data AlexState = AlexState {
alex_pos :: !AlexPosn, -- position at current input location
alex_inp :: ByteString.ByteString, -- the current input
alex_chr :: !Char, -- the character before the input
alex_scd :: !Int -- the current startcode
#ifdef ALEX_MONAD_USER_STATE
, alex_ust :: AlexUserState -- AlexUserState will be defined in the user program
#endif
}
-- Compile with -funbox-strict-fields for best results!
runAlex :: ByteString.ByteString -> Alex a -> Either String a
runAlex input (Alex f)
= case f (AlexState {alex_pos = alexStartPos,
alex_inp = input,
alex_chr = '\n',
#ifdef ALEX_MONAD_USER_STATE
alex_ust = alexInitUserState,
#endif
alex_scd = 0}) of Left msg -> Left msg
Right ( _, a ) -> Right a
newtype Alex a = Alex { unAlex :: AlexState -> Either String (AlexState, a) }
instance Monad Alex where
m >>= k = Alex $ \s -> case unAlex m s of
Left msg -> Left msg
Right (s',a) -> unAlex (k a) s'
return a = Alex $ \s -> Right (s,a)
alexGetInput :: Alex AlexInput
alexGetInput
= Alex $ \s@AlexState{alex_pos=pos,alex_chr=c,alex_inp=inp} ->
Right (s, (pos,c,inp))
alexSetInput :: AlexInput -> Alex ()
alexSetInput (pos,c,inp)
= Alex $ \s -> case s{alex_pos=pos,alex_chr=c,alex_inp=inp} of
s@(AlexState{}) -> Right (s, ())
alexError :: String -> Alex a
alexError message = Alex $ \s -> Left message
alexGetStartCode :: Alex Int
alexGetStartCode = Alex $ \s@AlexState{alex_scd=sc} -> Right (s, sc)
alexSetStartCode :: Int -> Alex ()
alexSetStartCode sc = Alex $ \s -> Right (s{alex_scd=sc}, ())
alexMonadScan = do
inp@(_,_,str) <- alexGetInput
sc <- alexGetStartCode
case alexScan inp sc of
AlexEOF -> alexEOF
AlexError ((AlexPn _ line column),_,_) -> alexError $ "lexical error at line " ++ (show line) ++ ", column " ++ (show column)
AlexSkip inp' len -> do
alexSetInput inp'
alexMonadScan
AlexToken inp'@(_,_,str') len action -> do
alexSetInput inp'
action (ignorePendingBytes inp) len
where
len = ByteString.length str - ByteString.length str'
-- -----------------------------------------------------------------------------
-- Useful token actions
type AlexAction result = AlexInput -> Int -> Alex result
-- just ignore this token and scan another one
-- skip :: AlexAction result
skip input len = alexMonadScan
-- ignore this token, but set the start code to a new value
-- begin :: Int -> AlexAction result
begin code input len = do alexSetStartCode code; alexMonadScan
-- perform an action for this token, and set the start code to a new value
andBegin :: AlexAction result -> Int -> AlexAction result
(action `andBegin` code) input len = do alexSetStartCode code; action input len
token :: (AlexInput -> Int -> token) -> AlexAction token
token t input len = return (t input len)
#endif /* ALEX_MONAD_BYTESTRING */
-- -----------------------------------------------------------------------------
-- Basic wrapper
#ifdef ALEX_BASIC
type AlexInput = (Char,[Byte],String)
alexInputPrevChar :: AlexInput -> Char
alexInputPrevChar (c,_,_) = c
-- alexScanTokens :: String -> [token]
alexScanTokens str = go ('\n',[],str)
where go inp@(_,_bs,s) =
case alexScan inp 0 of
AlexEOF -> []
AlexError _ -> error "lexical error"
AlexSkip inp' len -> go inp'
AlexToken inp' len act -> act (take len s) : go inp'
alexGetByte :: AlexInput -> Maybe (Byte,AlexInput)
alexGetByte (c,(b:bs),s) = Just (b,(c,bs,s))
alexGetByte (c,[],[]) = Nothing
alexGetByte (_,[],(c:s)) = case utf8Encode c of
(b:bs) -> Just (b, (c, bs, s))
[] -> Nothing
#endif
-- -----------------------------------------------------------------------------
-- Basic wrapper, ByteString version
#ifdef ALEX_BASIC_BYTESTRING
-- alexScanTokens :: ByteString.ByteString -> [token]
alexScanTokens str = go ('\n',str)
where go inp@(_,str) =
case alexScan inp 0 of
AlexEOF -> []
AlexError _ -> error "lexical error"
AlexSkip inp' len -> go inp'
AlexToken inp'@(_,str') _ act -> act (ByteString.take len str) : go inp'
where len = ByteString.length str - ByteString.length str'
#endif
#ifdef ALEX_STRICT_BYTESTRING
-- alexScanTokens :: ByteString.ByteString -> [token]
alexScanTokens str = go (AlexInput '\n' str)
where go inp@(AlexInput _ str) =
case alexScan inp 0 of
AlexEOF -> []
AlexError _ -> error "lexical error"
AlexSkip inp' len -> go inp'
AlexToken inp'@(AlexInput _ str') _ act -> act (ByteString.unsafeTake len str) : go inp'
where len = ByteString.length str - ByteString.length str'
#endif
-- -----------------------------------------------------------------------------
-- Posn wrapper
-- Adds text positions to the basic model.
#ifdef ALEX_POSN
--alexScanTokens :: String -> [token]
alexScanTokens str = go (alexStartPos,'\n',[],str)
where go inp@(pos,_,_,str) =
case alexScan inp 0 of
AlexEOF -> []
AlexError ((AlexPn _ line column),_,_,_) -> error $ "lexical error at line " ++ (show line) ++ ", column " ++ (show column)
AlexSkip inp' len -> go inp'
AlexToken inp' len act -> act pos (take len str) : go inp'
#endif
-- -----------------------------------------------------------------------------
-- Posn wrapper, ByteString version
#ifdef ALEX_POSN_BYTESTRING
--alexScanTokens :: ByteString -> [token]
alexScanTokens str = go (alexStartPos,'\n',str)
where go inp@(pos,_,str) =
case alexScan inp 0 of
AlexEOF -> []
AlexError ((AlexPn _ line column),_,_) -> error $ "lexical error at line " ++ (show line) ++ ", column " ++ (show column)
AlexSkip inp' len -> go inp'
AlexToken inp' len act -> act pos (ByteString.take (fromIntegral len) str) : go inp'
#endif
-- -----------------------------------------------------------------------------
-- GScan wrapper
-- For compatibility with previous versions of Alex, and because we can.
#ifdef ALEX_GSCAN
alexGScan stop state inp = alex_gscan stop alexStartPos '\n' [] inp (0,state)
alex_gscan stop p c bs inp (sc,state) =
case alexScan (p,c,bs,inp) sc of
AlexEOF -> stop p c inp (sc,state)
AlexError _ -> stop p c inp (sc,state)
AlexSkip (p',c',bs',inp') len -> alex_gscan stop p' c' bs' inp' (sc,state)
AlexToken (p',c',bs',inp') len k ->
k p c inp len (\scs -> alex_gscan stop p' c' bs' inp' scs)
(sc,state)
#endif
| kumasento/alex | templates/wrappers.hs | bsd-3-clause | 16,536 | 2 | 16 | 4,592 | 4,360 | 2,361 | 1,999 | 17 | 1 |
{-# LANGUAGE
DeriveDataTypeable
, DeriveGeneric
, LambdaCase
, OverloadedStrings
, ScopedTypeVariables
, TemplateHaskell
, TypeFamilies
#-}
module Api.Test where
import Control.Monad.Reader
import Control.Monad.Trans.Error
import Data.Aeson
import Data.Data
import Data.JSON.Schema
import Data.Text (Text)
import GHC.Generics
import Generics.Generic.Aeson
import Generics.Regular
import Generics.Regular.XmlPickler
import Text.XML.HXT.Arrow.Pickle
import Rest
import qualified Rest.Resource as R
import ApiTypes
import qualified Api.Test.Err2 as E2
-- | WithText extends the root of the API with a reader containing the way to
-- identify the resource under test in our URLs, currently just a 'Text' value.
type WithText = ReaderT Text BlogApi
data Err = Err deriving (Generic, Show, Typeable)
deriveAll ''Err "PFErr"
type instance PF Err = PFErr
instance ToJSON Err where toJSON = gtoJson
instance FromJSON Err where parseJSON = gparseJson
instance JSONSchema Err where schema = gSchema
instance XmlPickler Err where xpickle = gxpickle
instance ToResponseCode Err where
toResponseCode _ = 400
data Ok = Ok deriving (Generic, Show, Typeable)
deriveAll ''Ok "PFOk"
type instance PF Ok = PFOk
instance XmlPickler Ok where xpickle = gxpickle
instance ToJSON Ok where toJSON = gtoJson
instance FromJSON Ok where parseJSON = gparseJson
instance JSONSchema Ok where schema = gSchema
resource :: Resource BlogApi WithText Text Void Void
resource = mkResourceReader
{ R.name = "test"
, R.actions = [ ("noResponse" , noResponse )
, ("onlyError" , onlyError )
, ("differentFormats" , differentFormats )
, ("intersectedFormats" , intersectedFormats )
, ("intersectedFormats2", intersectedFormats2)
, ("errorImport" , errorImport )
, ("noError" , noError )
, ("justStringO" , justStringO )
, ("preferJson" , preferJson )
, ("octetStreamOut" , octetStreamOut )
, ("onlyInput" , onlyInput )
]
}
noResponse :: Handler WithText
noResponse = mkConstHandler id $ return ()
onlyError :: Handler WithText
onlyError = mkConstHandler (jsonE . someE) $
throwError $ domainReason Err
differentFormats :: Handler WithText
differentFormats = mkInputHandler (jsonE . someE . xmlO . someO . stringI . someI) $
\case
"error" -> throwError $ domainReason Err
_ -> return Ok
intersectedFormats :: Handler WithText
intersectedFormats = mkInputHandler (jsonE . someE . xmlO . jsonO . someO . stringI . someI) $
\case
"error" -> throwError $ domainReason Err
_ -> return Ok
intersectedFormats2 :: Handler WithText
intersectedFormats2 = mkInputHandler (xmlE . someE . xmlO . jsonO . someO . stringI . someI) $
\case
"error" -> throwError $ domainReason Err
_ -> return Ok
errorImport :: Handler WithText
errorImport = mkIdHandler (stringI . rawXmlO . xmlE . someE) $ \s (_::Text) ->
case s of
"error" -> throwError $ domainReason E2.Err
_ -> return "<ok/>"
noError :: Handler WithText
noError = mkConstHandler (jsonO . someO) $ return Ok
justStringO :: Handler WithText
justStringO = mkConstHandler (stringO . someO) $ return "Ok"
preferJson :: Handler WithText
preferJson = mkInputHandler (xmlJsonO . xmlJsonE . stringI . someI) $
\case
"error" -> throwError $ domainReason Err
_ -> return Ok
octetStreamOut :: Handler WithText
octetStreamOut = mkInputHandler (fileI . fileO . xmlJsonE) $
\case
"error" -> throwError $ domainReason Err
_ -> return ("ok", "ok")
onlyInput :: Handler WithText
onlyInput = mkInputHandler (jsonI . someI) $ \() -> throwError NotFound
| tinkerthaler/basic-invoice-rest | example-api/Api/Test.hs | bsd-3-clause | 3,901 | 0 | 13 | 983 | 1,016 | 551 | 465 | 96 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
module ApiAnnotation (
getAnnotation, getAndRemoveAnnotation,
getAnnotationComments,getAndRemoveAnnotationComments,
ApiAnns,
ApiAnnKey,
AnnKeywordId(..),
AnnotationComment(..),
LRdrName -- Exists for haddocks only
) where
import RdrName
import Outputable
import SrcLoc
import qualified Data.Map as Map
import Data.Data
{- Note [Api annotations]
~~~~~~~~~~~~~~~~~~~~~~
In order to do source to source conversions using the GHC API, the
locations of all elements of the original source needs to be tracked.
This includes keywords such as 'let' / 'in' / 'do' etc as well as
punctuation such as commas and braces, and also comments.
These are captured in a structure separate from the parse tree, and
returned in the pm_annotations field of the ParsedModule type.
The non-comment annotations are stored indexed to the SrcSpan of the
AST element containing them, together with a AnnKeywordId value
identifying the specific keyword being captured.
> type ApiAnnKey = (SrcSpan,AnnKeywordId)
>
> Map.Map ApiAnnKey SrcSpan
So
> let x = 1 in 2 * x
would result in the AST element
L span (HsLet (binds for x = 1) (2 * x))
and the annotations
(span,AnnLet) having the location of the 'let' keyword
(span,AnnIn) having the location of the 'in' keyword
The comments are indexed to the SrcSpan of the lowest AST element
enclosing them
> Map.Map SrcSpan [Located AnnotationComment]
So the full ApiAnns type is
> type ApiAnns = ( Map.Map ApiAnnKey SrcSpan
> , Map.Map SrcSpan [Located AnnotationComment])
This is done in the lexer / parser as follows.
The PState variable in the lexer has the following variables added
> annotations :: [(ApiAnnKey,SrcSpan)],
> comment_q :: [Located Token],
> annotations_comments :: [(SrcSpan,[Located AnnotationComment])]
The first and last store the values that end up in the ApiAnns value
at the end via Map.fromList
The comment_q captures comments as they are seen in the token stream,
so that when they are ready to be allocated via the parser they are
available.
The parser interacts with the lexer using the function
> addAnnotation :: SrcSpan -> AnnKeywordId -> SrcSpan -> P ()
which takes the AST element SrcSpan, the annotation keyword and the
target SrcSpan.
This adds the annotation to the `annotations` field of `PState` and
transfers any comments in `comment_q` to the `annotations_comments`
field.
Parser
------
The parser implements a number of helper types and methods for the
capture of annotations
> type AddAnn = (SrcSpan -> P ())
>
> mj :: AnnKeywordId -> Located e -> (SrcSpan -> P ())
> mj a l = (\s -> addAnnotation s a (gl l))
AddAnn represents the addition of an annotation a to a provided
SrcSpan, and `mj` constructs an AddAnn value.
> ams :: Located a -> [AddAnn] -> P (Located a)
> ams a@(L l _) bs = (mapM_ (\a -> a l) bs) >> return a
So the production in Parser.y for the HsLet AST element is
| 'let' binds 'in' exp {% ams (sLL $1 $> $ HsLet (snd $ unLoc $2) $4)
(mj AnnLet $1:mj AnnIn $3
:(fst $ unLoc $2)) }
This adds an AnnLet annotation for 'let', an AnnIn for 'in', as well
as any annotations that may arise in the binds. This will include open
and closing braces if they are used to delimit the let expressions.
-}
-- ---------------------------------------------------------------------
type ApiAnns = ( Map.Map ApiAnnKey [SrcSpan]
, Map.Map SrcSpan [Located AnnotationComment])
type ApiAnnKey = (SrcSpan,AnnKeywordId)
-- | Retrieve a list of annotation 'SrcSpan's based on the 'SrcSpan'
-- of the annotated AST element, and the known type of the annotation.
getAnnotation :: ApiAnns -> SrcSpan -> AnnKeywordId -> [SrcSpan]
getAnnotation (anns,_) span ann
= case Map.lookup (span,ann) anns of
Nothing -> []
Just ss -> ss
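-- A hedged usage sketch (not from the original module): given the annotations
-- of a parsed module, the location of the 'let' keyword belonging to a
-- particular HsLet expression would be looked up as follows, where @anns@ and
-- @hsLetSpan@ are placeholders for values obtained from the parser:
--
-- > getAnnotation anns hsLetSpan AnnLet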
-- | Retrieve a list of annotation 'SrcSpan's based on the 'SrcSpan'
-- of the annotated AST element, and the known type of the annotation.
-- The list is removed from the annotations.
getAndRemoveAnnotation :: ApiAnns -> SrcSpan -> AnnKeywordId
-> ([SrcSpan],ApiAnns)
getAndRemoveAnnotation (anns,cs) span ann
= case Map.lookup (span,ann) anns of
Nothing -> ([],(anns,cs))
Just ss -> (ss,(Map.delete (span,ann) anns,cs))
-- |Retrieve the comments allocated to the current 'SrcSpan'
--
-- Note: A given 'SrcSpan' may appear in multiple AST elements,
-- beware of duplicates
getAnnotationComments :: ApiAnns -> SrcSpan -> [Located AnnotationComment]
getAnnotationComments (_,anns) span =
case Map.lookup span anns of
Just cs -> cs
Nothing -> []
-- |Retrieve the comments allocated to the current 'SrcSpan', and
-- remove them from the annotations
getAndRemoveAnnotationComments :: ApiAnns -> SrcSpan
-> ([Located AnnotationComment],ApiAnns)
getAndRemoveAnnotationComments (anns,canns) span =
case Map.lookup span canns of
Just cs -> (cs,(anns,Map.delete span canns))
Nothing -> ([],(anns,canns))
-- --------------------------------------------------------------------
-- | API Annotations exist so that tools can perform source to source
-- conversions of Haskell code. They are used to keep track of the
-- various syntactic keywords that are not captured in the existing
-- AST.
--
-- The annotations, together with original source comments are made
-- available in the @'pm_annotations'@ field of @'GHC.ParsedModule'@.
-- Comments are only retained if @'Opt_KeepRawTokenStream'@ is set in
-- @'DynFlags.DynFlags'@ before parsing.
--
-- Note: in general the names of these are taken from the
-- corresponding token, unless otherwise noted
-- See note [Api annotations] above for details of the usage
data AnnKeywordId
= AnnAs
| AnnAt
| AnnBang -- ^ '!'
| AnnBackquote -- ^ '`'
| AnnBy
| AnnCase -- ^ case or lambda case
| AnnClass
| AnnClose -- ^ '\#)' or '\#-}' etc
| AnnCloseC -- ^ '}'
| AnnCloseP -- ^ ')'
| AnnCloseS -- ^ ']'
| AnnColon
| AnnComma -- ^ as a list separator
| AnnCommaTuple -- ^ in a RdrName for a tuple
| AnnDarrow -- ^ '=>'
| AnnData
| AnnDcolon -- ^ '::'
| AnnDefault
| AnnDeriving
| AnnDo
| AnnDot -- ^ '.'
| AnnDotdot -- ^ '..'
| AnnElse
| AnnEqual
| AnnExport
| AnnFamily
| AnnForall
| AnnForeign
| AnnFunId -- ^ for function name in matches where there are
-- multiple equations for the function.
| AnnGroup
| AnnHeader -- ^ for CType
| AnnHiding
| AnnIf
| AnnImport
| AnnIn
| AnnInfix -- ^ 'infix' or 'infixl' or 'infixr'
| AnnInstance
| AnnLam
| AnnLarrow -- ^ '<-'
| AnnLet
| AnnMdo
| AnnMinus -- ^ '-'
| AnnModule
| AnnNewtype
| AnnOf
| AnnOpen -- ^ '(\#' or '{-\# LANGUAGE' etc
| AnnOpenC -- ^ '{'
| AnnOpenP -- ^ '('
| AnnOpenS -- ^ '['
| AnnPackageName
| AnnPattern
| AnnProc
| AnnQualified
| AnnRarrow -- ^ '->'
| AnnRec
| AnnRole
| AnnSafe
| AnnSemi -- ^ ';'
| AnnStatic -- ^ 'static'
| AnnThen
| AnnTilde -- ^ '~'
| AnnTildehsh -- ^ '~#'
| AnnType
| AnnUnit -- ^ '()' for types
| AnnUsing
| AnnVal -- ^ e.g. INTEGER
| AnnValStr -- ^ String value, will need quotes when output
| AnnVbar -- ^ '|'
| AnnWhere
| Annlarrowtail -- ^ '-<'
| Annrarrowtail -- ^ '->'
| AnnLarrowtail -- ^ '-<<'
| AnnRarrowtail -- ^ '>>-'
| AnnEofPos
deriving (Eq,Ord,Data,Typeable,Show)
instance Outputable AnnKeywordId where
ppr x = text (show x)
-- ---------------------------------------------------------------------
data AnnotationComment =
-- Documentation annotations
AnnDocCommentNext String -- ^ something beginning '-- |'
| AnnDocCommentPrev String -- ^ something beginning '-- ^'
| AnnDocCommentNamed String -- ^ something beginning '-- $'
| AnnDocSection Int String -- ^ a section heading
| AnnDocOptions String -- ^ doc options (prune, ignore-exports, etc)
| AnnDocOptionsOld String -- ^ doc options declared "-- # ..."-style
| AnnLineComment String -- ^ comment starting by "--"
| AnnBlockComment String -- ^ comment in {- -}
deriving (Eq,Ord,Data,Typeable,Show)
-- Note: these are based on the Token versions, but the Token type is
-- defined in Lexer.x and bringing it in here would create a loop
instance Outputable AnnotationComment where
ppr x = text (show x)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose','ApiAnnotation.AnnComma',
-- 'ApiAnnotation.AnnRarrow','ApiAnnotation.AnnTildehsh',
-- 'ApiAnnotation.AnnTilde'
-- - May have 'ApiAnnotation.AnnComma' when in a list
type LRdrName = Located RdrName
| green-haskell/ghc | compiler/parser/ApiAnnotation.hs | bsd-3-clause | 8,957 | 0 | 11 | 2,086 | 919 | 572 | 347 | 130 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Geometry.Combinators
-- Copyright : (c) 2011-2017 diagrams team (see LICENSE)
-- License : BSD-style (see LICENSE)
-- Maintainer : [email protected]
--
-- Higher-level tools for combining geometric objects.
--
-----------------------------------------------------------------------------
module Geometry.Combinators
(
-- * Binary operations
beside
, atDirection
-- * n-ary operations
, appends
, position, atPoints
, cat
, sep
, sepEven
-- , composeAligned
-- * Alignment
, align
, alignBy
, alignBy'
-- * Snugging
, snug
, snugBy
-- * Centering
, center
, centerV
, snugCenter
, snugCenterV
) where
import Control.Lens ((&))
import Control.Lens.Cons
import Data.Foldable (foldl')
import Data.Maybe (fromMaybe)
import Data.Monoid.WithSemigroup
import qualified Data.Semigroup as Sem
import Geometry.Direction
import Geometry.Envelope
import Geometry.Juxtapose
import Geometry.Space
import Geometry.Trace
import Geometry.Transform
import Linear.Affine
import Linear.Metric
import Linear.V2
import Linear.Vector
------------------------------------------------------------
-- Combining two objects
------------------------------------------------------------
-- | Place two monoidal objects (/i.e./ diagrams, paths,
-- animations...) next to each other along the given vector. In
-- particular, place the second object so that the vector points
-- from the local origin of the first object to the local origin of
-- the second object, at a distance so that their envelopes are just
-- tangent. The local origin of the new, combined object is the
-- local origin of the first object (unless the first object is the
-- identity element, in which case the second object is returned
-- unchanged).
--
-- <<diagrams/src_Geometry_Combinators_besideEx.svg#diagram=besideEx&height=200>>
--
-- > besideEx = beside (r2 (20,30))
-- > (circle 1 # fc orange)
-- > (circle 1.5 # fc purple)
-- > # showOrigin
-- > # centerXY # pad 1.1
--
-- Note that @beside v@ is associative, so objects under @beside v@
-- form a semigroup for any given vector @v@. In fact, they also
-- form a monoid: 'mempty' is clearly a right identity (@beside v d1
-- mempty === d1@), and there should also be a special case to make
-- it a left identity, as described above.
--
-- In older versions of the @diagrams@ library, @beside@ put the
-- local origin of the result at the point of tangency between the
-- two inputs. That semantics can easily be recovered by performing
-- an alignment on the first input before combining. That is, if
-- @beside'@ denotes the old semantics,
--
-- > beside' v x1 x2 = beside v (x1 # align v) x2
--
-- To get something like @beside v x1 x2@ whose local origin is
-- identified with that of @x2@ instead of @x1@, use @beside
-- (negateV v) x2 x1@.
beside :: (Juxtaposable a, Sem.Semigroup a) => Vn a -> a -> a -> a
beside v d1 d2 = d1 Sem.<> juxtapose v d1 d2
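-- A hedged usage sketch (not from the original docs; 'unitX' and drawing
-- combinators such as 'circle' and 'square' are assumed to come from the rest
-- of the diagrams API):
--
-- > beside unitX (circle 1) (square 2)
--
-- places the square to the right of the circle with their envelopes just
-- touching, and the combined local origin at the circle's origin.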
-- | Place two juxtaposable objects adjacent to one another, with the
-- second placed in the direction 'd' from the first. The local
-- origin of the resulting combined object is the same as the local
-- origin of the first. See the documentation of 'beside' for more
-- information.
atDirection
:: (Juxtaposable a, Sem.Semigroup a)
=> Direction (V a) (N a) -> a -> a -> a
atDirection = beside . fromDirection
------------------------------------------------------------
-- Combining multiple objects
------------------------------------------------------------
-- | @appends x ys@ appends each of the objects in @ys@ to the object
-- @x@ in the corresponding direction. Note that each object in
-- @ys@ is positioned beside @x@ /without/ reference to the other
-- objects in @ys@, so this is not the same as iterating 'beside'.
--
-- <<diagrams/src_Geometry_Combinators_appendsEx.svg#diagram=appendsEx&width=200>>
--
-- > appendsEx = appends c (zip (iterateN 6 (rotateBy (1/6)) unitX) (repeat c))
-- > # centerXY # pad 1.1
-- > where c = circle 1
appends :: (Metric (V a), Floating (N a), Juxtaposable a, Monoid' a) => a -> [(Vn a,a)] -> a
appends d1 apps = d1 Sem.<> mconcat (map (\(v,d) -> juxtapose (signorm v) d1 d) apps)
-- | Position things absolutely: combine a list of objects
-- (e.g. diagrams or paths) by assigning them absolute positions in
-- the vector space of the combined object.
--
-- <<diagrams/src_Geometry_Combinators_positionEx.svg#diagram=positionEx&height=300>>
--
-- > positionEx = position (zip (map mkPoint [-3, -2.8 .. 3]) (repeat spot))
-- > where spot = circle 0.2 # fc black
-- > mkPoint :: Double -> P2 Double
-- > mkPoint x = p2 (x,x*x)
position :: (InSpace v n a, HasOrigin a, Monoid a) => [(Point v n, a)] -> a
position = mconcat . map (uncurry moveTo)
-- | Curried version of @position@, takes a list of points and a list of
-- objects.
atPoints :: (InSpace v n a, HasOrigin a, Monoid' a) => [Point v n] -> [a] -> a
atPoints ps as = position $ zip ps as
-- | @cat v@ positions a list of objects so that their local origins
-- lie along a line in the direction of @v@. Successive objects
-- will have their envelopes just touching. The local origin
-- of the result will be the same as the local origin of the first
-- object.
--
-- See also 'sep', which takes a distance parameter allowing
-- certain aspects of the operation to be tweaked.
--
-- See also 'Geometry.TwoD.Combinators.hcat' and
-- 'Geometry.TwoD.Combinators.vcat'
cat
:: (InSpace v n a, Enveloped a, Monoid a, HasOrigin a)
=> v n -> [a] -> a
cat v = sep v 0
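-- A hedged example (again assuming 'unitY', 'circle' and 'square' from the
-- wider diagrams API):
--
-- > cat unitY [circle 1, square 2, circle 1]
--
-- stacks the three objects along the y-axis with envelopes just touching.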
-- | Similar to 'cat' but with a gap parameter which is used as the
-- distance between successive objects.
--
-- See also 'Geometry.TwoD.Combinators.hsep' and
-- 'Geometry.TwoD.Combinators.vsep'
sep
:: (InSpace v n t, Monoid t, Enveloped t, HasOrigin t)
=> v n -> n -> [t] -> t
sep _ _ [] = mempty
sep (signorm -> v) s (t0:ts) = snd $ foldl' f (n0, t0) ts
where
    -- If we come across an empty envelope, treat it as a point at the
    -- origin (this isn't ideal, but what else can we do? Maybe don't
    -- move it at all?)
extent' = fromMaybe (0,0) . extent v
n0 = snd $ extent' t0
f (!n, tAcc) t = (n + s - nMin + nMax, tAcc')
where
(nMin, nMax) = extent' t
nStart = n + s - nMin
tAcc' = tAcc `mappend` moveOriginTo (P $ negate nStart *^ v) t
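-- A hedged example of 'sep' (names assumed from the wider diagrams API):
--
-- > sep unitX 0.5 [circle 1, square 2, circle 1]
--
-- lays the objects out along the x-axis, leaving a gap of 0.5 between
-- successive envelopes.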
-- | Evenly separate items along the vector @v@ at distance @s@,
-- starting at the 'origin'.
--
-- >>> sepEven unitX $ map regPoly [3..7]
--
-- See also 'Geometry.TwoD.Combinators.hsepEven' and
-- 'Geometry.TwoD.Combinators.vsepEven'
sepEven
:: (InSpace v n t, Metric v, Floating n, Monoid t, HasOrigin t)
=> v n -> n -> [t] -> t
sepEven (signorm -> v) s =
position . zip (iterate (.+^ s *^ v) origin)
{-# INLINE sepEven #-}
------------------------------------------------------------------------
-- Aligning
------------------------------------------------------------------------
-- | @alignBy v d a@ moves the origin of @a@ along the vector @v@. If @d
-- = 1@, the origin is moved to the edge of the boundary in the
-- direction of @v@; if @d = -1@, it moves to the edge of the boundary
-- in the direction of the negation of @v@. Other values of @d@
-- interpolate linearly (so for example, @d = 0@ centers the origin
-- along the direction of @v@).
alignBy'
:: (InSpace v n t, Fractional n, HasOrigin t)
=> (v n -> t -> Maybe (n, n)) -> v n -> n -> t -> t
alignBy' f v d t = fromMaybe t $ do
(a,b) <- f v t
Just $ moveOriginTo (P $ lerp' ((d + 1) / 2) b a *^ v) t
where
lerp' alpha a b = alpha * a + (1 - alpha) * b
-- case f v of
-- Just (a,b) -> moveOriginTo (lerp ((d + 1) / 2) a b) t
-- Nothing -> t
{-# INLINE alignBy'#-}
alignBy
:: (InSpace v n t, Enveloped t, HasOrigin t)
=> v n -> n -> t -> t
alignBy = alignBy' extent
{-# INLINE alignBy#-}
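-- A hedged illustration, where @d@ stands for some 2D enveloped object and
-- 'unitX' for the unit x vector (both assumptions, not part of this module):
--
-- > alignBy unitX 1    d  -- origin moved to the right edge of the envelope
-- > alignBy unitX (-1) d  -- origin moved to the left edge
-- > alignBy unitX 0    d  -- origin centred along the x direction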
-- | @align v@ aligns an enveloped object along the edge in the
-- direction of @v@. That is, it moves the local origin in the
-- direction of @v@ until it is on the edge of the envelope. (Note
-- that if the local origin is outside the envelope to begin with, it
-- may have to move \"backwards\".)
align
:: (InSpace v n t, Enveloped t, HasOrigin t)
=> v n -> t -> t
align v = alignBy v 1
-- | Version of @alignBy@ specialized to use @traceBoundary@
snugBy
:: (InSpace v n t, Fractional n, Traced t, HasOrigin t)
=> v n -> n -> t -> t
snugBy = alignBy' traceBoundary
traceBoundary :: (InSpace v n t, Traced t) => v n -> t -> Maybe (n,n)
traceBoundary = \v t ->
case appTrace (getTrace t) origin v of
x :< xs -> foldl' (\(V2 a b) x' -> V2 (min a x') (max b x')) (V2 x x) xs
& \(V2 a b) -> Just (a,b)
_ -> Nothing
{-# INLINE traceBoundary #-}
-- | Like align but uses trace.
snug :: (InSpace v n t, Fractional n, Traced t, HasOrigin t)
=> v n -> t -> t
snug v = snugBy v 1
-- | @centerV v@ centers an enveloped object along the direction of
-- @v@.
centerV
:: (InSpace v n a, Enveloped a, HasOrigin a)
=> v n -> a -> a
centerV v = alignBy v 0
applyAll :: Foldable t => t (b -> b) -> b -> b
applyAll = foldr (.) id
-- | @center@ centers an enveloped object along all of its basis vectors.
center
:: (InSpace v n a, Traversable v, Enveloped a, HasOrigin a)
=> a -> a
center = applyAll fs
where
fs = map centerV basis
-- | Like @centerV@ using trace.
snugCenterV
:: (InSpace v n a, Fractional n, Traced a, HasOrigin a)
=> v n -> a -> a
snugCenterV v = snugBy v 0
-- | Like @center@ using trace.
snugCenter
:: (InSpace v n a, Traversable v, Fractional n, HasOrigin a, Traced a)
=> a -> a
snugCenter = applyAll fs
where
fs = map snugCenterV basis
| cchalmers/geometry | src/Geometry/Combinators.hs | bsd-3-clause | 10,535 | 0 | 17 | 2,546 | 1,950 | 1,094 | 856 | 126 | 2 |
import Test.Hspec
import EratosthenesSieve
main :: IO ()
main = hspec $ do
describe "The sieve of Eratosthenes" $ do
it "should return a list with all the prime numbers up to a given number" $ do
primesUpTo 2 `shouldBe` [2]
primesUpTo 3 `shouldBe` [2, 3]
primesUpTo 5 `shouldBe` [2, 3, 5]
primesUpTo 7 `shouldBe` [2, 3, 5, 7]
primesUpTo 11 `shouldBe` [2, 3, 5, 7, 11]
| theUniC/eratosthenes-sieve.hs | test/Spec.hs | bsd-3-clause | 405 | 0 | 15 | 104 | 153 | 83 | 70 | 11 | 1 |
module Pipe
( TSink
, TSource
, TPipe
, newTPipe
, writeTSink
, readTSource
) where
import Control.Concurrent.STM
newtype TSink a = TSink (TChan a)
newtype TSource a = TSource (TChan a)
type TPipe a = (TSink a, TSource a)
writeTSink :: TSink a -> a -> STM ()
writeTSink (TSink chan) = writeTChan chan
readTSource :: TSource a -> STM a
readTSource (TSource chan) = readTChan chan
newTPipe :: STM (TPipe a)
newTPipe = do
chan <- newTChan
return (TSink chan, TSource chan)
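-- A minimal usage sketch (illustrative, not part of the original module):
-- create a pipe, write into the sink and read back from the source.
--
-- > example :: IO ()
-- > example = do
-- >   (sink, source) <- atomically newTPipe
-- >   atomically $ writeTSink sink "hello"
-- >   msg <- atomically $ readTSource source
-- >   putStrLn msg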
| frerich/lambdacrawler | src/Pipe.hs | bsd-3-clause | 512 | 0 | 9 | 126 | 193 | 103 | 90 | 19 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Duration.HU.Tests
( tests
) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Duration.HU.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "HU Tests"
[ makeCorpusTest [Seal Duration] corpus
]
| facebookincubator/duckling | tests/Duckling/Duration/HU/Tests.hs | bsd-3-clause | 509 | 0 | 9 | 80 | 79 | 50 | 29 | 11 | 1 |
-- |
-- Module : Language.SequentCore.Plugin
-- Description : GHC plugin library
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Tools for writing a GHC plugin using the Sequent Core language in place of
-- GHC Core.
module Language.SequentCore.Plugin (
sequentPass, sequentPassWithFlags
) where
import Language.SequentCore.Driver.Flags
import Language.SequentCore.Syntax
import Language.SequentCore.Translate
import GhcPlugins ( ModGuts, CoreM
, bindsOnlyPass
, deShadowBinds
)
-- | Given a function that processes a module's bindings as Sequent Core terms,
-- perform the same processing as a Core-to-Core pass usable from a GHC plugin.
-- Intended to be passed to the @CoreDoPluginPass@ constructor as part of your
-- plugin's @installCoreToDos@ function. See "Language.SequentCore.Dump" for an
-- example and the GHC manual for more details.
sequentPass :: ([SeqCoreBind] -> CoreM [SeqCoreBind])
-- ^ A processing function. May assume that there are no shadowed
-- identifiers in the given binders (this is ensured by a call to
-- 'deShadowBinds').
-> (ModGuts -> CoreM ModGuts)
sequentPass process =
bindsOnlyPass (fmap bindsToCore . process . fromCoreModule . deShadowBinds)
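-- A hedged sketch of a plugin module using 'sequentPass' (the pass name and
-- the identity processing function are assumptions made for illustration; see
-- the GHC manual for the full plugin API):
--
-- > plugin :: Plugin
-- > plugin = defaultPlugin { installCoreToDos = install }
-- >   where
-- >     install _ todos =
-- >       return (CoreDoPluginPass "sequent-core-pass" (sequentPass return) : todos)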
-- | Similar to 'sequentPass', but takes a 'SeqFlags' for use by the
-- translation.
sequentPassWithFlags :: SeqFlags
-> ([SeqCoreBind] -> CoreM [SeqCoreBind])
-> (ModGuts -> CoreM ModGuts)
sequentPassWithFlags sflags process =
bindsOnlyPass $ \binds -> do
term <- fromCoreModuleM sflags (deShadowBinds binds)
term' <- process term
return $ bindsToCore term'
| lukemaurer/sequent-core | src/Language/SequentCore/Plugin.hs | bsd-3-clause | 1,741 | 0 | 12 | 397 | 229 | 132 | 97 | 20 | 1 |
{-# LANGUAGE BangPatterns, DeriveDataTypeable, DeriveGeneric, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Web.RTBBidder.Protocol.Adx.BidRequest.AdSlot.NativeAdTemplate.Fields (Fields(..)) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified GHC.Generics as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data Fields = NO_FIELDS
| HEADLINE
| BODY
| CALL_TO_ACTION
| ADVERTISER
| IMAGE
| LOGO
| APP_ICON
| STAR_RATING
| PRICE
| STORE
| VIDEO
deriving (Prelude'.Read, Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data, Prelude'.Generic)
instance P'.Mergeable Fields
instance Prelude'.Bounded Fields where
minBound = NO_FIELDS
maxBound = VIDEO
instance P'.Default Fields where
defaultValue = NO_FIELDS
toMaybe'Enum :: Prelude'.Int -> P'.Maybe Fields
toMaybe'Enum 0 = Prelude'.Just NO_FIELDS
toMaybe'Enum 1 = Prelude'.Just HEADLINE
toMaybe'Enum 2 = Prelude'.Just BODY
toMaybe'Enum 4 = Prelude'.Just CALL_TO_ACTION
toMaybe'Enum 8 = Prelude'.Just ADVERTISER
toMaybe'Enum 16 = Prelude'.Just IMAGE
toMaybe'Enum 32 = Prelude'.Just LOGO
toMaybe'Enum 64 = Prelude'.Just APP_ICON
toMaybe'Enum 128 = Prelude'.Just STAR_RATING
toMaybe'Enum 256 = Prelude'.Just PRICE
toMaybe'Enum 512 = Prelude'.Just STORE
toMaybe'Enum 1024 = Prelude'.Just VIDEO
toMaybe'Enum _ = Prelude'.Nothing
instance Prelude'.Enum Fields where
fromEnum NO_FIELDS = 0
fromEnum HEADLINE = 1
fromEnum BODY = 2
fromEnum CALL_TO_ACTION = 4
fromEnum ADVERTISER = 8
fromEnum IMAGE = 16
fromEnum LOGO = 32
fromEnum APP_ICON = 64
fromEnum STAR_RATING = 128
fromEnum PRICE = 256
fromEnum STORE = 512
fromEnum VIDEO = 1024
toEnum
= P'.fromMaybe
(Prelude'.error
"hprotoc generated code: toEnum failure for type Web.RTBBidder.Protocol.Adx.BidRequest.AdSlot.NativeAdTemplate.Fields")
. toMaybe'Enum
succ NO_FIELDS = HEADLINE
succ HEADLINE = BODY
succ BODY = CALL_TO_ACTION
succ CALL_TO_ACTION = ADVERTISER
succ ADVERTISER = IMAGE
succ IMAGE = LOGO
succ LOGO = APP_ICON
succ APP_ICON = STAR_RATING
succ STAR_RATING = PRICE
succ PRICE = STORE
succ STORE = VIDEO
succ _
= Prelude'.error
"hprotoc generated code: succ failure for type Web.RTBBidder.Protocol.Adx.BidRequest.AdSlot.NativeAdTemplate.Fields"
pred HEADLINE = NO_FIELDS
pred BODY = HEADLINE
pred CALL_TO_ACTION = BODY
pred ADVERTISER = CALL_TO_ACTION
pred IMAGE = ADVERTISER
pred LOGO = IMAGE
pred APP_ICON = LOGO
pred STAR_RATING = APP_ICON
pred PRICE = STAR_RATING
pred STORE = PRICE
pred VIDEO = STORE
pred _
= Prelude'.error
"hprotoc generated code: pred failure for type Web.RTBBidder.Protocol.Adx.BidRequest.AdSlot.NativeAdTemplate.Fields"
instance P'.Wire Fields where
wireSize ft' enum = P'.wireSize ft' (Prelude'.fromEnum enum)
wirePut ft' enum = P'.wirePut ft' (Prelude'.fromEnum enum)
wireGet 14 = P'.wireGetEnum toMaybe'Enum
wireGet ft' = P'.wireGetErr ft'
wireGetPacked 14 = P'.wireGetPackedEnum toMaybe'Enum
wireGetPacked ft' = P'.wireGetErr ft'
instance P'.GPB Fields
instance P'.MessageAPI msg' (msg' -> Fields) Fields where
getVal m' f' = f' m'
instance P'.ReflectEnum Fields where
reflectEnum
= [(0, "NO_FIELDS", NO_FIELDS), (1, "HEADLINE", HEADLINE), (2, "BODY", BODY), (4, "CALL_TO_ACTION", CALL_TO_ACTION),
(8, "ADVERTISER", ADVERTISER), (16, "IMAGE", IMAGE), (32, "LOGO", LOGO), (64, "APP_ICON", APP_ICON),
(128, "STAR_RATING", STAR_RATING), (256, "PRICE", PRICE), (512, "STORE", STORE), (1024, "VIDEO", VIDEO)]
reflectEnumInfo _
= P'.EnumInfo
(P'.makePNF (P'.pack ".Adx.BidRequest.AdSlot.NativeAdTemplate.Fields") ["Web", "RTBBidder", "Protocol"]
["Adx", "BidRequest", "AdSlot", "NativeAdTemplate"]
"Fields")
["Web", "RTBBidder", "Protocol", "Adx", "BidRequest", "AdSlot", "NativeAdTemplate", "Fields.hs"]
[(0, "NO_FIELDS"), (1, "HEADLINE"), (2, "BODY"), (4, "CALL_TO_ACTION"), (8, "ADVERTISER"), (16, "IMAGE"), (32, "LOGO"),
(64, "APP_ICON"), (128, "STAR_RATING"), (256, "PRICE"), (512, "STORE"), (1024, "VIDEO")]
instance P'.TextType Fields where
tellT = P'.tellShow
getT = P'.getRead | hiratara/hs-rtb-bidder | src/Web/RTBBidder/Protocol/Adx/BidRequest/AdSlot/NativeAdTemplate/Fields.hs | bsd-3-clause | 4,477 | 0 | 11 | 861 | 1,253 | 696 | 557 | 114 | 1 |
-- |
-- Helper methods used to construct requests.
--
module Network.TableStorage.Request (
propertyList,
entityKeyResource,
columnToTypeString,
printEntityColumn,
printComparisonType,
buildFilterString,
buildQueryString
) where
import Data.Time ( formatTime )
import System.Locale ( defaultTimeLocale )
import Data.Maybe ( fromMaybe )
import Data.List ( intercalate )
import Text.XML.Light.Types ( elAttribs )
import Text.XML.Light
( Element(elContent, elName),
Content(Elem),
Attr(Attr),
blank_element )
import Network.TableStorage.Types
( EntityFilter(..),
ComparisonType(..),
EntityQuery(eqFilter, eqPageSize),
EntityColumn(..),
EntityKey(ekPartitionKey, ekRowKey) )
import Network.TableStorage.XML ( cDataText )
import Network.TableStorage.Atom
( qualifyDataServices, qualifyMetadata )
import Network.TableStorage.Format ( atomDateFormat )
import Network.HTTP.Base ( urlEncode )
-- |
-- Formats a list of entity properties for inclusion in an Atom entry.
--
propertyList :: [(String, EntityColumn)] -> Element
propertyList props =
blank_element { elName = qualifyMetadata "properties",
elContent = map property props } where
property (key, value) =
let stringValue = printEntityColumn value in
Elem blank_element { elName = qualifyDataServices key,
elAttribs = [ Attr (qualifyMetadata "type") $ columnToTypeString value,
Attr (qualifyMetadata "null") $ maybe "true" (const "false") stringValue ],
elContent = cDataText $ fromMaybe "" stringValue }
-- |
-- Constructs relative URIs which refer to the entity with the specified table name
-- and entity key.
--
entityKeyResource :: String -> EntityKey -> String
entityKeyResource tableName key = "/" ++ tableName ++ "(PartitionKey='" ++ ekPartitionKey key ++ "',RowKey='" ++ ekRowKey key ++ "')"
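-- For example, a key with partition key @p1@ and row key @r1@ in table
-- @People@ yields @/People(PartitionKey='p1',RowKey='r1')@.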
-- |
-- Converts an entity column into its type name
--
columnToTypeString :: EntityColumn -> String
columnToTypeString (EdmBinary _) = "Edm.Binary"
columnToTypeString (EdmBoolean _) = "Edm.Boolean"
columnToTypeString (EdmDateTime _) = "Edm.DateTime"
columnToTypeString (EdmDouble _) = "Edm.Double"
columnToTypeString (EdmGuid _) = "Edm.Guid"
columnToTypeString (EdmInt32 _) = "Edm.Int32"
columnToTypeString (EdmInt64 _) = "Edm.Int64"
columnToTypeString (EdmString _) = "Edm.String"
-- |
-- Formats a column value to appear in the body of an Atom entry
--
printEntityColumn :: EntityColumn -> Maybe String
printEntityColumn (EdmBinary (Just val)) = Just val
printEntityColumn (EdmBoolean (Just True)) = Just "true"
printEntityColumn (EdmBoolean (Just False)) = Just "false"
printEntityColumn (EdmDateTime (Just val)) = Just $ formatTime defaultTimeLocale atomDateFormat val
printEntityColumn (EdmDouble (Just val)) = Just $ show val
printEntityColumn (EdmGuid (Just val)) = Just val
printEntityColumn (EdmInt32 (Just val)) = Just $ show val
printEntityColumn (EdmInt64 (Just val)) = Just $ show val
printEntityColumn (EdmString (Just val)) = Just val
printEntityColumn _ = Nothing
-- |
-- Formats a comparison type to appear in the query string
--
printComparisonType :: ComparisonType -> String
printComparisonType Equal = "eq"
printComparisonType GreaterThan = "gt"
printComparisonType GreaterThanOrEqual = "ge"
printComparisonType LessThan = "lt"
printComparisonType LessThanOrEqual = "le"
printComparisonType NotEqual = "ne"
-- |
-- Converts entity filter values into strings to appear in the filter
-- portion of the Query Entities URI.
--
buildFilterString :: EntityFilter -> String
buildFilterString (And fs) = '(' : intercalate "%20and%20" (map buildFilterString fs) ++ ")"
buildFilterString (Or fs) = '(' : intercalate "%20or%20" (map buildFilterString fs) ++ ")"
buildFilterString (Not f) =
"(not%20"
++ buildFilterString f
++ ")"
buildFilterString (CompareBoolean prop val) =
urlEncode prop
++ "%20eq%20"
++ if val then "true" else "false"
buildFilterString (CompareDateTime prop cmp val) =
urlEncode prop
++ "%20"
++ printComparisonType cmp
++ "%20datetime'"
++ formatTime defaultTimeLocale atomDateFormat val
++ "'"
buildFilterString (CompareDouble prop cmp val) =
urlEncode prop
++ "%20"
++ printComparisonType cmp
++ "%20"
++ show val
buildFilterString (CompareGuid prop val) =
urlEncode prop
++ "%20eq%20guid'"
++ val
++ "'"
buildFilterString (CompareInt32 prop cmp val) =
urlEncode prop
++ "%20"
++ printComparisonType cmp
++ "%20"
++ show val
buildFilterString (CompareInt64 prop cmp val) =
urlEncode prop
++ "%20"
++ printComparisonType cmp
++ "%20"
++ show val
buildFilterString (CompareString prop cmp val) =
urlEncode prop
++ "%20"
++ printComparisonType cmp
++ "%20'"
++ urlEncode val
++ "'"
-- |
-- Constructs the full query string for the Query Entities web method.
--
buildQueryString :: EntityQuery -> String
buildQueryString query =
"$filter="
++ maybe "" buildFilterString (eqFilter query)
++ "&$top="
++ maybe "" show (eqPageSize query) | paf31/tablestorage | src/Network/TableStorage/Request.hs | bsd-3-clause | 5,280 | 0 | 16 | 1,104 | 1,327 | 702 | 625 | 122 | 2 |
{-# LANGUAGE PolyKinds #-}
module Data.Flip
( Flip (..)
) where
newtype Flip f a b = Flip { getFlip :: f b a }
| sonyandy/unify | examples/unify-hm/Data/Flip.hs | bsd-3-clause | 126 | 0 | 7 | 40 | 37 | 25 | 12 | 4 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
module TransactionServer where
import System.Random
import Control.Monad.Trans.Except
import Control.Monad.Trans.Resource
import Control.Monad.IO.Class
import Data.Aeson
import Data.Aeson.TH
import Data.Bson.Generic
import GHC.Generics
import Network.Wai hiding(Response)
import Network.Wai.Handler.Warp
import Network.Wai.Logger
import Servant
import Servant.API
import Servant.Client
import System.IO
import System.Directory
import System.Environment (getArgs, getProgName, lookupEnv)
import System.Log.Formatter
import System.Log.Handler (setFormatter)
import System.Log.Handler.Simple
import System.Log.Handler.Syslog
import System.Log.Logger
import qualified Data.List as DL
import Data.Maybe (catMaybes)
import Data.Text (pack, unpack)
import Data.Time.Clock (UTCTime, getCurrentTime)
import Data.Time.Format (defaultTimeLocale, formatTime)
import Database.MongoDB
import Control.Monad (when)
import Network.HTTP.Client (newManager, defaultManagerSettings)
import CommonResources
import MongodbHelpers
type ApiHandler = ExceptT ServantErr IO
transactionApi :: Proxy TransactionApi
transactionApi = Proxy
server :: Server TransactionApi
server =
beginTrans :<|>
downloadTrans :<|>
uploadTrans :<|>
commitTrans
transactionApp :: Application
transactionApp = serve transactionApi server
directoryApi :: Proxy DirectoryApi
directoryApi = Proxy
join :: FileServer -> ClientM Response
open :: FileName -> ClientM File
close :: FileUpload -> ClientM Response
allfiles :: Ticket -> ClientM [String]
remove :: FileName -> ClientM Response
join :<|> open :<|> close :<|> allfiles :<|> remove = client directoryApi
runApp :: IO()
runApp = do
putStrLn ("Starting TransactionServer on port: " ++ transserverport)
run (read (transserverport) ::Int) transactionApp
beginTrans :: Ticket -> ApiHandler Response
beginTrans (Ticket ticket encryptedTimeout) = liftIO $ do
let sessionKey = encryptDecrypt sharedSecret ticket
let decryptedTimeout = decryptTime sharedSecret encryptedTimeout
putStrLn ("Checking Client Credentials...")
currentTime <- getCurrentTime
if (currentTime > decryptedTimeout) then do
putStrLn "Client session timeout"
return (Response (encryptDecrypt sessionKey "Failed"))
else do
putStrLn "Starting transaction"
putStrLn "Storing client sessionKey as transaction ID"
withMongoDbConnection $ upsert (select ["transactionID" =: sessionKey] "TRANSACTION_ID_RECORD") $ toBSON sessionKey
return (Response (encryptDecrypt sessionKey "Successful"))
downloadTrans :: FileName -> ApiHandler File
downloadTrans fileName@(FileName ticket encryptedTimeout encryptedFN) = liftIO $ do
let sessionKey = encryptDecrypt sharedSecret ticket
let decryptedTimeout = decryptTime sharedSecret encryptedTimeout
let decryptedFN = encryptDecrypt sessionKey encryptedFN
putStrLn ("Checking Client Credentials...")
currentTime <- getCurrentTime
if (currentTime > decryptedTimeout) then do
putStrLn "Client session timeout"
return (File "Failed" "Failed")
else do
manager <- newManager defaultManagerSettings
res <- runClientM (open fileName) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) ""))
case res of
Left err -> do
putStrLn (show err)
return (File "Failed" "Failed")
Right file -> do
putStrLn "Storing file transaction data"
withMongoDbConnection $ upsert (select ["userID" =: sessionKey, "transFileName" =: decryptedFN] "TRANSACTION_FILE_RECORD") $ toBSON (TransactionFile decryptedFN sessionKey)
return file
uploadTrans :: FileUpload -> ApiHandler Response
uploadTrans fileUpload@(FileUpload ticket encryptedTimeout (File encryptedFN encryptedFC)) = liftIO $ do
let sessionKey = encryptDecrypt sharedSecret ticket
let decryptedTimeout = decryptTime sharedSecret encryptedTimeout
let decryptedFN = encryptDecrypt sessionKey encryptedFN
putStrLn ("Checking Client Credentials...")
currentTime <- getCurrentTime
if (currentTime > decryptedTimeout) then do
putStrLn "Client session timeout"
return (Response (encryptDecrypt sessionKey "Failed"))
else do
manager <- newManager defaultManagerSettings
let tempFileName = encryptDecrypt sessionKey ("TMP~"++decryptedFN)
let fupload = FileUpload ticket encryptedTimeout (File tempFileName encryptedFC)
res <- runClientM (TransactionServer.close fupload) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) ""))
case res of
Left err -> do
putStrLn (show err)
return (Response (encryptDecrypt sessionKey "Failed"))
Right (Response response) -> do
let decryptedres = encryptDecrypt sessionKey response
putStrLn ("Uploaded temp file - " ++ decryptedres)
return (Response response)
commitTrans :: Ticket -> ApiHandler Response
commitTrans tic@(Ticket ticket encryptedTimeout) = liftIO $ do
let sessionKey = encryptDecrypt sharedSecret ticket
let decryptedTimeout = decryptTime sharedSecret encryptedTimeout
putStrLn ("Checking Client Credentials...")
currentTime <- getCurrentTime
if (currentTime > decryptedTimeout) then do
putStrLn "Client session timeout"
return (Response (encryptDecrypt sessionKey "Failed"))
else do
transactions <- liftIO $ withMongoDbConnection $ do
docs <- find (select ["userID" =: sessionKey] "TRANSACTION_FILE_RECORD") >>= drainCursor
return $ catMaybes $ DL.map (\ b -> fromBSON b :: Maybe TransactionFile) docs
mapM (commitfile tic) transactions
liftIO $ withMongoDbConnection $ do
delete (select ["userID" =: sessionKey] "TRANSACTION_FILE_RECORD")
liftIO $ withMongoDbConnection $ do
delete (select ["transactionID" =: sessionKey] "TRANSACTION_ID_RECORD")
return (Response (encryptDecrypt sessionKey "Successful"))
commitfile :: Ticket -> TransactionFile -> IO()
commitfile (Ticket ticket encryptedTimeout) (TransactionFile decryptedFN sessionKey) = liftIO $ do
putStrLn ("Commiting file: " ++ decryptedFN)
manager <- newManager defaultManagerSettings
let temp_file = encryptDecrypt sessionKey ("TMP~"++ decryptedFN)
let fileName = (FileName ticket encryptedTimeout temp_file)
res <- runClientM (open fileName) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) ""))
case res of
Left err -> putStrLn (show err)
Right (File encryptedFN encryptedFC) -> do
let fn = encryptDecrypt sessionKey encryptedFN
let temp = encryptDecrypt sessionKey temp_file
case (temp == fn) of
False -> putStrLn "Commit Failed"
True -> do
let fileupload = (FileUpload ticket encryptedTimeout (File (encryptDecrypt sessionKey decryptedFN) encryptedFC))
res <- runClientM (TransactionServer.close fileupload) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) ""))
case res of
Left err -> do putStrLn (show err)
Right (Response response) -> do
let uploadresponse = encryptDecrypt sessionKey response
putStrLn uploadresponse
case uploadresponse of
"Success" -> do
res <- runClientM (remove (FileName ticket encryptedTimeout temp_file)) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) ""))
case res of
Left err -> putStrLn (show err)
Right (Response response) -> putStrLn (encryptDecrypt sessionKey response)
_ -> putStrLn "Shouldnt get here"
| Garygunn94/DFS | TransactionServer/src/TransactionServer.hs | bsd-3-clause | 8,739 | 31 | 42 | 2,230 | 2,178 | 1,082 | 1,096 | 173 | 6 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Temperature.IT.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Temperature.IT.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "IT Tests"
[ makeCorpusTest [Seal Temperature] corpus
]
| facebookincubator/duckling | tests/Duckling/Temperature/IT/Tests.hs | bsd-3-clause | 515 | 0 | 9 | 77 | 79 | 50 | 29 | 11 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Rad.QL.Define.Schema where
import Data.Monoid ((<>))
import qualified Data.Trie as Trie
import Rad.QL.Internal.Types
import Rad.QL.AST
import Rad.QL.Types
import Rad.QL.Query
defineSchema :: forall m b. (GraphQLType OBJECT m b) => b -> Schema m
defineSchema root = Schema
{ typeDict = collectTypes Trie.empty [tdef]
, rootQuery = res
, rootQueryType = tdef
}
where rdef = def :: GraphQLTypeDef OBJECT m b
tdef = gqlTypeDef rdef
res ss = unpackSub $ (gqlResolve rdef) ss root
unpackSub (SubResult m) = return m
unpackSub (SubResultM m) = m
collectTypes :: TypeDict -> [TypeDef] -> TypeDict
collectTypes seen [] = seen
collectTypes seen (t:ts)
| Trie.member n seen = collectTypes seen ts
| otherwise = collectTypes (Trie.insert n t seen) ts'
where n = typeDefName t
ts' = case t of
TypeDefObject (ObjectTypeDef _ _ ifs fdefs) ->
ts <> [ TypeDefInterface ifdef | ifdef <- ifs ]
<> [ t' | FieldDef _ _ _ _ t' _ <- fdefs ]
<> [ t' | FieldDef _ _ argdefs _ _ _ <- fdefs
, InputValueDef _ _ _ t' _ <- argdefs
]
TypeDefInterface (InterfaceTypeDef _ _ fdefs) ->
ts <> [ t' | FieldDef _ _ _ _ t' _ <- fdefs ]
<> [ t' | FieldDef _ _ argdefs _ _ _ <- fdefs
, InputValueDef _ _ _ t' _ <- argdefs
]
TypeDefUnion (UnionTypeDef _ _ odefs) ->
ts <> [ TypeDefObject o | o <- odefs ]
_ -> ts
| jqyu/bustle-chi | src/Rad/QL/Define/Schema.hs | bsd-3-clause | 1,685 | 0 | 15 | 616 | 561 | 292 | 269 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Milter.Base (
Packet (..)
, getPacket
, getIP
, getKeyVal
, getBody
, negotiate
, accept, discard, hold, reject, continue
) where
import Blaze.ByteString.Builder
import Blaze.ByteString.Builder.Char8
import Control.Applicative
import Control.Monad
import qualified Data.ByteString as X (unpack)
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.IP
import Data.List (foldl')
import Data.Monoid
import System.IO
----------------------------------------------------------------
accept :: Handle -> IO ()
accept hdl = safePutPacket hdl $ Packet 'a' ""
discard :: Handle -> IO ()
discard hdl = safePutPacket hdl $ Packet 'd' ""
hold :: Handle -> IO ()
hold hdl = safePutPacket hdl $ Packet 't' ""
reject :: Handle -> IO ()
reject hdl = safePutPacket hdl $ Packet 'r' ""
continue :: Handle -> IO ()
continue hdl = safePutPacket hdl $ Packet 'c' ""
----------------------------------------------------------------
data Packet = Packet Char ByteString
getPacket :: Handle -> IO Packet
getPacket hdl = do
n <- fourBytesToInt <$> getNByte hdl 4
Packet <$> getCmd hdl <*> getNByte hdl (n - 1)
putPacket :: Handle -> Packet -> IO ()
putPacket hdl (Packet c bs) = do
let len = BS.length bs + 1
pkt = intToFourBytes len <> fromChar c <> fromByteString bs
BS.hPut hdl $ toByteString pkt
safePutPacket :: Handle -> Packet -> IO ()
safePutPacket hdl pkt = withOpenedHandleDo hdl $ putPacket hdl pkt
withOpenedHandleDo :: Handle -> IO () -> IO ()
withOpenedHandleDo hdl block = do
closed <- hIsClosed hdl
unless closed block
----------------------------------------------------------------
getKeyVal :: ByteString -> (ByteString, ByteString)
getKeyVal bs = (key,val)
where
kv = BS.split '\0' bs
key = kv !! 0
val = kv !! 1
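-- For example, the NUL-separated pair "j\0example.com" splits into
-- ("j", "example.com").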
----------------------------------------------------------------
getBody :: ByteString -> ByteString
getBody = BS.init -- removing the last '\0'
----------------------------------------------------------------
getIP :: ByteString -> IP
getIP bs
| fam == '4' = IPv4 . read $ adr
| otherwise = IPv6 . read $ adr
where
ip = BS.split '\0' bs !! 1
fam = BS.head ip
adr = BS.unpack $ BS.drop 3 ip
----------------------------------------------------------------
negotiate :: Handle -> IO ()
negotiate hdl = putPacket hdl negoPkt -- do NOT use safePutPacket
negoPkt :: Packet
negoPkt = Packet 'O' $ toByteString $ ver <> act <> pro
where
ver = intToFourBytes 2 -- Sendmail 8.13.8, sigh
act = intToFourBytes 0
pro = intToFourBytes noRcpt
noRcpt :: Int
noRcpt = 0x8
{- version 2 does not support, sigh
noUnknown = 0x100
noData = 0x200
-}
----------------------------------------------------------------
getNByte :: Handle -> Int -> IO ByteString
getNByte = BS.hGet
getCmd :: Handle -> IO Char
getCmd hdl = BS.head <$> BS.hGet hdl 1
fourBytesToInt :: ByteString -> Int
fourBytesToInt = foldl' (\a b -> a * 256 + b) 0 . map fromIntegral . X.unpack
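-- For example, the big-endian length prefix 0x00 0x00 0x01 0x02 decodes to
-- 258 (1 * 256 + 2); 'intToFourBytes' below is its counterpart for encoding.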
intToFourBytes :: Int -> Builder
intToFourBytes = fromInt32be . fromIntegral
{-
moddiv :: Int -> [Int]
moddiv q0 = [r4,r3,r2,r1]
where
(q1,r1) = q0 `divMod` 256
(q2,r2) = q1 `divMod` 256
(q3,r3) = q2 `divMod` 256
r4 = q3 `mod` 256
-}
| kazu-yamamoto/rpf | Milter/Base.hs | bsd-3-clause | 3,325 | 0 | 12 | 648 | 955 | 497 | 458 | 77 | 1 |
{-
Gifcurry
(C) 2017 David Lettier
lettier.com
-}
{-# LANGUAGE
NamedFieldPuns
, DuplicateRecordFields
#-}
module GuiKeyboard where
import Control.Monad
import Data.IORef
import Data.Word
import qualified GI.Gdk
import qualified GI.Gtk
import qualified GuiRecords as GR
import qualified GuiPreview
import GuiMisc
addKeyboardEventHandler
:: GR.GuiComponents
-> IO ()
addKeyboardEventHandler
[email protected]
{ GR.window
}
=
void
$ GI.Gtk.onWidgetKeyPressEvent window
$ keyboardEventHandler guiComponents
keyboardEventHandler
:: GR.GuiComponents
-> GI.Gdk.EventKey
-> IO Bool
keyboardEventHandler
[email protected]
{
}
eventKey
= do
keyValue <- GI.Gdk.getEventKeyKeyval eventKey
let isSeekLeft = isSeekLeftKey keyValue
let isSeekRight = isSeekRightKey keyValue
let isSeek = isSeekLeft || isSeekRight
when isSeek $ handleSeekKeys guiComponents isSeekLeft
return False
handleSeekKeys
:: GR.GuiComponents
-> Bool
-> IO ()
handleSeekKeys
[email protected]
{ GR.startTimeSpinButton
, GR.endTimeSpinButton
, GR.videoPreviewPauseToggleButton
, GR.guiInFilePropertiesRef
, GR.maybeVideoPreviewWidget = (Just _)
, GR.maybePlaybinElement = (Just _)
}
isSeekLeft
= do
(maybePlaybinDuration, maybePlaybinPosition)
<- GuiPreview.getPlaybinDurationAndPosition guiComponents
case (maybePlaybinDuration, maybePlaybinPosition) of
(Just _, Just playbinPosition) -> do
void
$ GI.Gtk.setToggleButtonActive videoPreviewPauseToggleButton True
startTime <-
secondsToNanoseconds
<$> GI.Gtk.spinButtonGetValue startTimeSpinButton
endTime <-
secondsToNanoseconds
<$> GI.Gtk.spinButtonGetValue endTimeSpinButton
GR.GuiInFileProperties
{ GR.inFileFps
} <- readIORef guiInFilePropertiesRef
let fps = if inFileFps <= 0 then 1 else inFileFps
let inc = doubleToInt64 $ (1 / fps) * nanosecondsInASecond
let seekTo = if isSeekLeft then playbinPosition - inc else playbinPosition + inc
let seekTo'
| seekTo >= endTime = endTime
| seekTo <= startTime = startTime
| otherwise = seekTo
GuiPreview.seekPlaybinElement
guiComponents
(Just seekTo')
(Just endTime)
_ -> return ()
handleSeekKeys _ _ = return ()
isSeekLeftKey
:: Word32
-> Bool
isSeekLeftKey GI.Gdk.KEY_less = True
isSeekLeftKey _ = False
isSeekRightKey
:: Word32
-> Bool
isSeekRightKey GI.Gdk.KEY_greater = True
isSeekRightKey _ = False
| lettier/gifcurry | src/gui/GuiKeyboard.hs | bsd-3-clause | 2,707 | 0 | 18 | 676 | 632 | 321 | 311 | 88 | 4 |
{-
This file is part of the Haskell package distinfo. It is subject to
the license terms in the LICENSE file found in the top-level directory
of this distribution and at git://pmade.com/distinfo/LICENSE. No part
of the distinfo package, including this file, may be copied, modified,
propagated, or distributed except according to the terms contained in
the LICENSE file.
-}
--------------------------------------------------------------------------------
module DistInfo (module Export) where
--------------------------------------------------------------------------------
import DistInfo.Server as Export
import DistInfo.Node as Export
| devalot/distinfo | src/DistInfo.hs | bsd-3-clause | 644 | 0 | 4 | 80 | 27 | 20 | 7 | 3 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{- |
Core types for content.
The whole site is a list of categories ('Category'). Categories have items
('Item') in them. Items have some sections (fields inside of 'Item'), as well
as traits ('Trait').
It is recommended to use lenses to access fields of various types. All those
lenses are exported from this module. Some lenses (like 'uid' and 'name') are
overloaded and can be used with many types.
-}
module Guide.Types.Core
(
Trait(..),
ItemKind(..),
hackageName,
ItemSection(..),
Item(..),
pros,
prosDeleted,
cons,
consDeleted,
ecosystem,
link,
kind,
Hue(..),
hueToDarkColor,
hueToLightColor,
CategoryStatus(..),
Category(..),
title,
status,
enabledSections,
groups,
items,
itemsDeleted,
categorySlug,
-- * Overloaded things
uid,
hasUid,
content,
name,
description,
notes,
created,
group_,
)
where
import Imports
-- Text
import qualified Data.Text.All as T
-- Containers
import qualified Data.Set as S
-- JSON
import qualified Data.Aeson as A
-- acid-state
import Data.SafeCopy hiding (kind)
import Data.SafeCopy.Migrate
import Guide.Markdown
import Guide.Utils
import Guide.Types.Hue
----------------------------------------------------------------------------
-- General notes on code
----------------------------------------------------------------------------
{-
If you want to add a field to one of the types, see Note [extending types].
For an explanation of deriveSafeCopySorted, see Note [acid-state].
-}
----------------------------------------------------------------------------
-- Trait
----------------------------------------------------------------------------
-- | A trait (pro or con). Traits are stored in items.
data Trait = Trait {
_traitUid :: Uid Trait,
_traitContent :: MarkdownInline }
deriving (Show, Generic, Data)
deriveSafeCopySorted 4 'extension ''Trait
makeFields ''Trait
changelog ''Trait (Current 4, Past 3) []
deriveSafeCopySorted 3 'base ''Trait_v3
instance A.ToJSON Trait where
toJSON = A.genericToJSON A.defaultOptions {
A.fieldLabelModifier = over _head toLower . drop (T.length "_trait") }
----------------------------------------------------------------------------
-- Item
----------------------------------------------------------------------------
-- | Kind of an item (items can be libraries, tools, etc).
data ItemKind
= Library (Maybe Text) -- Hackage name
| Tool (Maybe Text) -- Hackage name
| Other
deriving (Eq, Show, Generic, Data)
deriveSafeCopySimple 3 'extension ''ItemKind
hackageName :: Traversal' ItemKind (Maybe Text)
hackageName f (Library x) = Library <$> f x
hackageName f (Tool x) = Tool <$> f x
hackageName _ Other = pure Other
instance A.ToJSON ItemKind where
toJSON (Library x) = A.object [
"tag" A..= ("Library" :: Text),
"contents" A..= x ]
toJSON (Tool x) = A.object [
"tag" A..= ("Tool" :: Text),
"contents" A..= x ]
toJSON Other = A.object [
"tag" A..= ("Other" :: Text) ]
data ItemKind_v2
= Library_v2 (Maybe Text)
| Tool_v2 (Maybe Text)
| Other_v2
-- TODO: at the next migration change this to deriveSafeCopySimple!
deriveSafeCopy 2 'base ''ItemKind_v2
instance Migrate ItemKind where
type MigrateFrom ItemKind = ItemKind_v2
migrate (Library_v2 x) = Library x
migrate (Tool_v2 x) = Tool x
migrate Other_v2 = Other
-- | Different kinds of sections inside items. This type is only used for
-- '_categoryEnabledSections'.
data ItemSection
= ItemProsConsSection
| ItemEcosystemSection
| ItemNotesSection
deriving (Eq, Ord, Show, Generic, Data)
deriveSafeCopySimple 0 'base ''ItemSection
instance A.ToJSON ItemSection where
toJSON = A.genericToJSON A.defaultOptions
-- TODO: add a field like “people to ask on IRC about this library if you
-- need help”
-- | An item (usually a library). Items are stored in categories.
data Item = Item {
_itemUid :: Uid Item, -- ^ Item ID
_itemName :: Text, -- ^ Item title
_itemCreated :: UTCTime, -- ^ When the item was created
_itemGroup_ :: Maybe Text, -- ^ Item group (affects item's color)
_itemDescription :: MarkdownBlock, -- ^ Item summary
_itemPros :: [Trait], -- ^ Pros (positive traits)
_itemProsDeleted :: [Trait], -- ^ Deleted pros go here (so that
-- it'd be easy to restore them)
_itemCons :: [Trait], -- ^ Cons (negative traits)
_itemConsDeleted :: [Trait], -- ^ Deleted cons go here
_itemEcosystem :: MarkdownBlock, -- ^ The ecosystem section
_itemNotes :: MarkdownTree, -- ^ The notes section
_itemLink :: Maybe Url, -- ^ Link to homepage or something
_itemKind :: ItemKind -- ^ Is it a library, tool, etc
}
deriving (Show, Generic, Data)
deriveSafeCopySorted 11 'extension ''Item
makeFields ''Item
changelog ''Item (Current 11, Past 10) []
deriveSafeCopySorted 10 'base ''Item_v10
instance A.ToJSON Item where
toJSON = A.genericToJSON A.defaultOptions {
A.fieldLabelModifier = over _head toLower . drop (T.length "_item") }
----------------------------------------------------------------------------
-- Category
----------------------------------------------------------------------------
-- | Category status
data CategoryStatus
= CategoryStub -- ^ “Stub” = just created
| CategoryWIP -- ^ “WIP” = work in progress
| CategoryFinished -- ^ “Finished” = complete or nearly complete
deriving (Eq, Show, Generic, Data)
deriveSafeCopySimple 2 'extension ''CategoryStatus
instance A.ToJSON CategoryStatus where
toJSON = A.genericToJSON A.defaultOptions
data CategoryStatus_v1
= CategoryStub_v1
| CategoryWIP_v1
| CategoryMostlyDone_v1
| CategoryFinished_v1
deriveSafeCopySimple 1 'base ''CategoryStatus_v1
instance Migrate CategoryStatus where
type MigrateFrom CategoryStatus = CategoryStatus_v1
migrate CategoryStub_v1 = CategoryStub
migrate CategoryWIP_v1 = CategoryWIP
migrate CategoryMostlyDone_v1 = CategoryFinished
migrate CategoryFinished_v1 = CategoryFinished
-- | A category
data Category = Category {
_categoryUid :: Uid Category,
_categoryTitle :: Text,
-- | When the category was created
_categoryCreated :: UTCTime,
-- | The “grandcategory” of the category (“meta”, “basics”, etc)
_categoryGroup_ :: Text,
_categoryStatus :: CategoryStatus,
_categoryNotes :: MarkdownBlock,
-- | Items stored in the category
_categoryItems :: [Item],
-- | Items that were deleted from the category. We keep them here to make
-- it easier to restore them
_categoryItemsDeleted :: [Item],
-- | Enabled sections in this category. E.g, if this set contains
-- 'ItemNotesSection', then notes will be shown for each item
_categoryEnabledSections :: Set ItemSection,
-- | All groups of items belonging to the category, as well as their
-- colors. Storing colors explicitly lets us keep colors consistent when
-- all items in a group are deleted
_categoryGroups :: Map Text Hue
}
deriving (Show, Generic, Data)
deriveSafeCopySorted 11 'extension ''Category
makeFields ''Category
changelog ''Category (Current 11, Past 10)
[Removed "_categoryProsConsEnabled" [t|Bool|],
Removed "_categoryEcosystemEnabled" [t|Bool|],
Removed "_categoryNotesEnabled" [t|Bool|],
Added "_categoryEnabledSections" [hs|
S.fromList $ concat
[ [ItemProsConsSection | _categoryProsConsEnabled]
, [ItemEcosystemSection | _categoryEcosystemEnabled]
, [ItemNotesSection | _categoryNotesEnabled] ] |] ]
deriveSafeCopySorted 10 'extension ''Category_v10
changelog ''Category (Past 10, Past 9)
[Added "_categoryNotesEnabled" [hs|True|]]
deriveSafeCopySorted 9 'extension ''Category_v9
changelog ''Category (Past 9, Past 8) []
deriveSafeCopySorted 8 'base ''Category_v8
instance A.ToJSON Category where
toJSON = A.genericToJSON A.defaultOptions {
A.fieldLabelModifier = over _head toLower . drop (T.length "_category") }
-- | Category identifier (used in URLs). E.g. for a category with title
-- “Performance optimization” and UID “t3c9hwzo” the slug would be
-- @performance-optimization-t3c9hwzo@.
categorySlug :: Category -> Text
categorySlug category =
format "{}-{}" (makeSlug (category^.title)) (category^.uid)
----------------------------------------------------------------------------
-- Utils
----------------------------------------------------------------------------
-- | A useful predicate; @hasUid x@ compares given object's UID with @x@.
hasUid :: HasUid a (Uid u) => Uid u -> a -> Bool
hasUid u x = x^.uid == u
| aelve/hslibs | src/Guide/Types/Core.hs | bsd-3-clause | 9,043 | 0 | 13 | 1,793 | 1,669 | 942 | 727 | -1 | -1 |
-- | Testing what work is shared by GHC. In general you can't assume
-- work will be shared. Sometimes GHC will do CSE and top level
-- floating to share work but not for say papLots which really requires
-- some partial evalutation kind of work. (e.g Max's super evaluator)
module Main where
papLots :: [Double] -> Double -> Double
papLots xs n = n * sum'
where sum' = foldl ((+) . cos . tan . cos . tan . cos . sin . cos) 0 xs
main :: IO ()
main = do
let papShared = papLots [1..5000000]
print "Starting..."
print "First run..."
print $ papLots [1..5000000] 2
print "Second run..."
print $ papLots [1..5000000] 4
print "Third run..."
print $ papShared 3
print "Fourth run..."
print $ papShared 5
| dterei/Scraps | haskell/PapWorkShare.hs | bsd-3-clause | 745 | 0 | 15 | 180 | 201 | 98 | 103 | 16 | 1 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__
{-# LANGUAGE MagicHash, UnboxedTuples #-}
#endif
{-# OPTIONS_HADDOCK prune #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Trustworthy #-}
#endif
-- |
-- Module : Data.ByteString.Char8
-- Copyright : (c) Don Stewart 2006-2008
-- (c) Duncan Coutts 2006-2011
-- License : BSD-style
--
-- Maintainer : [email protected], [email protected]
-- Stability : stable
-- Portability : portable
--
-- Manipulate 'ByteString's using 'Char' operations. All Chars will be
-- truncated to 8 bits. It can be expected that these functions will run
-- at identical speeds to their 'Word8' equivalents in "Data.ByteString".
--
-- More specifically these byte strings are taken to be in the
-- subset of Unicode covered by code points 0-255. This covers
-- Unicode Basic Latin, Latin-1 Supplement and C0+C1 Controls.
--
-- See:
--
-- * <http://www.unicode.org/charts/>
--
-- * <http://www.unicode.org/charts/PDF/U0000.pdf>
--
-- * <http://www.unicode.org/charts/PDF/U0080.pdf>
--
-- This module is intended to be imported @qualified@, to avoid name
-- clashes with "Prelude" functions. eg.
--
-- > import qualified Data.ByteString.Char8 as C
--
-- The Char8 interface to bytestrings provides an instance of IsString
-- for the ByteString type, enabling you to use string literals, and
-- have them implicitly packed to ByteStrings.
-- Use @{-\# LANGUAGE OverloadedStrings \#-}@ to enable this.
--
module Data.ByteString.Char8 (
-- * The @ByteString@ type
ByteString, -- abstract, instances: Eq, Ord, Show, Read, Data, Typeable, Monoid
-- * Introducing and eliminating 'ByteString's
empty, -- :: ByteString
singleton, -- :: Char -> ByteString
pack, -- :: String -> ByteString
unpack, -- :: ByteString -> String
-- * Basic interface
cons, -- :: Char -> ByteString -> ByteString
snoc, -- :: ByteString -> Char -> ByteString
append, -- :: ByteString -> ByteString -> ByteString
head, -- :: ByteString -> Char
uncons, -- :: ByteString -> Maybe (Char, ByteString)
unsnoc, -- :: ByteString -> Maybe (ByteString, Char)
last, -- :: ByteString -> Char
tail, -- :: ByteString -> ByteString
init, -- :: ByteString -> ByteString
null, -- :: ByteString -> Bool
length, -- :: ByteString -> Int
-- * Transformating ByteStrings
map, -- :: (Char -> Char) -> ByteString -> ByteString
reverse, -- :: ByteString -> ByteString
intersperse, -- :: Char -> ByteString -> ByteString
intercalate, -- :: ByteString -> [ByteString] -> ByteString
transpose, -- :: [ByteString] -> [ByteString]
-- * Reducing 'ByteString's (folds)
foldl, -- :: (a -> Char -> a) -> a -> ByteString -> a
foldl', -- :: (a -> Char -> a) -> a -> ByteString -> a
foldl1, -- :: (Char -> Char -> Char) -> ByteString -> Char
foldl1', -- :: (Char -> Char -> Char) -> ByteString -> Char
foldr, -- :: (Char -> a -> a) -> a -> ByteString -> a
foldr', -- :: (Char -> a -> a) -> a -> ByteString -> a
foldr1, -- :: (Char -> Char -> Char) -> ByteString -> Char
foldr1', -- :: (Char -> Char -> Char) -> ByteString -> Char
-- ** Special folds
concat, -- :: [ByteString] -> ByteString
concatMap, -- :: (Char -> ByteString) -> ByteString -> ByteString
any, -- :: (Char -> Bool) -> ByteString -> Bool
all, -- :: (Char -> Bool) -> ByteString -> Bool
maximum, -- :: ByteString -> Char
minimum, -- :: ByteString -> Char
-- * Building ByteStrings
-- ** Scans
scanl, -- :: (Char -> Char -> Char) -> Char -> ByteString -> ByteString
scanl1, -- :: (Char -> Char -> Char) -> ByteString -> ByteString
scanr, -- :: (Char -> Char -> Char) -> Char -> ByteString -> ByteString
scanr1, -- :: (Char -> Char -> Char) -> ByteString -> ByteString
-- ** Accumulating maps
mapAccumL, -- :: (acc -> Char -> (acc, Char)) -> acc -> ByteString -> (acc, ByteString)
mapAccumR, -- :: (acc -> Char -> (acc, Char)) -> acc -> ByteString -> (acc, ByteString)
-- ** Generating and unfolding ByteStrings
replicate, -- :: Int -> Char -> ByteString
unfoldr, -- :: (a -> Maybe (Char, a)) -> a -> ByteString
unfoldrN, -- :: Int -> (a -> Maybe (Char, a)) -> a -> (ByteString, Maybe a)
-- * Substrings
-- ** Breaking strings
take, -- :: Int -> ByteString -> ByteString
drop, -- :: Int -> ByteString -> ByteString
splitAt, -- :: Int -> ByteString -> (ByteString, ByteString)
takeWhile, -- :: (Char -> Bool) -> ByteString -> ByteString
dropWhile, -- :: (Char -> Bool) -> ByteString -> ByteString
span, -- :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
spanEnd, -- :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
break, -- :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
breakEnd, -- :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
group, -- :: ByteString -> [ByteString]
groupBy, -- :: (Char -> Char -> Bool) -> ByteString -> [ByteString]
inits, -- :: ByteString -> [ByteString]
tails, -- :: ByteString -> [ByteString]
-- ** Breaking into many substrings
split, -- :: Char -> ByteString -> [ByteString]
splitWith, -- :: (Char -> Bool) -> ByteString -> [ByteString]
-- ** Breaking into lines and words
lines, -- :: ByteString -> [ByteString]
words, -- :: ByteString -> [ByteString]
unlines, -- :: [ByteString] -> ByteString
        unwords,                -- :: [ByteString] -> ByteString
-- * Predicates
isPrefixOf, -- :: ByteString -> ByteString -> Bool
isSuffixOf, -- :: ByteString -> ByteString -> Bool
isInfixOf, -- :: ByteString -> ByteString -> Bool
-- ** Search for arbitrary substrings
breakSubstring, -- :: ByteString -> ByteString -> (ByteString,ByteString)
findSubstring, -- :: ByteString -> ByteString -> Maybe Int
findSubstrings, -- :: ByteString -> ByteString -> [Int]
-- * Searching ByteStrings
-- ** Searching by equality
elem, -- :: Char -> ByteString -> Bool
notElem, -- :: Char -> ByteString -> Bool
-- ** Searching with a predicate
find, -- :: (Char -> Bool) -> ByteString -> Maybe Char
filter, -- :: (Char -> Bool) -> ByteString -> ByteString
-- partition -- :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
-- * Indexing ByteStrings
index, -- :: ByteString -> Int -> Char
elemIndex, -- :: Char -> ByteString -> Maybe Int
elemIndices, -- :: Char -> ByteString -> [Int]
elemIndexEnd, -- :: Char -> ByteString -> Maybe Int
findIndex, -- :: (Char -> Bool) -> ByteString -> Maybe Int
findIndices, -- :: (Char -> Bool) -> ByteString -> [Int]
count, -- :: Char -> ByteString -> Int
-- * Zipping and unzipping ByteStrings
zip, -- :: ByteString -> ByteString -> [(Char,Char)]
zipWith, -- :: (Char -> Char -> c) -> ByteString -> ByteString -> [c]
unzip, -- :: [(Char,Char)] -> (ByteString,ByteString)
-- * Ordered ByteStrings
sort, -- :: ByteString -> ByteString
-- * Reading from ByteStrings
readInt, -- :: ByteString -> Maybe (Int, ByteString)
readInteger, -- :: ByteString -> Maybe (Integer, ByteString)
-- * Low level CString conversions
-- ** Copying ByteStrings
copy, -- :: ByteString -> ByteString
-- ** Packing CStrings and pointers
packCString, -- :: CString -> IO ByteString
packCStringLen, -- :: CStringLen -> IO ByteString
-- ** Using ByteStrings as CStrings
useAsCString, -- :: ByteString -> (CString -> IO a) -> IO a
useAsCStringLen, -- :: ByteString -> (CStringLen -> IO a) -> IO a
-- * I\/O with 'ByteString's
-- | ByteString I/O uses binary mode, without any character decoding
-- or newline conversion. The fact that it does not respect the Handle
-- newline mode is considered a flaw and may be changed in a future version.
-- ** Standard input and output
getLine, -- :: IO ByteString
getContents, -- :: IO ByteString
putStr, -- :: ByteString -> IO ()
putStrLn, -- :: ByteString -> IO ()
interact, -- :: (ByteString -> ByteString) -> IO ()
-- ** Files
readFile, -- :: FilePath -> IO ByteString
writeFile, -- :: FilePath -> ByteString -> IO ()
appendFile, -- :: FilePath -> ByteString -> IO ()
-- mmapFile, -- :: FilePath -> IO ByteString
-- ** I\/O with Handles
hGetLine, -- :: Handle -> IO ByteString
hGetContents, -- :: Handle -> IO ByteString
hGet, -- :: Handle -> Int -> IO ByteString
hGetSome, -- :: Handle -> Int -> IO ByteString
hGetNonBlocking, -- :: Handle -> Int -> IO ByteString
hPut, -- :: Handle -> ByteString -> IO ()
hPutNonBlocking, -- :: Handle -> ByteString -> IO ByteString
hPutStr, -- :: Handle -> ByteString -> IO ()
hPutStrLn, -- :: Handle -> ByteString -> IO ()
) where
import qualified Prelude as P
import Prelude hiding (reverse,head,tail,last,init,null
,length,map,lines,foldl,foldr,unlines
,concat,any,take,drop,splitAt,takeWhile
,dropWhile,span,break,elem,filter,unwords
,words,maximum,minimum,all,concatMap
,scanl,scanl1,scanr,scanr1
,appendFile,readFile,writeFile
,foldl1,foldr1,replicate
,getContents,getLine,putStr,putStrLn,interact
,zip,zipWith,unzip,notElem)
import qualified Data.ByteString as B
import qualified Data.ByteString.Internal as B
import qualified Data.ByteString.Unsafe as B
-- Listy functions transparently exported
import Data.ByteString (empty,null,length,tail,init,append
,inits,tails,reverse,transpose
,concat,take,drop,splitAt,intercalate
,sort,isPrefixOf,isSuffixOf,isInfixOf
,findSubstring,findSubstrings,breakSubstring,copy,group
,getLine, getContents, putStr, interact
,hGetContents, hGet, hGetSome, hPut, hPutStr
,hGetLine, hGetNonBlocking, hPutNonBlocking
,packCString,packCStringLen
,useAsCString,useAsCStringLen
)
import Data.ByteString.Internal
import Data.Char ( isSpace )
import qualified Data.List as List (intersperse)
import System.IO (Handle,stdout,openBinaryFile,hClose,hFileSize,IOMode(..))
#ifndef __NHC__
import Control.Exception (bracket)
#else
import IO (bracket)
#endif
import Foreign
#define STRICT1(f) f a | a `seq` False = undefined
#define STRICT2(f) f a b | a `seq` b `seq` False = undefined
#define STRICT3(f) f a b c | a `seq` b `seq` c `seq` False = undefined
#define STRICT4(f) f a b c d | a `seq` b `seq` c `seq` d `seq` False = undefined
------------------------------------------------------------------------
-- | /O(1)/ Convert a 'Char' into a 'ByteString'
singleton :: Char -> ByteString
singleton = B.singleton . c2w
{-# INLINE singleton #-}
-- | /O(n)/ Convert a 'String' into a 'ByteString'
--
-- For applications with large numbers of string literals, pack can be a
-- bottleneck.
pack :: String -> ByteString
pack = packChars
#if !defined(__GLASGOW_HASKELL__)
{-# INLINE [1] pack #-}
{-# RULES
"ByteString pack/packAddress" forall s .
pack (unpackCString# s) = inlinePerformIO (B.unsafePackAddress s)
#-}
#endif
-- | /O(n)/ Converts a 'ByteString' to a 'String'.
unpack :: ByteString -> [Char]
unpack = B.unpackChars
{-# INLINE unpack #-}
infixr 5 `cons` --same as list (:)
infixl 5 `snoc`
-- | /O(n)/ 'cons' is analogous to (:) for lists, but of different
-- complexity, as it requires a memcpy.
cons :: Char -> ByteString -> ByteString
cons = B.cons . c2w
{-# INLINE cons #-}
-- | /O(n)/ Append a Char to the end of a 'ByteString'. Similar to
-- 'cons', this function performs a memcpy.
snoc :: ByteString -> Char -> ByteString
snoc p = B.snoc p . c2w
{-# INLINE snoc #-}
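-- For example (illustrative, not from the original docs):
--
-- > cons 'h' (pack "ello") == pack "hello"
-- > snoc (pack "hell") 'o' == pack "hello"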
-- | /O(1)/ Extract the head and tail of a ByteString, returning Nothing
-- if it is empty.
uncons :: ByteString -> Maybe (Char, ByteString)
uncons bs = case B.uncons bs of
Nothing -> Nothing
Just (w, bs') -> Just (w2c w, bs')
{-# INLINE uncons #-}
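-- For example (illustrative, not from the original docs):
--
-- > uncons (pack "abc") == Just ('a', pack "bc")
-- > uncons (pack "")    == Nothing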
-- | /O(1)/ Extract the 'init' and 'last' of a ByteString, returning Nothing
-- if it is empty.
unsnoc :: ByteString -> Maybe (ByteString, Char)
unsnoc bs = case B.unsnoc bs of
Nothing -> Nothing
Just (bs', w) -> Just (bs', w2c w)
{-# INLINE unsnoc #-}
-- | /O(1)/ Extract the first element of a ByteString, which must be non-empty.
head :: ByteString -> Char
head = w2c . B.head
{-# INLINE head #-}
-- | /O(1)/ Extract the last element of a packed string, which must be non-empty.
last :: ByteString -> Char
last = w2c . B.last
{-# INLINE last #-}
-- | /O(n)/ 'map' @f xs@ is the ByteString obtained by applying @f@ to each element of @xs@
map :: (Char -> Char) -> ByteString -> ByteString
map f = B.map (c2w . f . w2c)
{-# INLINE map #-}
-- | /O(n)/ The 'intersperse' function takes a Char and a 'ByteString'
-- and \`intersperses\' that Char between the elements of the
-- 'ByteString'. It is analogous to the intersperse function on Lists.
intersperse :: Char -> ByteString -> ByteString
intersperse = B.intersperse . c2w
{-# INLINE intersperse #-}
-- | 'foldl', applied to a binary operator, a starting value (typically
-- the left-identity of the operator), and a ByteString, reduces the
-- ByteString using the binary operator, from left to right.
foldl :: (a -> Char -> a) -> a -> ByteString -> a
foldl f = B.foldl (\a c -> f a (w2c c))
{-# INLINE foldl #-}
-- | 'foldl\'' is like foldl, but strict in the accumulator.
foldl' :: (a -> Char -> a) -> a -> ByteString -> a
foldl' f = B.foldl' (\a c -> f a (w2c c))
{-# INLINE foldl' #-}
-- | 'foldr', applied to a binary operator, a starting value
-- (typically the right-identity of the operator), and a packed string,
-- reduces the packed string using the binary operator, from right to left.
foldr :: (Char -> a -> a) -> a -> ByteString -> a
foldr f = B.foldr (\c a -> f (w2c c) a)
{-# INLINE foldr #-}
-- | 'foldr\'' is a strict variant of foldr
foldr' :: (Char -> a -> a) -> a -> ByteString -> a
foldr' f = B.foldr' (\c a -> f (w2c c) a)
{-# INLINE foldr' #-}
-- | 'foldl1' is a variant of 'foldl' that has no starting value
-- argument, and thus must be applied to non-empty 'ByteStrings'.
foldl1 :: (Char -> Char -> Char) -> ByteString -> Char
foldl1 f ps = w2c (B.foldl1 (\x y -> c2w (f (w2c x) (w2c y))) ps)
{-# INLINE foldl1 #-}
-- | A strict version of 'foldl1'
foldl1' :: (Char -> Char -> Char) -> ByteString -> Char
foldl1' f ps = w2c (B.foldl1' (\x y -> c2w (f (w2c x) (w2c y))) ps)
{-# INLINE foldl1' #-}
-- | 'foldr1' is a variant of 'foldr' that has no starting value argument,
-- and thus must be applied to non-empty 'ByteString's
foldr1 :: (Char -> Char -> Char) -> ByteString -> Char
foldr1 f ps = w2c (B.foldr1 (\x y -> c2w (f (w2c x) (w2c y))) ps)
{-# INLINE foldr1 #-}
-- | A strict variant of foldr1
foldr1' :: (Char -> Char -> Char) -> ByteString -> Char
foldr1' f ps = w2c (B.foldr1' (\x y -> c2w (f (w2c x) (w2c y))) ps)
{-# INLINE foldr1' #-}
-- | Map a function over a 'ByteString' and concatenate the results
concatMap :: (Char -> ByteString) -> ByteString -> ByteString
concatMap f = B.concatMap (f . w2c)
{-# INLINE concatMap #-}
-- | Applied to a predicate and a ByteString, 'any' determines if
-- any element of the 'ByteString' satisfies the predicate.
any :: (Char -> Bool) -> ByteString -> Bool
any f = B.any (f . w2c)
{-# INLINE any #-}
-- | Applied to a predicate and a 'ByteString', 'all' determines if
-- all elements of the 'ByteString' satisfy the predicate.
all :: (Char -> Bool) -> ByteString -> Bool
all f = B.all (f . w2c)
{-# INLINE all #-}
-- | 'maximum' returns the maximum value from a 'ByteString'
maximum :: ByteString -> Char
maximum = w2c . B.maximum
{-# INLINE maximum #-}
-- | 'minimum' returns the minimum value from a 'ByteString'
minimum :: ByteString -> Char
minimum = w2c . B.minimum
{-# INLINE minimum #-}
-- | The 'mapAccumL' function behaves like a combination of 'map' and
-- 'foldl'; it applies a function to each element of a ByteString,
-- passing an accumulating parameter from left to right, and returning a
-- final value of this accumulator together with the new list.
mapAccumL :: (acc -> Char -> (acc, Char)) -> acc -> ByteString -> (acc, ByteString)
mapAccumL f = B.mapAccumL (\acc w -> case f acc (w2c w) of (acc', c) -> (acc', c2w c))
-- | The 'mapAccumR' function behaves like a combination of 'map' and
-- 'foldr'; it applies a function to each element of a ByteString,
-- passing an accumulating parameter from right to left, and returning a
-- final value of this accumulator together with the new ByteString.
mapAccumR :: (acc -> Char -> (acc, Char)) -> acc -> ByteString -> (acc, ByteString)
mapAccumR f = B.mapAccumR (\acc w -> case f acc (w2c w) of (acc', c) -> (acc', c2w c))
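-- Illustrative example (not from the original source): threading a counter
-- through 'mapAccumL' while rewriting each element, using 'Data.Char.toUpper'
-- (assumed imported) as the element transformation.
--
-- > mapAccumL (\n c -> (n + 1, toUpper c)) 0 "abc" == (3, "ABC")
--
-- 'mapAccumR' takes the same shape of function but threads the accumulator
-- from the right end of the ByteString instead.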
-- | 'scanl' is similar to 'foldl', but returns a list of successive
-- reduced values from the left:
--
-- > scanl f z [x1, x2, ...] == [z, z `f` x1, (z `f` x1) `f` x2, ...]
--
-- Note that
--
-- > last (scanl f z xs) == foldl f z xs.
scanl :: (Char -> Char -> Char) -> Char -> ByteString -> ByteString
scanl f z = B.scanl (\a b -> c2w (f (w2c a) (w2c b))) (c2w z)
-- | 'scanl1' is a variant of 'scanl' that has no starting value argument:
--
-- > scanl1 f [x1, x2, ...] == [x1, x1 `f` x2, ...]
scanl1 :: (Char -> Char -> Char) -> ByteString -> ByteString
scanl1 f = B.scanl1 (\a b -> c2w (f (w2c a) (w2c b)))
-- | scanr is the right-to-left dual of scanl.
scanr :: (Char -> Char -> Char) -> Char -> ByteString -> ByteString
scanr f z = B.scanr (\a b -> c2w (f (w2c a) (w2c b))) (c2w z)
-- | 'scanr1' is a variant of 'scanr' that has no starting value argument.
scanr1 :: (Char -> Char -> Char) -> ByteString -> ByteString
scanr1 f = B.scanr1 (\a b -> c2w (f (w2c a) (w2c b)))
-- | /O(n)/ 'replicate' @n x@ is a ByteString of length @n@ with @x@
-- the value of every element. The following holds:
--
-- > replicate w c = fst (unfoldrN w (\u -> Just (u, u)) c)
--
-- This implementation uses @memset(3)@
replicate :: Int -> Char -> ByteString
replicate w = B.replicate w . c2w
{-# INLINE replicate #-}
-- | /O(n)/, where /n/ is the length of the result. The 'unfoldr'
-- function is analogous to the List \'unfoldr\'. 'unfoldr' builds a
-- ByteString from a seed value. The function takes the element and
-- returns 'Nothing' if it is done producing the ByteString or returns
-- 'Just' @(a,b)@, in which case, @a@ is the next character in the string,
-- and @b@ is the seed value for further production.
--
-- Examples:
--
-- > unfoldr (\x -> if x <= '9' then Just (x, succ x) else Nothing) '0' == "0123456789"
unfoldr :: (a -> Maybe (Char, a)) -> a -> ByteString
unfoldr f x0 = B.unfoldr (fmap k . f) x0
where k (i, j) = (c2w i, j)
-- | /O(n)/ Like 'unfoldr', 'unfoldrN' builds a ByteString from a seed
-- value. However, the length of the result is limited by the first
-- argument to 'unfoldrN'. This function is more efficient than 'unfoldr'
-- when the maximum length of the result is known.
--
-- The following equation relates 'unfoldrN' and 'unfoldr':
--
-- > unfoldrN n f s == take n (unfoldr f s)
unfoldrN :: Int -> (a -> Maybe (Char, a)) -> a -> (ByteString, Maybe a)
unfoldrN n f w = B.unfoldrN n ((k `fmap`) . f) w
where k (i,j) = (c2w i, j)
{-# INLINE unfoldrN #-}
-- | 'takeWhile', applied to a predicate @p@ and a ByteString @xs@,
-- returns the longest prefix (possibly empty) of @xs@ of elements that
-- satisfy @p@.
takeWhile :: (Char -> Bool) -> ByteString -> ByteString
takeWhile f = B.takeWhile (f . w2c)
{-# INLINE takeWhile #-}
-- | 'dropWhile' @p xs@ returns the suffix remaining after 'takeWhile' @p xs@.
dropWhile :: (Char -> Bool) -> ByteString -> ByteString
dropWhile f = B.dropWhile (f . w2c)
#if defined(__GLASGOW_HASKELL__)
{-# INLINE [1] dropWhile #-}
#endif
{-# RULES
"ByteString specialise dropWhile isSpace -> dropSpace"
dropWhile isSpace = dropSpace
#-}
-- | 'break' @p@ is equivalent to @'span' ('not' . p)@.
break :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
break f = B.break (f . w2c)
#if defined(__GLASGOW_HASKELL__)
{-# INLINE [1] break #-}
#endif
{-# RULES
"ByteString specialise break (x==)" forall x.
break ((==) x) = breakChar x
"ByteString specialise break (==x)" forall x.
break (==x) = breakChar x
#-}
-- INTERNAL:
-- | 'breakChar' breaks its ByteString argument at the first occurrence
-- of the specified Char. It is more efficient than 'break' as it is
-- implemented with @memchr(3)@. I.e.
--
-- > break (=='c') "abcd" == breakChar 'c' "abcd"
--
breakChar :: Char -> ByteString -> (ByteString, ByteString)
breakChar c p = case elemIndex c p of
Nothing -> (p,empty)
Just n -> (B.unsafeTake n p, B.unsafeDrop n p)
{-# INLINE breakChar #-}
-- | 'span' @p xs@ breaks the ByteString into two segments. It is
-- equivalent to @('takeWhile' p xs, 'dropWhile' p xs)@
span :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
span f = B.span (f . w2c)
{-# INLINE span #-}
-- | 'spanEnd' behaves like 'span' but from the end of the 'ByteString'.
-- We have
--
-- > spanEnd (not.isSpace) "x y z" == ("x y ","z")
--
-- and
--
-- > spanEnd (not . isSpace) ps
-- > ==
-- > let (x,y) = span (not.isSpace) (reverse ps) in (reverse y, reverse x)
--
spanEnd :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
spanEnd f = B.spanEnd (f . w2c)
{-# INLINE spanEnd #-}
-- | 'breakEnd' behaves like 'break' but from the end of the 'ByteString'
--
-- breakEnd p == spanEnd (not.p)
breakEnd :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
breakEnd f = B.breakEnd (f . w2c)
{-# INLINE breakEnd #-}
{-
-- | 'breakChar' breaks its ByteString argument at the first occurrence
-- of the specified Char. It is more efficient than 'break' as it is
-- implemented with @memchr(3)@. I.e.
--
-- > break (=='c') "abcd" == breakChar 'c' "abcd"
--
breakChar :: Char -> ByteString -> (ByteString, ByteString)
breakChar = B.breakByte . c2w
{-# INLINE breakChar #-}
-- | 'spanChar' breaks its ByteString argument at the first
-- occurrence of a Char other than its argument. It is more efficient
-- than 'span (==)'
--
-- > span (=='c') "abcd" == spanByte 'c' "abcd"
--
spanChar :: Char -> ByteString -> (ByteString, ByteString)
spanChar = B.spanByte . c2w
{-# INLINE spanChar #-}
-}
-- | /O(n)/ Break a 'ByteString' into pieces separated by the byte
-- argument, consuming the delimiter. I.e.
--
-- > split '\n' "a\nb\nd\ne" == ["a","b","d","e"]
-- > split 'a' "aXaXaXa" == ["","X","X","X",""]
-- > split 'x' "x" == ["",""]
--
-- and
--
-- > intercalate [c] . split c == id
-- > split == splitWith . (==)
--
-- As for all splitting functions in this library, this function does
-- not copy the substrings; it just constructs new 'ByteStrings' that
-- are slices of the original.
--
split :: Char -> ByteString -> [ByteString]
split = B.split . c2w
{-# INLINE split #-}
-- | /O(n)/ Splits a 'ByteString' into components delimited by
-- separators, where the predicate returns True for a separator element.
-- The resulting components do not contain the separators. Two adjacent
-- separators result in an empty component in the output. eg.
--
-- > splitWith (=='a') "aabbaca" == ["","","bb","c",""]
--
splitWith :: (Char -> Bool) -> ByteString -> [ByteString]
splitWith f = B.splitWith (f . w2c)
{-# INLINE splitWith #-}
-- the inline makes a big difference here.
{-
-- | Like 'splitWith', except that sequences of adjacent separators are
-- treated as a single separator. eg.
--
-- > tokens (=='a') "aabbaca" == ["bb","c"]
--
tokens :: (Char -> Bool) -> ByteString -> [ByteString]
tokens f = B.tokens (f . w2c)
{-# INLINE tokens #-}
-}
-- | The 'groupBy' function is the non-overloaded version of 'group'.
groupBy :: (Char -> Char -> Bool) -> ByteString -> [ByteString]
groupBy k = B.groupBy (\a b -> k (w2c a) (w2c b))
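-- Illustrative example (not from the original source): grouping runs of
-- equal characters, as with 'Data.List.groupBy'.
--
-- > groupBy (==) "aabbbc" == ["aa","bbb","c"]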
-- | /O(1)/ 'ByteString' index (subscript) operator, starting from 0.
index :: ByteString -> Int -> Char
index = (w2c .) . B.index
{-# INLINE index #-}
-- | /O(n)/ The 'elemIndex' function returns the index of the first
-- element in the given 'ByteString' which is equal (by memchr) to the
-- query element, or 'Nothing' if there is no such element.
elemIndex :: Char -> ByteString -> Maybe Int
elemIndex = B.elemIndex . c2w
{-# INLINE elemIndex #-}
-- | /O(n)/ The 'elemIndexEnd' function returns the last index of the
-- element in the given 'ByteString' which is equal to the query
-- element, or 'Nothing' if there is no such element. The following
-- holds:
--
-- > elemIndexEnd c xs ==
-- > (-) (length xs - 1) `fmap` elemIndex c (reverse xs)
--
elemIndexEnd :: Char -> ByteString -> Maybe Int
elemIndexEnd = B.elemIndexEnd . c2w
{-# INLINE elemIndexEnd #-}
-- | /O(n)/ The 'elemIndices' function extends 'elemIndex', by returning
-- the indices of all elements equal to the query element, in ascending order.
elemIndices :: Char -> ByteString -> [Int]
elemIndices = B.elemIndices . c2w
{-# INLINE elemIndices #-}
-- | The 'findIndex' function takes a predicate and a 'ByteString' and
-- returns the index of the first element in the ByteString satisfying the predicate.
findIndex :: (Char -> Bool) -> ByteString -> Maybe Int
findIndex f = B.findIndex (f . w2c)
{-# INLINE findIndex #-}
-- | The 'findIndices' function extends 'findIndex', by returning the
-- indices of all elements satisfying the predicate, in ascending order.
findIndices :: (Char -> Bool) -> ByteString -> [Int]
findIndices f = B.findIndices (f . w2c)
-- | count returns the number of times its argument appears in the ByteString
--
-- > count = length . elemIndices
--
-- Also
--
-- > count '\n' == length . lines
--
-- But more efficiently than using length on the intermediate list.
count :: Char -> ByteString -> Int
count c = B.count (c2w c)
-- | /O(n)/ 'elem' is the 'ByteString' membership predicate. This
-- implementation uses @memchr(3)@.
elem :: Char -> ByteString -> Bool
elem c = B.elem (c2w c)
{-# INLINE elem #-}
-- | /O(n)/ 'notElem' is the inverse of 'elem'
notElem :: Char -> ByteString -> Bool
notElem c = B.notElem (c2w c)
{-# INLINE notElem #-}
-- | /O(n)/ 'filter', applied to a predicate and a ByteString,
-- returns a ByteString containing those characters that satisfy the
-- predicate.
filter :: (Char -> Bool) -> ByteString -> ByteString
filter f = B.filter (f . w2c)
{-# INLINE filter #-}
{-
-- | /O(n)/ and /O(n\/c) space/ A first order equivalent of /filter .
-- (==)/, for the common case of filtering a single Char. It is more
-- efficient to use /filterChar/ in this case.
--
-- > filterChar == filter . (==)
--
-- filterChar is around 10x faster, and uses much less space, than its
-- filter equivalent
--
filterChar :: Char -> ByteString -> ByteString
filterChar c ps = replicate (count c ps) c
{-# INLINE filterChar #-}
{-# RULES
"ByteString specialise filter (== x)" forall x.
filter ((==) x) = filterChar x
"ByteString specialise filter (== x)" forall x.
filter (== x) = filterChar x
#-}
-}
-- | /O(n)/ The 'find' function takes a predicate and a ByteString,
-- and returns the first element matching the predicate, or 'Nothing'
-- if there is no such element.
find :: (Char -> Bool) -> ByteString -> Maybe Char
find f ps = w2c `fmap` B.find (f . w2c) ps
{-# INLINE find #-}
{-
-- | /O(n)/ A first order equivalent of /filter . (==)/, for the common
-- case of filtering a single Char. It is more efficient to use
-- filterChar in this case.
--
-- > filterChar == filter . (==)
--
-- filterChar is around 10x faster, and uses much less space, than its
-- filter equivalent
--
filterChar :: Char -> ByteString -> ByteString
filterChar c = B.filterByte (c2w c)
{-# INLINE filterChar #-}
-- | /O(n)/ A first order equivalent of /filter . (\/=)/, for the common
-- case of filtering a single Char out of a list. It is more efficient
-- to use /filterNotChar/ in this case.
--
-- > filterNotChar == filter . (/=)
--
-- filterNotChar is around 3x faster, and uses much less space, than its
-- filter equivalent
--
filterNotChar :: Char -> ByteString -> ByteString
filterNotChar c = B.filterNotByte (c2w c)
{-# INLINE filterNotChar #-}
-}
-- | /O(n)/ 'zip' takes two ByteStrings and returns a list of
-- corresponding pairs of Chars. If one input ByteString is short,
-- excess elements of the longer ByteString are discarded. This is
-- equivalent to a pair of 'unpack' operations, and so space
-- usage may be large for multi-megabyte ByteStrings
zip :: ByteString -> ByteString -> [(Char,Char)]
zip ps qs
| B.null ps || B.null qs = []
| otherwise = (unsafeHead ps, unsafeHead qs) : zip (B.unsafeTail ps) (B.unsafeTail qs)
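-- Illustrative example (not from the original source): the shorter input
-- bounds the length of the result.
--
-- > zip "abc" "xy" == [('a','x'),('b','y')]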
-- | 'zipWith' generalises 'zip' by zipping with the function given as
-- the first argument, instead of a tupling function. For example,
-- @'zipWith' (+)@ is applied to two ByteStrings to produce the list
-- of corresponding sums.
zipWith :: (Char -> Char -> a) -> ByteString -> ByteString -> [a]
zipWith f = B.zipWith ((. w2c) . f . w2c)
-- | 'unzip' transforms a list of pairs of Chars into a pair of
-- ByteStrings. Note that this performs two 'pack' operations.
unzip :: [(Char,Char)] -> (ByteString,ByteString)
unzip ls = (pack (P.map fst ls), pack (P.map snd ls))
{-# INLINE unzip #-}
-- | A variety of 'head' for non-empty ByteStrings. 'unsafeHead' omits
-- the check for the empty case, which is good for performance, but
-- there is an obligation on the programmer to provide a proof that the
-- ByteString is non-empty.
unsafeHead :: ByteString -> Char
unsafeHead = w2c . B.unsafeHead
{-# INLINE unsafeHead #-}
-- ---------------------------------------------------------------------
-- Things that depend on the encoding
{-# RULES
"ByteString specialise break -> breakSpace"
break isSpace = breakSpace
#-}
-- | 'breakSpace' returns the pair of ByteStrings when the argument is
-- broken at the first whitespace byte. I.e.
--
-- > break isSpace == breakSpace
--
breakSpace :: ByteString -> (ByteString,ByteString)
breakSpace (PS x s l) = inlinePerformIO $ withForeignPtr x $ \p -> do
i <- firstspace (p `plusPtr` s) 0 l
return $! case () of {_
| i == 0 -> (empty, PS x s l)
| i == l -> (PS x s l, empty)
| otherwise -> (PS x s i, PS x (s+i) (l-i))
}
{-# INLINE breakSpace #-}
firstspace :: Ptr Word8 -> Int -> Int -> IO Int
STRICT3(firstspace)
firstspace ptr n m
| n >= m = return n
| otherwise = do w <- peekByteOff ptr n
if (not . isSpaceWord8) w then firstspace ptr (n+1) m else return n
-- | 'dropSpace' efficiently returns the 'ByteString' argument with
-- white space Chars removed from the front. It is more efficient than
-- calling dropWhile for removing whitespace. I.e.
--
-- > dropWhile isSpace == dropSpace
--
dropSpace :: ByteString -> ByteString
dropSpace (PS x s l) = inlinePerformIO $ withForeignPtr x $ \p -> do
i <- firstnonspace (p `plusPtr` s) 0 l
return $! if i == l then empty else PS x (s+i) (l-i)
{-# INLINE dropSpace #-}
firstnonspace :: Ptr Word8 -> Int -> Int -> IO Int
STRICT3(firstnonspace)
firstnonspace ptr n m
| n >= m = return n
| otherwise = do w <- peekElemOff ptr n
if isSpaceWord8 w then firstnonspace ptr (n+1) m else return n
{-
-- | 'dropSpaceEnd' efficiently returns the 'ByteString' argument with
-- white space removed from the end. I.e.
--
-- > reverse . (dropWhile isSpace) . reverse == dropSpaceEnd
--
-- but it is more efficient than using multiple reverses.
--
dropSpaceEnd :: ByteString -> ByteString
dropSpaceEnd (PS x s l) = inlinePerformIO $ withForeignPtr x $ \p -> do
i <- lastnonspace (p `plusPtr` s) (l-1)
return $! if i == (-1) then empty else PS x s (i+1)
{-# INLINE dropSpaceEnd #-}
lastnonspace :: Ptr Word8 -> Int -> IO Int
STRICT2(lastnonspace)
lastnonspace ptr n
| n < 0 = return n
| otherwise = do w <- peekElemOff ptr n
if isSpaceWord8 w then lastnonspace ptr (n-1) else return n
-}
-- | 'lines' breaks a ByteString up into a list of ByteStrings at
-- newline Chars. The resulting strings do not contain newlines.
--
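-- Illustrative examples (not part of the original documentation):
--
-- > lines "one\ntwo\n" == ["one","two"]
-- > lines "one\n\ntwo" == ["one","","two"]
--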
lines :: ByteString -> [ByteString]
lines ps
| null ps = []
| otherwise = case search ps of
Nothing -> [ps]
Just n -> take n ps : lines (drop (n+1) ps)
where search = elemIndex '\n'
{-
-- Just as fast, but more complex. Should be much faster, I thought.
lines :: ByteString -> [ByteString]
lines (PS _ _ 0) = []
lines (PS x s l) = inlinePerformIO $ withForeignPtr x $ \p -> do
let ptr = p `plusPtr` s
STRICT1(loop)
loop n = do
let q = memchr (ptr `plusPtr` n) 0x0a (fromIntegral (l-n))
if q == nullPtr
then return [PS x (s+n) (l-n)]
else do let i = q `minusPtr` ptr
ls <- loop (i+1)
return $! PS x (s+n) (i-n) : ls
loop 0
-}
-- | 'unlines' is an inverse operation to 'lines'. It joins lines,
-- after appending a terminating newline to each.
unlines :: [ByteString] -> ByteString
unlines [] = empty
unlines ss = (concat $ List.intersperse nl ss) `append` nl -- half as much space
where nl = singleton '\n'
-- | 'words' breaks a ByteString up into a list of words, which
-- are delimited by Chars representing white space.
words :: ByteString -> [ByteString]
words = P.filter (not . B.null) . B.splitWith isSpaceWord8
{-# INLINE words #-}
-- | The 'unwords' function is analogous to the 'unlines' function, on words.
unwords :: [ByteString] -> ByteString
unwords = intercalate (singleton ' ')
{-# INLINE unwords #-}
-- ---------------------------------------------------------------------
-- Reading from ByteStrings
-- | readInt reads an Int from the beginning of the ByteString. If there is no
-- integer at the beginning of the string, it returns Nothing, otherwise
-- it just returns the int read, and the rest of the string.
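--
-- Illustrative examples (not part of the original documentation):
--
-- > readInt "123 rest" == Just (123, " rest")
-- > readInt "-42abc"   == Just (-42, "abc")
-- > readInt "abc"      == Nothing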
readInt :: ByteString -> Maybe (Int, ByteString)
readInt as
| null as = Nothing
| otherwise =
case unsafeHead as of
'-' -> loop True 0 0 (B.unsafeTail as)
'+' -> loop False 0 0 (B.unsafeTail as)
_ -> loop False 0 0 as
where loop :: Bool -> Int -> Int -> ByteString -> Maybe (Int, ByteString)
STRICT4(loop)
loop neg i n ps
| null ps = end neg i n ps
| otherwise =
case B.unsafeHead ps of
w | w >= 0x30
&& w <= 0x39 -> loop neg (i+1)
(n * 10 + (fromIntegral w - 0x30))
(B.unsafeTail ps)
| otherwise -> end neg i n ps
end _ 0 _ _ = Nothing
end True _ n ps = Just (negate n, ps)
end _ _ n ps = Just (n, ps)
-- | readInteger reads an Integer from the beginning of the ByteString. If
-- there is no integer at the beginning of the string, it returns Nothing,
-- otherwise it just returns the integer read, and the rest of the string.
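--
-- Illustrative example (not part of the original documentation):
--
-- > readInteger "123abc" == Just (123, "abc")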
readInteger :: ByteString -> Maybe (Integer, ByteString)
readInteger as
| null as = Nothing
| otherwise =
case unsafeHead as of
'-' -> first (B.unsafeTail as) >>= \(n, bs) -> return (-n, bs)
'+' -> first (B.unsafeTail as)
_ -> first as
where first ps | null ps = Nothing
| otherwise =
case B.unsafeHead ps of
w | w >= 0x30 && w <= 0x39 -> Just $
loop 1 (fromIntegral w - 0x30) [] (B.unsafeTail ps)
| otherwise -> Nothing
loop :: Int -> Int -> [Integer]
-> ByteString -> (Integer, ByteString)
STRICT4(loop)
loop d acc ns ps
| null ps = combine d acc ns empty
| otherwise =
case B.unsafeHead ps of
w | w >= 0x30 && w <= 0x39 ->
if d == 9 then loop 1 (fromIntegral w - 0x30)
(toInteger acc : ns)
(B.unsafeTail ps)
else loop (d+1)
(10*acc + (fromIntegral w - 0x30))
ns (B.unsafeTail ps)
| otherwise -> combine d acc ns ps
combine _ acc [] ps = (toInteger acc, ps)
combine d acc ns ps =
((10^d * combine1 1000000000 ns + toInteger acc), ps)
combine1 _ [n] = n
combine1 b ns = combine1 (b*b) $ combine2 b ns
combine2 b (n:m:ns) = let t = m*b + n in t `seq` (t : combine2 b ns)
combine2 _ ns = ns
------------------------------------------------------------------------
-- For non-binary text processing:
-- | Read an entire file strictly into a 'ByteString'. This is far more
-- efficient than reading the characters into a 'String' and then using
-- 'pack'. It also may be more efficient than opening the file and
-- reading it using hGet.
readFile :: FilePath -> IO ByteString
readFile f = bracket (openBinaryFile f ReadMode) hClose
(\h -> hFileSize h >>= hGet h . fromIntegral)
-- | Write a 'ByteString' to a file.
writeFile :: FilePath -> ByteString -> IO ()
writeFile f txt = bracket (openBinaryFile f WriteMode) hClose
(\h -> hPut h txt)
-- | Append a 'ByteString' to a file.
appendFile :: FilePath -> ByteString -> IO ()
appendFile f txt = bracket (openBinaryFile f AppendMode) hClose
(\h -> hPut h txt)
-- | Write a ByteString to a handle, appending a newline byte
hPutStrLn :: Handle -> ByteString -> IO ()
hPutStrLn h ps
| length ps < 1024 = hPut h (ps `B.snoc` 0x0a)
| otherwise = hPut h ps >> hPut h (B.singleton (0x0a)) -- don't copy
-- | Write a ByteString to stdout, appending a newline byte
putStrLn :: ByteString -> IO ()
putStrLn = hPutStrLn stdout
| markflorisson/hpack | testrepo/bytestring-0.10.2.0/Data/ByteString/Char8.hs | bsd-3-clause | 40,569 | 0 | 18 | 11,317 | 6,130 | 3,496 | 2,634 | -1 | -1 |
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE TemplateHaskell #-}
module Language.LSP.Types.SelectionRange where
import Data.Aeson.TH
import Language.LSP.Types.Common
import Language.LSP.Types.Location
import Language.LSP.Types.Progress
import Language.LSP.Types.StaticRegistrationOptions
import Language.LSP.Types.TextDocument
import Language.LSP.Types.Utils
data SelectionRangeClientCapabilities = SelectionRangeClientCapabilities
{ -- | Whether implementation supports dynamic registration for selection range providers. If this is set to 'True'
-- the client supports the new 'SelectionRangeRegistrationOptions' return value for the corresponding server
-- capability as well.
_dynamicRegistration :: Maybe Bool
}
deriving (Read, Show, Eq)
deriveJSON lspOptions ''SelectionRangeClientCapabilities
makeExtendingDatatype "SelectionRangeOptions" [''WorkDoneProgressOptions] []
deriveJSON lspOptions ''SelectionRangeOptions
makeExtendingDatatype
"SelectionRangeRegistrationOptions"
[ ''SelectionRangeOptions,
''TextDocumentRegistrationOptions,
''StaticRegistrationOptions
]
[]
deriveJSON lspOptions ''SelectionRangeRegistrationOptions
makeExtendingDatatype
"SelectionRangeParams"
[ ''WorkDoneProgressParams,
''PartialResultParams
]
[ ("_textDocument", [t|TextDocumentIdentifier|]),
("_positions", [t|List Position|])
]
deriveJSON lspOptions ''SelectionRangeParams
data SelectionRange = SelectionRange
{ -- | The 'range' of this selection range.
_range :: Range,
-- | The parent selection range containing this range. Therefore @parent.range@ must contain @this.range@.
_parent :: Maybe SelectionRange
}
deriving (Read, Show, Eq)
deriveJSON lspOptions ''SelectionRange
| alanz/haskell-lsp | lsp-types/src/Language/LSP/Types/SelectionRange.hs | mit | 1,753 | 0 | 9 | 231 | 275 | 159 | 116 | 35 | 0 |
-- |
-- Description : The convenience module to re-export public definitions
-- Copyright : (c) 2015 Egor Tensin <[email protected]>
-- License : MIT
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : Windows-only
--
-- An empty module to re-export everything required by the packaged
-- applications.
module WindowsEnv
( module WindowsEnv.Environment
) where
import WindowsEnv.Environment
| egor-tensin/windows-env | src/WindowsEnv.hs | mit | 445 | 0 | 5 | 78 | 27 | 21 | 6 | 3 | 0 |
module Data.Wright.Types where
import Data.Vector (Vector(..), vmap)
data XYZ t = XYZ t t t deriving (Show)
data LAB t = LAB t t t deriving (Show)
data RGB t = RGB t t t deriving (Show)
data Yxy t = Yxy t t t deriving (Show) -- "xyY"
type ℝ = Double
data Gamma = Gamma ℝ | LStar | SRGB
data Application = Graphics | Textiles --CIE94
data Model = Model
{ gamma :: Gamma
, white :: XYZ ℝ
, red :: Primary
, green :: Primary
, blue :: Primary
}
data Primary = Primary
{ x :: ℝ
, y :: ℝ
, z :: ℝ
}
instance Vector XYZ where
toVector (XYZ x y z) = (x, y, z)
fromVector = uncurry3 XYZ
instance Functor XYZ where
fmap = vmap
instance Vector LAB where
toVector (LAB l a b) = (l, a, b)
fromVector = uncurry3 LAB
instance Functor LAB where
fmap = vmap
instance Vector RGB where
toVector (RGB r g b) = (r, g, b)
fromVector = uncurry3 RGB
instance Functor RGB where
fmap = vmap
instance Vector Yxy where
toVector (Yxy y' x y) = (y', x, y)
fromVector = uncurry3 Yxy
instance Functor Yxy where
fmap = vmap
uncurry3 :: (a -> b -> c -> d) -> (a, b, c) -> d
uncurry3 f (a, b, c) = f a b c
| fmap-archive/wright | src/Data/Wright/Types.hs | mit | 1,149 | 0 | 9 | 298 | 497 | 283 | 214 | 41 | 1 |
module System.LookupEnv (lookupEnv) where
import System.Environment (getEnvironment)
lookupEnv :: String -> IO (Maybe String)
lookupEnv envVar = fmap (lookup envVar) $ getEnvironment
| yesodweb/clientsession | src/System/LookupEnv.hs | mit | 185 | 0 | 8 | 24 | 60 | 32 | 28 | 4 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Perform a build
module Stack.Build.Execute
( printPlan
, preFetch
, executePlan
-- * Running Setup.hs
, ExecuteEnv
, withExecuteEnv
, withSingleContext
) where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Concurrent.Async (withAsync, wait)
import Control.Concurrent.Execute
import Control.Concurrent.MVar.Lifted
import Control.Concurrent.STM
import Control.Exception.Enclosed (catchIO, tryIO)
import Control.Exception.Lifted
import Control.Monad (liftM, when, unless, void, join, filterM, (<=<))
import Control.Monad.Catch (MonadCatch, MonadMask)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (liftBaseWith)
import Control.Monad.Trans.Resource
import Data.Attoparsec.Text
import qualified Data.ByteString as S
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Text as CT
import Data.Either (isRight)
import Data.Foldable (forM_, any)
import Data.Function
import Data.IORef.RunOnce (runOnce)
import Data.List hiding (any)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Maybe.Extra (forMaybeM)
import Data.Monoid ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Streaming.Process as Process
import Data.Streaming.Process hiding (callProcess, env)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import Data.Time.Clock (getCurrentTime)
import Data.Traversable (forM)
import qualified Distribution.PackageDescription as C
import Distribution.System (OS (Windows),
Platform (Platform))
import Language.Haskell.TH as TH (location)
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO
import Prelude hiding (FilePath, writeFile, any)
import Stack.Build.Cache
import Stack.Build.Haddock
import Stack.Build.Installed
import Stack.Build.Source
import Stack.Constants
import Stack.Coverage
import Stack.Fetch as Fetch
import Stack.GhcPkg
import Stack.Package
import Stack.PackageDump
import Stack.Types
import Stack.Types.Internal
import Stack.Types.StackT
import qualified System.Directory as D
import System.Environment (getExecutablePath)
import System.Exit (ExitCode (ExitSuccess))
import qualified System.FilePath as FP
import System.IO
import System.PosixCompat.Files (createLink)
import System.Process.Log (showProcessArgDebug)
import System.Process.Read
import System.Process.Run
#if !MIN_VERSION_process(1,2,1)
import System.Process.Internals (createProcess_)
#endif
type M env m = (MonadIO m,MonadReader env m,HasHttpManager env,HasBuildConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,MonadMask m,HasLogLevel env,HasEnvConfig env,HasTerminal env, HasConfig env)
-- | Fetch the packages necessary for a build, for example in combination with a dry run.
preFetch :: M env m => Plan -> m ()
preFetch plan
| Set.null idents = $logDebug "Nothing to fetch"
| otherwise = do
$logDebug $ T.pack $
"Prefetching: " ++
intercalate ", " (map packageIdentifierString $ Set.toList idents)
menv <- getMinimalEnvOverride
fetchPackages menv idents
where
idents = Set.unions $ map toIdent $ Map.toList $ planTasks plan
toIdent (name, task) =
case taskType task of
TTLocal _ -> Set.empty
TTUpstream package _ -> Set.singleton $ PackageIdentifier
name
(packageVersion package)
-- | Print a description of build plan for human consumption.
printPlan :: M env m
=> Plan
-> m ()
printPlan plan = do
case Map.elems $ planUnregisterLocal plan of
[] -> $logInfo "No packages would be unregistered."
xs -> do
$logInfo "Would unregister locally:"
forM_ xs $ \(ident, mreason) -> $logInfo $ T.concat
[ T.pack $ packageIdentifierString ident
, case mreason of
Nothing -> ""
Just reason -> T.concat
[ " ("
, reason
, ")"
]
]
$logInfo ""
case Map.elems $ planTasks plan of
[] -> $logInfo "Nothing to build."
xs -> do
$logInfo "Would build:"
mapM_ ($logInfo . displayTask) xs
let hasTests = not . Set.null . testComponents . taskComponents
hasBenches = not . Set.null . benchComponents . taskComponents
tests = Map.elems $ Map.filter hasTests $ planFinals plan
benches = Map.elems $ Map.filter hasBenches $ planFinals plan
unless (null tests) $ do
$logInfo ""
$logInfo "Would test:"
mapM_ ($logInfo . displayTask) tests
unless (null benches) $ do
$logInfo ""
$logInfo "Would benchmark:"
mapM_ ($logInfo . displayTask) benches
$logInfo ""
case Map.toList $ planInstallExes plan of
[] -> $logInfo "No executables to be installed."
xs -> do
$logInfo "Would install executables:"
forM_ xs $ \(name, loc) -> $logInfo $ T.concat
[ name
, " from "
, case loc of
Snap -> "snapshot"
Local -> "local"
, " database"
]
-- | For a dry run
displayTask :: Task -> Text
displayTask task = T.pack $ concat
[ packageIdentifierString $ taskProvides task
, ": database="
, case taskLocation task of
Snap -> "snapshot"
Local -> "local"
, ", source="
, case taskType task of
TTLocal lp -> concat
[ toFilePath $ lpDir lp
]
TTUpstream _ _ -> "package index"
, if Set.null missing
then ""
else ", after: " ++ intercalate "," (map packageIdentifierString $ Set.toList missing)
]
where
missing = tcoMissing $ taskConfigOpts task
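-- For a local package the rendered line looks roughly like the following
-- (illustrative only; the package names and path are hypothetical, not
-- output captured from stack):
--
--   foo-1.2.3: database=local, source=/path/to/foo/, after: bar-0.1.0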
data ExecuteEnv = ExecuteEnv
{ eeEnvOverride :: !EnvOverride
, eeConfigureLock :: !(MVar ())
, eeInstallLock :: !(MVar ())
, eeBuildOpts :: !BuildOpts
, eeBaseConfigOpts :: !BaseConfigOpts
, eeGhcPkgIds :: !(TVar (Map PackageIdentifier Installed))
, eeTempDir :: !(Path Abs Dir)
, eeSetupHs :: !(Path Abs File)
-- ^ Temporary Setup.hs for simple builds
, eeSetupExe :: !(Maybe (Path Abs File))
-- ^ Compiled version of eeSetupHs
, eeCabalPkgVer :: !Version
, eeTotalWanted :: !Int
, eeWanted :: !(Set PackageName)
, eeLocals :: ![LocalPackage]
, eeGlobalDB :: !(Path Abs Dir)
, eeGlobalDumpPkgs :: !(Map GhcPkgId (DumpPackage () ()))
, eeSnapshotDumpPkgs :: !(TVar (Map GhcPkgId (DumpPackage () ())))
, eeLocalDumpPkgs :: !(TVar (Map GhcPkgId (DumpPackage () ())))
}
-- | Get a compiled Setup exe
getSetupExe :: M env m
=> Path Abs File -- ^ Setup.hs input file
-> Path Abs Dir -- ^ temporary directory
-> m (Maybe (Path Abs File))
getSetupExe setupHs tmpdir = do
wc <- getWhichCompiler
econfig <- asks getEnvConfig
platformDir <- platformGhcRelDir
let config = getConfig econfig
baseNameS = concat
[ "setup-Simple-Cabal-"
, versionString $ envConfigCabalVersion econfig
, "-"
, compilerVersionString $ envConfigCompilerVersion econfig
]
exeNameS = baseNameS ++
case configPlatform config of
Platform _ Windows -> ".exe"
_ -> ""
outputNameS =
case wc of
Ghc -> exeNameS
Ghcjs -> baseNameS ++ ".jsexe"
jsExeNameS =
baseNameS ++ ".jsexe"
setupDir =
configStackRoot config </>
$(mkRelDir "setup-exe-cache") </>
platformDir
exePath <- fmap (setupDir </>) $ parseRelFile exeNameS
jsExePath <- fmap (setupDir </>) $ parseRelDir jsExeNameS
exists <- liftIO $ D.doesFileExist $ toFilePath exePath
if exists
then return $ Just exePath
else do
tmpExePath <- fmap (setupDir </>) $ parseRelFile $ "tmp-" ++ exeNameS
tmpOutputPath <- fmap (setupDir </>) $ parseRelFile $ "tmp-" ++ outputNameS
tmpJsExePath <- fmap (setupDir </>) $ parseRelDir $ "tmp-" ++ jsExeNameS
liftIO $ D.createDirectoryIfMissing True $ toFilePath setupDir
menv <- getMinimalEnvOverride
let args =
[ "-clear-package-db"
, "-global-package-db"
, "-hide-all-packages"
, "-package"
, "base"
, "-package"
, "Cabal-" ++ versionString (envConfigCabalVersion econfig)
, toFilePath setupHs
, "-o"
, toFilePath tmpOutputPath
] ++
["-build-runner" | wc == Ghcjs]
runCmd' (\cp -> cp { std_out = UseHandle stderr }) (Cmd (Just tmpdir) (compilerExeName wc) menv args) Nothing
when (wc == Ghcjs) $ renameDir tmpJsExePath jsExePath
renameFile tmpExePath exePath
return $ Just exePath
-- | Execute a callback that takes an 'ExecuteEnv'.
withExecuteEnv :: M env m
=> EnvOverride
-> BuildOpts
-> BaseConfigOpts
-> [LocalPackage]
-> [DumpPackage () ()] -- ^ global packages
-> [DumpPackage () ()] -- ^ snapshot packages
-> [DumpPackage () ()] -- ^ local packages
-> (ExecuteEnv -> m a)
-> m a
withExecuteEnv menv bopts baseConfigOpts locals globalPackages snapshotPackages localPackages inner = do
withCanonicalizedSystemTempDirectory stackProgName $ \tmpdir -> do
configLock <- newMVar ()
installLock <- newMVar ()
idMap <- liftIO $ newTVarIO Map.empty
let setupHs = tmpdir </> $(mkRelFile "Setup.hs")
liftIO $ writeFile (toFilePath setupHs) "import Distribution.Simple\nmain = defaultMain"
setupExe <- getSetupExe setupHs tmpdir
cabalPkgVer <- asks (envConfigCabalVersion . getEnvConfig)
globalDB <- getGlobalDB menv =<< getWhichCompiler
snapshotPackagesTVar <- liftIO $ newTVarIO (toDumpPackagesByGhcPkgId snapshotPackages)
localPackagesTVar <- liftIO $ newTVarIO (toDumpPackagesByGhcPkgId localPackages)
inner ExecuteEnv
{ eeEnvOverride = menv
, eeBuildOpts = bopts
-- Uncertain as to why we cannot run configures in parallel. This appears
-- to be a Cabal library bug. Original issue:
-- https://github.com/fpco/stack/issues/84. Ideally we'd be able to remove
-- this.
, eeConfigureLock = configLock
, eeInstallLock = installLock
, eeBaseConfigOpts = baseConfigOpts
, eeGhcPkgIds = idMap
, eeTempDir = tmpdir
, eeSetupHs = setupHs
, eeSetupExe = setupExe
, eeCabalPkgVer = cabalPkgVer
, eeTotalWanted = length $ filter lpWanted locals
, eeWanted = wantedLocalPackages locals
, eeLocals = locals
, eeGlobalDB = globalDB
, eeGlobalDumpPkgs = toDumpPackagesByGhcPkgId globalPackages
, eeSnapshotDumpPkgs = snapshotPackagesTVar
, eeLocalDumpPkgs = localPackagesTVar
}
where
toDumpPackagesByGhcPkgId = Map.fromList . map (\dp -> (dpGhcPkgId dp, dp))
-- | Perform the actual plan
executePlan :: M env m
=> EnvOverride
-> BuildOpts
-> BaseConfigOpts
-> [LocalPackage]
-> [DumpPackage () ()] -- ^ global packages
-> [DumpPackage () ()] -- ^ snapshot packages
-> [DumpPackage () ()] -- ^ local packages
-> InstalledMap
-> Plan
-> m ()
executePlan menv bopts baseConfigOpts locals globalPackages snapshotPackages localPackages installedMap plan = do
withExecuteEnv menv bopts baseConfigOpts locals globalPackages snapshotPackages localPackages (executePlan' installedMap plan)
unless (Map.null $ planInstallExes plan) $ do
snapBin <- (</> bindirSuffix) `liftM` installationRootDeps
localBin <- (</> bindirSuffix) `liftM` installationRootLocal
destDir <- asks $ configLocalBin . getConfig
createTree destDir
destDir' <- liftIO . D.canonicalizePath . toFilePath $ destDir
isInPATH <- liftIO . fmap (any (FP.equalFilePath destDir')) . (mapM D.canonicalizePath <=< filterM D.doesDirectoryExist) $ (envSearchPath menv)
when (not isInPATH) $
$logWarn $ T.concat
[ "Installation path "
, T.pack destDir'
, " not found in PATH environment variable"
]
platform <- asks getPlatform
let ext =
case platform of
Platform _ Windows -> ".exe"
_ -> ""
currExe <- liftIO getExecutablePath -- needed for windows, see below
installed <- forMaybeM (Map.toList $ planInstallExes plan) $ \(name, loc) -> do
let bindir =
case loc of
Snap -> snapBin
Local -> localBin
mfp <- resolveFileMaybe bindir $ T.unpack name ++ ext
case mfp of
Nothing -> do
$logWarn $ T.concat
[ "Couldn't find executable "
, name
, " in directory "
, T.pack $ toFilePath bindir
]
return Nothing
Just file -> do
let destFile = destDir' FP.</> T.unpack name ++ ext
$logInfo $ T.concat
[ "Copying from "
, T.pack $ toFilePath file
, " to "
, T.pack destFile
]
liftIO $ case platform of
Platform _ Windows | FP.equalFilePath destFile currExe ->
windowsRenameCopy (toFilePath file) destFile
_ -> D.copyFile (toFilePath file) destFile
return $ Just (destDir', [T.append name (T.pack ext)])
let destToInstalled = Map.fromListWith (++) installed
unless (Map.null destToInstalled) $ $logInfo ""
forM_ (Map.toList destToInstalled) $ \(dest, executables) -> do
$logInfo $ T.concat
[ "Copied executables to "
, T.pack dest
, ":"]
forM_ executables $ \exe -> $logInfo $ T.append "- " exe
config <- asks getConfig
menv' <- liftIO $ configEnvOverride config EnvSettings
{ esIncludeLocals = True
, esIncludeGhcPackagePath = True
, esStackExe = True
, esLocaleUtf8 = False
}
forM_ (boptsExec bopts) $ \(cmd, args) -> do
$logProcessRun cmd args
callProcess (Cmd Nothing cmd menv' args)
-- | Windows can't write over the current executable. Instead, we rename the
-- current executable to something else and then do the copy.
windowsRenameCopy :: FilePath -> FilePath -> IO ()
windowsRenameCopy src dest = do
D.copyFile src new
D.renameFile dest old
D.renameFile new dest
where
new = dest ++ ".new"
old = dest ++ ".old"
-- | Perform the actual plan (internal)
executePlan' :: M env m
=> InstalledMap
-> Plan
-> ExecuteEnv
-> m ()
executePlan' installedMap0 plan ee@ExecuteEnv {..} = do
when (toCoverage $ boptsTestOpts eeBuildOpts) deleteHpcReports
wc <- getWhichCompiler
cv <- asks $ envConfigCompilerVersion . getEnvConfig
case Map.toList $ planUnregisterLocal plan of
[] -> return ()
ids -> do
localDB <- packageDatabaseLocal
forM_ ids $ \(id', (ident, mreason)) -> do
$logInfo $ T.concat
[ T.pack $ packageIdentifierString ident
, ": unregistering"
, case mreason of
Nothing -> ""
Just reason -> T.concat
[ " ("
, reason
, ")"
]
]
unregisterGhcPkgId eeEnvOverride wc cv localDB id' ident
liftIO $ atomically $ modifyTVar' eeLocalDumpPkgs $ \initMap ->
foldl' (flip Map.delete) initMap $ Map.keys (planUnregisterLocal plan)
-- Yes, we're explicitly discarding result values, which in general would
-- be bad. monad-unlift does this all properly at the type system level,
-- but I don't want to pull it in for this one use case, when we know that
-- stack is always using transformer stacks that are safe for this use case.
runInBase <- liftBaseWith $ \run -> return (void . run)
let actions = concatMap (toActions installedMap' runInBase ee) $ Map.elems $ Map.mergeWithKey
(\_ b f -> Just (Just b, Just f))
(fmap (\b -> (Just b, Nothing)))
(fmap (\f -> (Nothing, Just f)))
(planTasks plan)
(planFinals plan)
threads <- asks $ configJobs . getConfig
concurrentTests <- asks $ configConcurrentTests . getConfig
let keepGoing =
case boptsKeepGoing eeBuildOpts of
Just kg -> kg
Nothing -> boptsTests eeBuildOpts || boptsBenchmarks eeBuildOpts
concurrentFinal =
-- TODO it probably makes more sense to use a lock for test suites
-- and just have the execution blocked. Turning off all concurrency
-- on finals based on the --test option doesn't fit in well.
if boptsTests eeBuildOpts
then concurrentTests
else True
terminal <- asks getTerminal
errs <- liftIO $ runActions threads keepGoing concurrentFinal actions $ \doneVar -> do
let total = length actions
loop prev
| prev == total =
runInBase $ $logStickyDone ("Completed " <> T.pack (show total) <> " action(s).")
| otherwise = do
when terminal $ runInBase $
$logSticky ("Progress: " <> T.pack (show prev) <> "/" <> T.pack (show total))
done <- atomically $ do
done <- readTVar doneVar
check $ done /= prev
return done
loop done
if total > 1
then loop 0
else return ()
when (toCoverage $ boptsTestOpts eeBuildOpts) $ do
generateHpcUnifiedReport
generateHpcMarkupIndex
unless (null errs) $ throwM $ ExecutionFailure errs
when (boptsHaddock eeBuildOpts) $ do
snapshotDumpPkgs <- liftIO (readTVarIO eeSnapshotDumpPkgs)
localDumpPkgs <- liftIO (readTVarIO eeLocalDumpPkgs)
generateLocalHaddockIndex eeEnvOverride wc eeBaseConfigOpts localDumpPkgs eeLocals
generateDepsHaddockIndex eeEnvOverride wc eeBaseConfigOpts eeGlobalDumpPkgs snapshotDumpPkgs localDumpPkgs eeLocals
generateSnapHaddockIndex eeEnvOverride wc eeBaseConfigOpts eeGlobalDumpPkgs snapshotDumpPkgs
where
installedMap' = Map.difference installedMap0
$ Map.fromList
$ map (\(ident, _) -> (packageIdentifierName ident, ()))
$ Map.elems
$ planUnregisterLocal plan
toActions :: M env m
=> InstalledMap
-> (m () -> IO ())
-> ExecuteEnv
-> (Maybe Task, Maybe Task) -- build and final
-> [Action]
toActions installedMap runInBase ee (mbuild, mfinal) =
abuild ++ afinal
where
abuild =
case mbuild of
Nothing -> []
Just task@Task {..} ->
[ Action
{ actionId = ActionId taskProvides ATBuild
, actionDeps =
(Set.map (\ident -> ActionId ident ATBuild) (tcoMissing taskConfigOpts))
, actionDo = \ac -> runInBase $ singleBuild runInBase ac ee task installedMap False
}
]
afinal =
case mfinal of
Nothing -> []
Just task@Task {..} ->
(if taskAllInOne then [] else
[Action
{ actionId = ActionId taskProvides ATBuildFinal
, actionDeps = addBuild ATBuild
(Set.map (\ident -> ActionId ident ATBuild) (tcoMissing taskConfigOpts))
, actionDo = \ac -> runInBase $ singleBuild runInBase ac ee task installedMap True
}]) ++
[ Action
{ actionId = ActionId taskProvides ATFinal
, actionDeps = addBuild (if taskAllInOne then ATBuild else ATBuildFinal) Set.empty
, actionDo = \ac -> runInBase $ do
let comps = taskComponents task
tests = testComponents comps
benches = benchComponents comps
unless (Set.null tests) $ do
singleTest runInBase topts (Set.toList tests) ac ee task installedMap
unless (Set.null benches) $ do
singleBench runInBase beopts (Set.toList benches) ac ee task installedMap
}
]
where
addBuild aty =
case mbuild of
Nothing -> id
Just _ -> Set.insert $ ActionId taskProvides aty
bopts = eeBuildOpts ee
topts = boptsTestOpts bopts
beopts = boptsBenchmarkOpts bopts
-- | Generate the ConfigCache
getConfigCache :: MonadIO m
=> ExecuteEnv -> Task -> InstalledMap -> Bool -> Bool
-> m (Map PackageIdentifier GhcPkgId, ConfigCache)
getConfigCache ExecuteEnv {..} Task {..} installedMap enableTest enableBench = do
let extra =
-- We enable tests if the test suite dependencies are already
-- installed, so that we avoid unnecessary recompilation based on
-- cabal_macros.h changes when switching between 'stack build' and
-- 'stack test'. See:
-- https://github.com/commercialhaskell/stack/issues/805
case taskType of
TTLocal lp -> concat
[ ["--enable-tests" | enableTest || (depsPresent installedMap $ lpTestDeps lp)]
, ["--enable-benchmarks" | enableBench || (depsPresent installedMap $ lpBenchDeps lp)]
]
_ -> []
idMap <- liftIO $ readTVarIO eeGhcPkgIds
let getMissing ident =
case Map.lookup ident idMap of
Nothing -> error "singleBuild: invariant violated, missing package ID missing"
Just (Library ident' x) -> assert (ident == ident') $ Just (ident, x)
Just (Executable _) -> Nothing
missing' = Map.fromList $ mapMaybe getMissing $ Set.toList missing
TaskConfigOpts missing mkOpts = taskConfigOpts
opts = mkOpts missing'
allDeps = Set.fromList $ Map.elems missing' ++ Map.elems taskPresent
cache = ConfigCache
{ configCacheOpts = opts
{ coNoDirs = coNoDirs opts ++ map T.unpack extra
}
, configCacheDeps = allDeps
, configCacheComponents =
case taskType of
TTLocal lp -> Set.map renderComponent $ lpComponents lp
TTUpstream _ _ -> Set.empty
, configCacheHaddock =
shouldHaddockPackage eeBuildOpts eeWanted (packageIdentifierName taskProvides)
}
allDepsMap = Map.union missing' taskPresent
return (allDepsMap, cache)
-- | Ensure that the configuration for the package matches what is given
ensureConfig :: M env m
=> ConfigCache -- ^ newConfigCache
-> Path Abs Dir -- ^ package directory
-> ExecuteEnv
-> m () -- ^ announce
-> (Bool -> [String] -> m ()) -- ^ cabal
-> Path Abs File -- ^ .cabal file
-> m Bool
ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp = do
newCabalMod <- liftIO (fmap modTime (D.getModificationTime (toFilePath cabalfp)))
needConfig <-
if boptsReconfigure eeBuildOpts
then return True
else do
-- Determine the old and new configuration in the local directory, to
-- determine if we need to reconfigure.
mOldConfigCache <- tryGetConfigCache pkgDir
mOldCabalMod <- tryGetCabalMod pkgDir
return $ fmap configCacheOpts mOldConfigCache /= Just (configCacheOpts newConfigCache)
|| mOldCabalMod /= Just newCabalMod
let ConfigureOpts dirs nodirs = configCacheOpts newConfigCache
when needConfig $ withMVar eeConfigureLock $ \_ -> do
deleteCaches pkgDir
announce
menv <- getMinimalEnvOverride
let programNames =
if eeCabalPkgVer < $(mkVersion "1.22")
then ["ghc", "ghc-pkg"]
else ["ghc", "ghc-pkg", "ghcjs", "ghcjs-pkg"]
exes <- forM programNames $ \name -> do
mpath <- findExecutable menv name
return $ case mpath of
Nothing -> []
Just x -> return $ concat ["--with-", name, "=", toFilePath x]
cabal False $ "configure" : concat
[ concat exes
, dirs
, nodirs
]
writeConfigCache pkgDir newConfigCache
writeCabalMod pkgDir newCabalMod
return needConfig
announceTask :: MonadLogger m => Task -> Text -> m ()
announceTask task x = $logInfo $ T.concat
[ T.pack $ packageIdentifierString $ taskProvides task
, ": "
, x
]
withSingleContext :: M env m
=> (m () -> IO ())
-> ActionContext
-> ExecuteEnv
-> Task
-> Maybe (Map PackageIdentifier GhcPkgId)
-- ^ All dependencies' package ids to provide to Setup.hs. If
-- Nothing, just provide global and snapshot package
-- databases.
-> Maybe String
-> ( Package
-> Path Abs File
-> Path Abs Dir
-> (Bool -> [String] -> m ())
-> (Text -> m ())
-> Bool
-> Maybe (Path Abs File, Handle)
-> m a)
-> m a
withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffix inner0 =
withPackage $ \package cabalfp pkgDir ->
withLogFile package $ \mlogFile ->
withCabal package pkgDir mlogFile $ \cabal ->
inner0 package cabalfp pkgDir cabal announce console mlogFile
where
announce = announceTask task
wanted =
case taskType of
TTLocal lp -> lpWanted lp
TTUpstream _ _ -> False
console = wanted
&& all (\(ActionId ident _) -> ident == taskProvides) (Set.toList acRemaining)
&& eeTotalWanted == 1
withPackage inner =
case taskType of
TTLocal lp -> inner (lpPackage lp) (lpCabalFile lp) (lpDir lp)
TTUpstream package _ -> do
mdist <- liftM Just distRelativeDir
m <- unpackPackageIdents eeEnvOverride eeTempDir mdist $ Set.singleton taskProvides
case Map.toList m of
[(ident, dir)]
| ident == taskProvides -> do
let name = packageIdentifierName taskProvides
cabalfpRel <- parseRelFile $ packageNameString name ++ ".cabal"
let cabalfp = dir </> cabalfpRel
inner package cabalfp dir
_ -> error $ "withPackage: invariant violated: " ++ show m
withLogFile package inner
| console = inner Nothing
| otherwise = do
logPath <- buildLogPath package msuffix
createTree (parent logPath)
let fp = toFilePath logPath
bracket
(liftIO $ openBinaryFile fp WriteMode)
(liftIO . hClose)
$ \h -> inner (Just (logPath, h))
withCabal package pkgDir mlogFile inner = do
config <- asks getConfig
let envSettings = EnvSettings
{ esIncludeLocals = taskLocation task == Local
, esIncludeGhcPackagePath = False
, esStackExe = False
, esLocaleUtf8 = True
}
menv <- liftIO $ configEnvOverride config envSettings
-- When looking for ghc to build Setup.hs we want to ignore local binaries, see:
-- https://github.com/commercialhaskell/stack/issues/1052
menvWithoutLocals <- liftIO $ configEnvOverride config envSettings { esIncludeLocals = False }
getGhcPath <- runOnce $ liftIO $ join $ findExecutable menvWithoutLocals "ghc"
getGhcjsPath <- runOnce $ liftIO $ join $ findExecutable menvWithoutLocals "ghcjs"
distRelativeDir' <- distRelativeDir
esetupexehs <-
-- Avoid broken Setup.hs files causing problems for simple build
-- types, see:
-- https://github.com/commercialhaskell/stack/issues/370
case (packageSimpleType package, eeSetupExe) of
(True, Just setupExe) -> return $ Left setupExe
_ -> liftIO $ fmap Right $ getSetupHs pkgDir
inner $ \stripTHLoading args -> do
let cabalPackageArg =
"-package=" ++ packageIdentifierString
(PackageIdentifier cabalPackageName
eeCabalPkgVer)
packageArgs =
case mdeps of
-- This branch is taken when
-- 'explicit-setup-deps' is requested in your
-- stack.yaml file.
Just deps | explicitSetupDeps (packageName package) config ->
-- Stack always builds with the global Cabal for various
-- reproducibility issues.
let depsMinusCabal
= map ghcPkgIdString
$ Set.toList
$ addGlobalPackages deps (Map.elems eeGlobalDumpPkgs)
in
( "-clear-package-db"
: "-global-package-db"
: map (("-package-db=" ++) . toFilePathNoTrailingSep) (bcoExtraDBs eeBaseConfigOpts)
) ++
( ("-package-db=" ++ toFilePathNoTrailingSep (bcoSnapDB eeBaseConfigOpts))
: ("-package-db=" ++ toFilePathNoTrailingSep (bcoLocalDB eeBaseConfigOpts))
: "-hide-all-packages"
: cabalPackageArg
: map ("-package-id=" ++) depsMinusCabal
)
-- This branch is usually taken for builds, and
-- is always taken for `stack sdist`.
--
-- This approach is debatable. It adds access to the
-- snapshot package database for Cabal. There are two
-- possible objections:
--
-- 1. This doesn't isolate the build enough; arbitrary
-- other packages available could cause the build to
-- succeed or fail.
--
-- 2. This doesn't provide enough packages: we should also
-- include the local database when building local packages.
--
-- Currently, this branch is only taken via `stack
-- sdist` or when explicitly requested in the
-- stack.yaml file.
_ ->
cabalPackageArg
: "-clear-package-db"
: "-global-package-db"
: map (("-package-db=" ++) . toFilePathNoTrailingSep) (bcoExtraDBs eeBaseConfigOpts)
++ ["-package-db=" ++ toFilePathNoTrailingSep (bcoSnapDB eeBaseConfigOpts)]
setupArgs = ("--builddir=" ++ toFilePathNoTrailingSep distRelativeDir') : args
runExe exeName fullArgs = do
$logProcessRun (toFilePath exeName) fullArgs
-- Use createProcess_ to avoid the log file being closed afterwards
(Nothing, moutH, merrH, ph) <- liftIO $ createProcess_ "singleBuild" cp
let makeAbsolute = stripTHLoading -- If users want control, we should add a config option for this
ec <-
liftIO $
withAsync (runInBase $ maybePrintBuildOutput stripTHLoading makeAbsolute pkgDir LevelInfo mlogFile moutH) $ \outThreadID ->
withAsync (runInBase $ maybePrintBuildOutput False makeAbsolute pkgDir LevelWarn mlogFile merrH) $ \errThreadID -> do
ec <- waitForProcess ph
wait errThreadID
wait outThreadID
return ec
case ec of
ExitSuccess -> return ()
_ -> do
bss <-
case mlogFile of
Nothing -> return []
Just (logFile, h) -> do
liftIO $ hClose h
runResourceT
$ CB.sourceFile (toFilePath logFile)
=$= CT.decodeUtf8
$$ mungeBuildOutput stripTHLoading makeAbsolute pkgDir
=$ CL.consume
throwM $ CabalExitedUnsuccessfully
ec
taskProvides
exeName
fullArgs
(fmap fst mlogFile)
bss
where
cp0 = proc (toFilePath exeName) fullArgs
cp = cp0
{ cwd = Just $ toFilePath pkgDir
, Process.env = envHelper menv
-- Ideally we'd create a new pipe here and then close it
-- below to avoid the child process from taking from our
-- stdin. However, if we do this, the child process won't
-- be able to get the codepage on Windows that we want.
-- See:
-- https://github.com/commercialhaskell/stack/issues/738
-- , std_in = CreatePipe
, std_out =
case mlogFile of
Nothing -> CreatePipe
Just (_, h) -> UseHandle h
, std_err =
case mlogFile of
Nothing -> CreatePipe
Just (_, h) -> UseHandle h
}
wc <- getWhichCompiler
(exeName, fullArgs) <- case (esetupexehs, wc) of
(Left setupExe, _) -> return (setupExe, setupArgs)
(Right setuphs, compiler) -> do
distDir <- distDirFromDir pkgDir
let setupDir = distDir </> $(mkRelDir "setup")
outputFile = setupDir </> $(mkRelFile "setup")
createTree setupDir
compilerPath <-
case compiler of
Ghc -> getGhcPath
Ghcjs -> getGhcjsPath
runExe compilerPath $
[ "--make"
, "-odir", toFilePathNoTrailingSep setupDir
, "-hidir", toFilePathNoTrailingSep setupDir
, "-i", "-i."
] ++ packageArgs ++
[ toFilePath setuphs
, "-o", toFilePath outputFile
] ++
(case compiler of
Ghc -> []
Ghcjs -> ["-build-runner"])
return (outputFile, setupArgs)
runExe exeName $ (if boptsCabalVerbose eeBuildOpts then ("--verbose":) else id) fullArgs
maybePrintBuildOutput stripTHLoading makeAbsolute pkgDir level mlogFile mh =
case mh of
Just h ->
case mlogFile of
Just{} -> return ()
Nothing -> printBuildOutput stripTHLoading makeAbsolute pkgDir level h
Nothing -> return ()
singleBuild :: M env m
=> (m () -> IO ())
-> ActionContext
-> ExecuteEnv
-> Task
-> InstalledMap
-> Bool -- ^ Is this a final build?
-> m ()
singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap isFinalBuild = do
(allDepsMap, cache) <- getConfigCache ee task installedMap enableTests enableBenchmarks
mprecompiled <- getPrecompiled cache
minstalled <-
case mprecompiled of
Just precompiled -> copyPreCompiled precompiled
Nothing -> realConfigAndBuild cache allDepsMap
case minstalled of
Nothing -> return ()
Just installed -> do
writeFlagCache installed cache
liftIO $ atomically $ modifyTVar eeGhcPkgIds $ Map.insert taskProvides installed
where
pname = packageIdentifierName taskProvides
shouldHaddockPackage' = shouldHaddockPackage eeBuildOpts eeWanted pname
doHaddock package = shouldHaddockPackage' &&
not isFinalBuild &&
-- Works around haddock failing on bytestring-builder since it has no modules
-- when bytestring is new enough.
packageHasExposedModules package
buildingFinals = isFinalBuild || taskAllInOne
enableTests = buildingFinals && any isCTest (taskComponents task)
enableBenchmarks = buildingFinals && any isCBench (taskComponents task)
annSuffix = if result == "" then "" else " (" <> result <> ")"
where
result = T.intercalate " + " $ concat $
[ ["lib" | taskAllInOne && hasLib]
, ["exe" | taskAllInOne && hasExe]
, ["test" | enableTests]
, ["bench" | enableBenchmarks]
]
(hasLib, hasExe) = case taskType of
TTLocal lp -> (packageHasLibrary (lpPackage lp), not (Set.null (exesToBuild lp)))
-- This isn't true, but we don't want to have this info for
-- upstream deps.
TTUpstream{} -> (False, False)
getPrecompiled cache =
case taskLocation task of
Snap | not shouldHaddockPackage' -> do
mpc <- readPrecompiledCache taskProvides
(configCacheOpts cache)
(configCacheDeps cache)
case mpc of
Nothing -> return Nothing
Just pc | maybe False
(bcoSnapInstallRoot eeBaseConfigOpts `isParentOf`)
(parseAbsFile =<< (pcLibrary pc)) ->
-- If old precompiled cache files are left around but snapshots are deleted,
-- it is possible for the precompiled file to refer to the very library
-- we're building, and if flags are changed it may try to copy the library
-- to itself. This check prevents that from happening.
return Nothing
Just pc | otherwise -> do
let allM _ [] = return True
allM f (x:xs) = do
b <- f x
if b then allM f xs else return False
b <- liftIO $ allM D.doesFileExist $ maybe id (:) (pcLibrary pc) $ pcExes pc
return $ if b then Just pc else Nothing
_ -> return Nothing
copyPreCompiled (PrecompiledCache mlib exes) = do
announceTask task "copying precompiled package"
forM_ mlib $ \libpath -> do
menv <- getMinimalEnvOverride
withMVar eeInstallLock $ \() -> do
-- We want to ignore the global and user databases.
-- Unfortunately, ghc-pkg doesn't take such arguments on the
-- command line. Instead, we'll set GHC_PACKAGE_PATH. See:
-- https://github.com/commercialhaskell/stack/issues/1146
menv' <- modifyEnvOverride menv
$ Map.insert
"GHC_PACKAGE_PATH"
(T.pack $ toFilePathNoTrailingSep $ bcoSnapDB eeBaseConfigOpts)
-- In case a build of the library with different flags already exists, unregister it
-- before copying.
catch
(readProcessNull Nothing menv' "ghc-pkg"
[ "unregister"
, "--force"
, packageIdentifierString taskProvides
])
(\(ReadProcessException _ _ _ _) -> return ())
readProcessNull Nothing menv' "ghc-pkg"
[ "register"
, "--force"
, libpath
]
liftIO $ forM_ exes $ \exe -> do
D.createDirectoryIfMissing True bindir
let dst = bindir FP.</> FP.takeFileName exe
createLink exe dst `catchIO` \_ -> D.copyFile exe dst
case (mlib, exes) of
(Nothing, _:_) -> markExeInstalled (taskLocation task) taskProvides
_ -> return ()
-- Find the package in the database
wc <- getWhichCompiler
let pkgDbs = [bcoSnapDB eeBaseConfigOpts]
case mlib of
Nothing -> return $ Just $ Executable taskProvides
Just _ -> do
mpkgid <- loadInstalledPkg eeEnvOverride wc pkgDbs eeSnapshotDumpPkgs pname
return $ Just $
case mpkgid of
Nothing -> assert False $ Executable taskProvides
Just pkgid -> Library taskProvides pkgid
where
bindir = toFilePath $ bcoSnapInstallRoot eeBaseConfigOpts </> bindirSuffix
realConfigAndBuild cache allDepsMap = withSingleContext runInBase ac ee task (Just allDepsMap) Nothing
$ \package cabalfp pkgDir cabal announce _console _mlogFile -> do
_neededConfig <- ensureConfig cache pkgDir ee (announce ("configure" <> annSuffix)) cabal cabalfp
if boptsOnlyConfigure eeBuildOpts
then return Nothing
else liftM Just $ realBuild cache package pkgDir cabal announce
realBuild cache package pkgDir cabal announce = do
wc <- getWhichCompiler
markExeNotInstalled (taskLocation task) taskProvides
case taskType of
TTLocal lp -> do
when enableTests $ unsetTestSuccess pkgDir
writeBuildCache pkgDir $ lpNewBuildCache lp
TTUpstream _ _ -> return ()
() <- announce ("build" <> annSuffix)
config <- asks getConfig
extraOpts <- extraBuildOptions eeBuildOpts
preBuildTime <- modTime <$> liftIO getCurrentTime
cabal (configHideTHLoading config) $ ("build" :) $ (++ extraOpts) $
case (taskType, taskAllInOne, isFinalBuild) of
(_, True, True) -> fail "Invariant violated: cannot have an all-in-one build that also has a final build step."
(TTLocal lp, False, False) -> primaryComponentOptions lp
(TTLocal lp, False, True) -> finalComponentOptions lp
(TTLocal lp, True, False) -> primaryComponentOptions lp ++ finalComponentOptions lp
(TTUpstream{}, _, _) -> []
checkForUnlistedFiles taskType preBuildTime pkgDir
when (doHaddock package) $ do
announce "haddock"
sourceFlag <- do
hyped <- tryProcessStdout Nothing eeEnvOverride "haddock" ["--hyperlinked-source"]
case hyped of
-- Fancy crosslinked source
Right _ -> do
return ["--haddock-option=--hyperlinked-source"]
-- Older hscolour colouring
Left _ -> do
hscolourExists <- doesExecutableExist eeEnvOverride "HsColour"
unless hscolourExists $ $logWarn
("Warning: haddock not generating hyperlinked sources because 'HsColour' not\n" <>
"found on PATH (use 'stack install hscolour' to install).")
return ["--hyperlink-source" | hscolourExists]
cabal False (concat [["haddock", "--html", "--hoogle", "--html-location=../$pkg-$version/"]
,sourceFlag])
unless isFinalBuild $ withMVar eeInstallLock $ \() -> do
announce "copy/register"
cabal False ["copy"]
when (packageHasLibrary package) $ cabal False ["register"]
let (installedPkgDb, installedDumpPkgsTVar) =
case taskLocation task of
Snap ->
( bcoSnapDB eeBaseConfigOpts
, eeSnapshotDumpPkgs )
Local ->
( bcoLocalDB eeBaseConfigOpts
, eeLocalDumpPkgs )
let ident = PackageIdentifier (packageName package) (packageVersion package)
mpkgid <- if packageHasLibrary package
then do
mpkgid <- loadInstalledPkg eeEnvOverride wc [installedPkgDb] installedDumpPkgsTVar (packageName package)
case mpkgid of
Nothing -> throwM $ Couldn'tFindPkgId $ packageName package
Just pkgid -> return $ Library ident pkgid
else do
markExeInstalled (taskLocation task) taskProvides -- TODO unify somehow with writeFlagCache?
return $ Executable ident
case taskLocation task of
Snap -> writePrecompiledCache eeBaseConfigOpts taskProvides
(configCacheOpts cache)
(configCacheDeps cache)
mpkgid (packageExes package)
Local -> return ()
return mpkgid
loadInstalledPkg menv wc pkgDbs tvar name = do
dps <- ghcPkgDescribe name menv wc pkgDbs $ conduitDumpPackage =$ CL.consume
case dps of
[] -> return Nothing
[dp] -> do
liftIO $ atomically $ modifyTVar' tvar (Map.insert (dpGhcPkgId dp) dp)
return $ Just (dpGhcPkgId dp)
_ -> error "singleBuild: invariant violated: multiple results when describing installed package"
-- | Check if any unlisted files have been found, and add them to the build cache.
checkForUnlistedFiles :: M env m => TaskType -> ModTime -> Path Abs Dir -> m ()
checkForUnlistedFiles (TTLocal lp) preBuildTime pkgDir = do
(addBuildCache,warnings) <-
addUnlistedToBuildCache
preBuildTime
(lpPackage lp)
(lpCabalFile lp)
(lpNewBuildCache lp)
mapM_ ($logWarn . ("Warning: " <>) . T.pack . show) warnings
unless (null addBuildCache) $
writeBuildCache pkgDir $
Map.unions (lpNewBuildCache lp : addBuildCache)
checkForUnlistedFiles (TTUpstream _ _) _ _ = return ()
-- | Determine if all of the dependencies given are installed
depsPresent :: InstalledMap -> Map PackageName VersionRange -> Bool
depsPresent installedMap deps = all
(\(name, range) ->
case Map.lookup name installedMap of
Just (_, installed) -> installedVersion installed `withinRange` range
Nothing -> False)
(Map.toList deps)
singleTest :: M env m
=> (m () -> IO ())
-> TestOpts
-> [Text]
-> ActionContext
-> ExecuteEnv
-> Task
-> InstalledMap
-> m ()
singleTest runInBase topts testsToRun ac ee task installedMap = do
-- FIXME: Since this doesn't use cabal, we should be able to avoid using a
    -- full-blown 'withSingleContext'.
(allDepsMap, _cache) <- getConfigCache ee task installedMap True False
withSingleContext runInBase ac ee task (Just allDepsMap) (Just "test") $ \package _cabalfp pkgDir _cabal announce _console mlogFile -> do
config <- asks getConfig
let needHpc = toCoverage topts
toRun <-
if toDisableRun topts
then do
announce "Test running disabled by --no-run-tests flag."
return False
else if toRerunTests topts
then return True
else do
success <- checkTestSuccess pkgDir
if success
then do
unless (null testsToRun) $ announce "skipping already passed test"
return False
else return True
when toRun $ do
buildDir <- distDirFromDir pkgDir
hpcDir <- hpcDirFromDir pkgDir
when needHpc (createTree hpcDir)
errs <- liftM Map.unions $ forM (Map.toList (packageTests package)) $ \(testName, suiteInterface) -> do
let stestName = T.unpack testName
(testName', isTestTypeLib) <-
case suiteInterface of
C.TestSuiteLibV09{} -> return (stestName ++ "Stub", True)
C.TestSuiteExeV10{} -> return (stestName, False)
interface -> throwM (TestSuiteTypeUnsupported interface)
exeName <- testExeName testName'
tixPath <- liftM (pkgDir </>) $ parseRelFile $ exeName ++ ".tix"
exePath <- liftM (buildDir </>) $ parseRelFile $ "build/" ++ testName' ++ "/" ++ exeName
exists <- fileExists exePath
menv <- liftIO $ configEnvOverride config EnvSettings
{ esIncludeLocals = taskLocation task == Local
, esIncludeGhcPackagePath = True
, esStackExe = True
, esLocaleUtf8 = False
}
if exists
then do
-- We clear out the .tix files before doing a run.
when needHpc $ do
tixexists <- fileExists tixPath
when tixexists $
$logWarn ("Removing HPC file " <> T.pack (toFilePath tixPath))
removeFileIfExists tixPath
let args = toAdditionalArgs topts
argsDisplay = case args of
[] -> ""
_ -> ", args: " <> T.intercalate " " (map showProcessArgDebug args)
announce $ "test (suite: " <> testName <> argsDisplay <> ")"
-- Clear "Progress: ..." message before
-- redirecting output.
when (isNothing mlogFile) $ do
$logStickyDone ""
liftIO $ hFlush stdout
liftIO $ hFlush stderr
let output =
case mlogFile of
Nothing -> Inherit
Just (_, h) -> UseHandle h
cp = (proc (toFilePath exePath) args)
{ cwd = Just $ toFilePath pkgDir
, Process.env = envHelper menv
, std_in = CreatePipe
, std_out = output
, std_err = output
}
-- Use createProcess_ to avoid the log file being closed afterwards
(Just inH, Nothing, Nothing, ph) <- liftIO $ createProcess_ "singleBuild.runTests" cp
when isTestTypeLib $ do
logPath <- buildLogPath package (Just stestName)
createTree (parent logPath)
liftIO $ hPutStr inH $ show (logPath, testName)
liftIO $ hClose inH
ec <- liftIO $ waitForProcess ph
                        -- Add a trailing newline, in case the test
-- output didn't finish with a newline.
when (isNothing mlogFile) ($logInfo "")
-- Move the .tix file out of the package
-- directory into the hpc work dir, for
-- tidiness.
when needHpc $
updateTixFile (packageName package) tixPath
return $ case ec of
ExitSuccess -> Map.empty
_ -> Map.singleton testName $ Just ec
else do
$logError $ T.concat
[ "Test suite "
, testName
, " executable not found for "
, packageNameText $ packageName package
]
return $ Map.singleton testName Nothing
when needHpc $ do
let testsToRun' = map f testsToRun
f tName =
case Map.lookup tName (packageTests package) of
Just C.TestSuiteLibV09{} -> tName <> "Stub"
_ -> tName
generateHpcReport pkgDir package testsToRun'
bs <- liftIO $
case mlogFile of
Nothing -> return ""
Just (logFile, h) -> do
hClose h
S.readFile $ toFilePath logFile
unless (Map.null errs) $ throwM $ TestSuiteFailure
(taskProvides task)
errs
(fmap fst mlogFile)
bs
singleBench :: M env m
=> (m () -> IO ())
-> BenchmarkOpts
-> [Text]
-> ActionContext
-> ExecuteEnv
-> Task
-> InstalledMap
-> m ()
singleBench runInBase beopts benchesToRun ac ee task installedMap = do
-- FIXME: Since this doesn't use cabal, we should be able to avoid using a
-- fullblown 'withSingleContext'.
(allDepsMap, _cache) <- getConfigCache ee task installedMap False True
withSingleContext runInBase ac ee task (Just allDepsMap) (Just "bench") $ \_package _cabalfp _pkgDir cabal announce _console _mlogFile -> do
let args = map T.unpack benchesToRun <> maybe []
((:[]) . ("--benchmark-options=" <>))
(beoAdditionalArgs beopts)
toRun <-
if beoDisableRun beopts
then do
announce "Benchmark running disabled by --no-run-benchmarks flag."
return False
else do
return True
when toRun $ do
announce "benchmarks"
cabal False ("bench" : args)
-- | Grab all output from the given @Handle@ and log it, stripping
-- Template Haskell "Loading package" lines and making paths absolute.
printBuildOutput :: (MonadIO m, MonadBaseControl IO m, MonadLogger m,
MonadThrow m)
=> Bool -- ^ exclude TH loading?
-> Bool -- ^ convert paths to absolute?
-> Path Abs Dir -- ^ package's root directory
-> LogLevel
-> Handle
-> m ()
printBuildOutput excludeTHLoading makeAbsolute pkgDir level outH = void $
CB.sourceHandle outH
$$ CT.decodeUtf8
=$ mungeBuildOutput excludeTHLoading makeAbsolute pkgDir
=$ CL.mapM_ (monadLoggerLog $(TH.location >>= liftLoc) "" level)
-- | Strip Template Haskell "Loading package" lines and make paths absolute.
mungeBuildOutput :: MonadIO m
=> Bool -- ^ exclude TH loading?
-> Bool -- ^ convert paths to absolute?
-> Path Abs Dir -- ^ package's root directory
-> ConduitM Text Text m ()
mungeBuildOutput excludeTHLoading makeAbsolute pkgDir = void $
CT.lines
=$ CL.map stripCarriageReturn
=$ CL.filter (not . isTHLoading)
=$ CL.mapM toAbsolutePath
where
    -- | Is this line a Template Haskell "Loading package" line?
isTHLoading :: Text -> Bool
isTHLoading _ | not excludeTHLoading = False
isTHLoading bs =
"Loading package " `T.isPrefixOf` bs &&
("done." `T.isSuffixOf` bs || "done.\r" `T.isSuffixOf` bs)
-- | Convert GHC error lines with file paths to have absolute file paths
toAbsolutePath bs | not makeAbsolute = return bs
toAbsolutePath bs = do
let (x, y) = T.break (== ':') bs
mabs <-
if isValidSuffix y
then do
efp <- liftIO $ tryIO $ resolveFile pkgDir (T.unpack x)
case efp of
Left _ -> return Nothing
Right fp -> return $ Just $ T.pack (toFilePath fp)
else return Nothing
case mabs of
Nothing -> return bs
Just fp -> return $ fp `T.append` y
-- | Match the line:column format at the end of lines
isValidSuffix = isRight . parseOnly (lineCol <* endOfInput)
lineCol = char ':' >> (decimal :: Parser Int)
>> char ':' >> (decimal :: Parser Int)
>> (string ":" <|> string ": Warning:")
>> return ()
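    -- For example (illustrative): a GHC message line "src/Foo.hs:12:7:" is
    -- broken at the first ':' into ("src/Foo.hs", ":12:7:"); the suffix
    -- parses, so the path is resolved against pkgDir and re-attached.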
    -- | Strip @\r@ characters from the text. Needed because Windows uses CRLF line endings.
stripCarriageReturn :: Text -> Text
stripCarriageReturn = T.filter (/= '\r')
-- | Find the Setup.hs or Setup.lhs in the given directory. If none exists,
-- throw an exception.
getSetupHs :: Path Abs Dir -- ^ project directory
-> IO (Path Abs File)
getSetupHs dir = do
exists1 <- fileExists fp1
if exists1
then return fp1
else do
exists2 <- fileExists fp2
if exists2
then return fp2
else throwM $ NoSetupHsFound dir
where
fp1 = dir </> $(mkRelFile "Setup.hs")
fp2 = dir </> $(mkRelFile "Setup.lhs")
-- Do not pass `-hpcdir` as a GHC option if coverage is not enabled.
-- This helps when running stack-compiled programs with dynamic interpreters like `hint`.
-- Cf. https://github.com/commercialhaskell/stack/issues/997
extraBuildOptions :: M env m => BuildOpts -> m [String]
extraBuildOptions bopts = do
let ddumpOpts = " -ddump-hi -ddump-to-file"
case toCoverage (boptsTestOpts bopts) of
True -> do
hpcIndexDir <- toFilePathNoTrailingSep <$> hpcRelativeDir
return ["--ghc-options", "-hpcdir " ++ hpcIndexDir ++ ddumpOpts]
False -> return ["--ghc-options", ddumpOpts]
-- Library and executable build components.
primaryComponentOptions :: LocalPackage -> [String]
primaryComponentOptions lp = concat
[ ["lib:" ++ packageNameString (packageName (lpPackage lp))
-- TODO: get this information from target parsing instead,
-- which will allow users to turn off library building if
-- desired
| packageHasLibrary (lpPackage lp)]
, map (T.unpack . T.append "exe:") $ Set.toList $ exesToBuild lp
]
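-- For example (illustrative): a package @foo@ with a library and
-- executables @bar@ and @baz@ yields ["lib:foo", "exe:bar", "exe:baz"].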
exesToBuild :: LocalPackage -> Set Text
exesToBuild lp = packageExes (lpPackage lp)
-- NOTE: Ideally we'd do something like the following code, allowing
-- the user to control which executables get built. However, due to
-- https://github.com/haskell/cabal/issues/2780 we must build all
-- exes...
--
-- if lpWanted lp
-- then exeComponents (lpComponents lp)
-- -- Build all executables in the event that no
-- -- specific list is provided (as happens with
-- -- extra-deps).
-- else packageExes (lpPackage lp)
-- Test-suite and benchmark build components.
finalComponentOptions :: LocalPackage -> [String]
finalComponentOptions lp =
map (T.unpack . decodeUtf8 . renderComponent) $
Set.toList $
Set.filter (\c -> isCTest c || isCBench c) (lpComponents lp)
taskComponents :: Task -> Set NamedComponent
taskComponents task =
case taskType task of
TTLocal lp -> lpComponents lp
TTUpstream{} -> Set.empty
-- | Take the given list of package dependencies and the contents of the global
-- package database, and construct a set of installed package IDs that:
--
-- * Excludes the Cabal library (it's added later)
--
-- * Includes all packages depended on by this package
--
-- * Includes all global packages, unless: (1) it's hidden, (2) it's shadowed
-- by a depended-on package, or (3) one of its dependencies is not met.
--
-- See:
--
-- * https://github.com/commercialhaskell/stack/issues/941
--
-- * https://github.com/commercialhaskell/stack/issues/944
--
-- * https://github.com/commercialhaskell/stack/issues/949
addGlobalPackages :: Map PackageIdentifier GhcPkgId -- ^ dependencies of the package
-> [DumpPackage () ()] -- ^ global packages
-> Set GhcPkgId
addGlobalPackages deps globals0 =
res
where
-- Initial set of packages: the installed IDs of all dependencies
res0 = Map.elems $ Map.filterWithKey (\ident _ -> not $ isCabal ident) deps
-- First check on globals: it's not shadowed by a dep, it's not Cabal, and
-- it's exposed
goodGlobal1 dp = not (isDep dp)
&& not (isCabal $ dpPackageIdent dp)
&& dpIsExposed dp
globals1 = filter goodGlobal1 globals0
-- Create a Map of unique package names in the global database
globals2 = Map.fromListWith chooseBest
$ map (packageIdentifierName . dpPackageIdent &&& id) globals1
-- Final result: add in globals that have their dependencies met
res = loop id (Map.elems globals2) $ Set.fromList res0
----------------------------------
-- Some auxiliary helper functions
----------------------------------
-- Is the given package identifier for any version of Cabal
isCabal (PackageIdentifier name _) = name == $(mkPackageName "Cabal")
-- Is the given package name provided by the package dependencies?
isDep dp = packageIdentifierName (dpPackageIdent dp) `Set.member` depNames
depNames = Set.map packageIdentifierName $ Map.keysSet deps
-- Choose the best of two competing global packages (the newest version)
chooseBest dp1 dp2
| getVer dp1 < getVer dp2 = dp2
| otherwise = dp1
where
getVer = packageIdentifierVersion . dpPackageIdent
-- Are all dependencies of the given package met by the given Set of
-- installed packages
depsMet dp gids = all (`Set.member` gids) (dpDepends dp)
-- Find all globals that have all of their dependencies met
loop front (dp:dps) gids
-- This package has its deps met. Add it to the list of dependencies
-- and then traverse the list from the beginning (this package may have
-- been a dependency of an earlier one).
| depsMet dp gids = loop id (front dps) (Set.insert (dpGhcPkgId dp) gids)
-- Deps are not met, keep going
| otherwise = loop (front . (dp:)) dps gids
-- None of the packages we checked can be added, therefore drop them all
-- and return our results
loop _ [] gids = gids
| rubik/stack | src/Stack/Build/Execute.hs | bsd-3-clause | 66,832 | 0 | 35 | 25,642 | 14,204 | 7,104 | 7,100 | -1 | -1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module Numeric.Units.Dimensional.Test where
import Numeric.Units.Dimensional.Prelude
import qualified Prelude
import Test.HUnit
testPower = TestLabel "Power test" $ TestList
[ TestCase $ (9 *~ one) @=? (3 *~ one) ^ pos2
, TestCase $ (1 *~ one) @=? (12.1231 *~ one) ^ zero
, TestCase $ (0.25 *~ one) @=? (2 *~ one) ^ neg2
]
testDimensionless = TestLabel "Dimensionless test" $ TestList
[ TestCase $ (3 Prelude.** 2) *~ one @=? (3 *~ one) ** (2 *~ one)
]
testShow = TestLabel "Test 'Show' instance" $ TestList
[ TestCase $ show (1 *~ one) @?= "1"
, TestCase $ show (2 *~ meter) @?= "2 m"
, TestCase $ show (2.0 *~ (meter / second)) @?= "2.0 m s^-1"
, TestCase $ show (2.0 *~ (meter ^ pos2 / second ^ pos2)) @?= "2.0 m^2 s^-2"
, TestCase $ show (undefined :: DVelocity) @?= "m s^-1"
]
-- Collect the test cases.
tests = TestList
[ testPower
, testDimensionless
, testShow
]
main = runTestTT tests
| bjornbm/dimensional-classic | Numeric/Units/Dimensional/Test.hs | bsd-3-clause | 1,009 | 0 | 16 | 241 | 351 | 192 | 159 | 22 | 1 |
import Data.Attoparsec.ByteString.Char8
import qualified Data.ByteString.Lazy as BL
import Options.Applicative (execParser)
import System.IO (hPutStrLn, stdout, stderr)
import CSV
import CommandLineArgs
import File
import Parse.Log
import Types
chooseParsingFunction :: CommandLineOpts -> (Parser LogEntry)
chooseParsingFunction args = case parseAsCommon args of
True -> parseAsCommonLogLine
False -> parseAsExtendedLogLine
getLog :: CommandLineOpts -> IO Log
getLog args = do
logFile <- readLog (logPath args)
let parserChoice = chooseParsingFunction args
return $ parseFileLines parserChoice logFile
main :: IO ()
main = do
args <- execParser opts
logData <- getLog args
let csv = toCSV logData
BL.hPut stdout csv
hPutStrLn stderr "Conversion to CSV completed."
return ()
| safarijv/ncsa-logparse | src/Main.hs | bsd-3-clause | 793 | 0 | 10 | 119 | 235 | 118 | 117 | 26 | 2 |
module Lib
( someFunc
) where
someFunc :: IO ()
someFunc = putStrLn "someFunc"
| erantapaa/parse-complex | src/Lib.hs | bsd-3-clause | 89 | 0 | 6 | 24 | 27 | 15 | 12 | 4 | 1 |
module Main where
import Graphics.UI.Gtk
import System.Glib.Signals (on)
import Data.List ( isPrefixOf )
import Data.Char ( toLower )
data Phone = Phone { name :: String, number :: Int, marked :: Bool }
main = do
initGUI
win <- windowNew
onDestroy win mainQuit
-- create a new list model
model <- listStoreNew
[Phone { name = "Foo", number = 12345, marked = False }
,Phone { name = "Bar", number = 67890, marked = True }
,Phone { name = "Baz", number = 39496, marked = False }]
view <- treeViewNewWithModel model
treeViewSetHeadersVisible view True
-- add a couple columns
col1 <- treeViewColumnNew
col2 <- treeViewColumnNew
col3 <- treeViewColumnNew
treeViewColumnSetTitle col1 "String column"
treeViewColumnSetTitle col2 "Int column"
treeViewColumnSetTitle col3 "Bool column"
renderer1 <- cellRendererTextNew
renderer2 <- cellRendererTextNew
renderer3 <- cellRendererToggleNew
cellLayoutPackStart col1 renderer1 True
cellLayoutPackStart col2 renderer2 True
cellLayoutPackStart col3 renderer3 True
cellLayoutSetAttributes col1 renderer1 model $ \row -> [ cellText := name row ]
cellLayoutSetAttributes col2 renderer2 model $ \row -> [ cellText := show (number row) ]
cellLayoutSetAttributes col3 renderer3 model $ \row -> [ cellToggleActive := marked row ]
treeViewAppendColumn view col1
treeViewAppendColumn view col2
treeViewAppendColumn view col3
-- update the model when the toggle buttons are activated
on renderer3 cellToggled $ \pathStr -> do
let (i:_) = stringToTreePath pathStr
val <- listStoreGetValue model i
listStoreSetValue model i val { marked = not (marked val) }
-- enable interactive search
treeViewSetEnableSearch view True
treeViewSetSearchEqualFunc view $ Just $ \str iter -> do
(i:_) <- treeModelGetPath model iter
row <- listStoreGetValue model i
return (map toLower str `isPrefixOf` map toLower (name row))
containerAdd win view
widgetShowAll win
mainGUI
| phischu/gtk2hs | gtk/demo/treelist/ListDemo.hs | lgpl-3.0 | 2,001 | 0 | 16 | 391 | 589 | 289 | 300 | 46 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sl-SI">
<title>Server-Sent Events | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | 0xkasun/security-tools | src/org/zaproxy/zap/extension/sse/resources/help_sl_SI/helpset_sl_SI.hs | apache-2.0 | 980 | 85 | 53 | 160 | 402 | 212 | 190 | -1 | -1 |
{-
Some tests to verify that serialisation works as expected
-}
module AllTests(tests)
where
import GHC.Packing
import qualified Data.Array.IArray as A
import Control.Concurrent
import System.Environment
import System.IO
import System.Directory
import qualified Data.ByteString as B
import Control.Exception
import Data.Typeable
import Distribution.TestSuite
-- this test uses the trySerialize routine. We expect to trigger some
-- exceptions and catch them as appropriate.
catchPackExc :: IO () -> IO ()
catchPackExc io = io `catch` (\e -> putStrLn (show (e::PackException)))
-- need a time-wasting function which allocates...
nfib :: Integer -> Integer
nfib 0 = 1
nfib 1 = 1
nfib n = let n1 = nfib (n-1)
n2 = nfib (n-2)
in 1 + 2*n1 + n2 - n1
-- make a test instance. Action should check result and return Bool
runIt :: String -> IO Bool -> TestInstance
runIt name action
= TestInstance
{ run = action >>= return . Finished .
(\b -> if b then Pass
else Fail "unexpected output (see log)")
, name = "Test case " ++ name
, tags = []
, options = []
, setOption = \_ _ -> Right (runIt name action)
}
tests :: IO [ Test ]
tests = do putStrLn "Running all tests"
mapM (return . Test . uncurry runIt) mytests
-- all configured tests, see below
mytests = [eval_array, pack_array, pack_ThreadId, pack_MVar ]
-- test data
arr, output :: A.Array Int Int
arr = A.array (0,127) [ (i,i) | i <- [0..127] ]
output = A.amap (2*) arr
n :: Int
n = 3
eval_array :: (String, IO Bool)
eval_array = ("eval. array",
do let out = show $ take n $ A.elems output
putStrLn $ "Evaluated: " ++ out
return (out == "[0,2,4]")
)
pack_array :: (String, IO Bool)
pack_array = ("duplicating an array of 128 elements",
do packet1 <- trySerialize output
putStrLn (take (3*80) (show packet1) ++ "...")
putStrLn "now unpacking (deserialize):"
copy <- deserialize packet1
putStrLn ("unpacked, now evaluate")
putStrLn (show copy)
return $ copy == A.amap (2*) arr
)
expectException :: Typeable a => PackException -> IO (Serialized a) -> IO Bool
expectException exception action
= do putStrLn ("expect exception " ++ show exception)
action >>= print
return False
`catch` \e -> do putStrLn ("Got: " ++ show e)
return (e == exception)
pack_ThreadId :: (String, IO Bool)
pack_ThreadId = ("packing a thread ID (unsupported)",
do t <- myThreadId
expectException P_UNSUPPORTED $ trySerialize t
)
pack_MVar :: (String, IO Bool)
pack_MVar = ("packing an MVar (should be cannotpack)",
do m <- newEmptyMVar :: IO (MVar Integer)
expectException P_CANNOTPACK $ trySerialize m
)
| ajnsit/packman | Test/AllTests.hs | bsd-3-clause | 3,030 | 0 | 13 | 933 | 882 | 466 | 416 | 67 | 2 |
--------------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Hakyll.Check
( Check (..)
, check
) where
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Monad (forM_)
import Control.Monad.Reader (ask)
import Control.Monad.RWS (RWST, runRWST)
import Control.Monad.Trans (liftIO)
import Control.Monad.Writer (tell)
import Data.List (isPrefixOf)
import Data.Monoid (Monoid (..))
import Data.Set (Set)
import qualified Data.Set as S
import Network.URI (unEscapeString)
import System.Directory (doesDirectoryExist, doesFileExist)
import System.Exit (ExitCode (..))
import System.FilePath (takeDirectory, takeExtension, (</>))
import qualified Text.HTML.TagSoup as TS
--------------------------------------------------------------------------------
#ifdef CHECK_EXTERNAL
import Control.Exception (AsyncException (..),
SomeException (..), handle, throw)
import Control.Monad.State (get, modify)
import Data.List (intercalate)
import Data.Typeable (cast)
import Data.Version (versionBranch)
import GHC.Exts (fromString)
import qualified Network.HTTP.Conduit as Http
import qualified Network.HTTP.Types as Http
import qualified Paths_hakyll as Paths_hakyll
#endif
--------------------------------------------------------------------------------
import Hakyll.Core.Configuration
import Hakyll.Core.Logger (Logger)
import qualified Hakyll.Core.Logger as Logger
import Hakyll.Core.Util.File
import Hakyll.Web.Html
--------------------------------------------------------------------------------
data Check = All | InternalLinks
deriving (Eq, Ord, Show)
--------------------------------------------------------------------------------
check :: Configuration -> Logger -> Check -> IO ExitCode
check config logger check' = do
((), write) <- runChecker checkDestination config logger check'
return $ if checkerFaulty write > 0 then ExitFailure 1 else ExitSuccess
--------------------------------------------------------------------------------
data CheckerRead = CheckerRead
{ checkerConfig :: Configuration
, checkerLogger :: Logger
, checkerCheck :: Check
}
--------------------------------------------------------------------------------
data CheckerWrite = CheckerWrite
{ checkerFaulty :: Int
, checkerOk :: Int
} deriving (Show)
--------------------------------------------------------------------------------
instance Monoid CheckerWrite where
mempty = CheckerWrite 0 0
mappend (CheckerWrite f1 o1) (CheckerWrite f2 o2) =
CheckerWrite (f1 + f2) (o1 + o2)
--------------------------------------------------------------------------------
type CheckerState = Set String
--------------------------------------------------------------------------------
type Checker a = RWST CheckerRead CheckerWrite CheckerState IO a
--------------------------------------------------------------------------------
runChecker :: Checker a -> Configuration -> Logger -> Check
-> IO (a, CheckerWrite)
runChecker checker config logger check' = do
let read' = CheckerRead
{ checkerConfig = config
, checkerLogger = logger
, checkerCheck = check'
}
(x, _, write) <- runRWST checker read' S.empty
Logger.flush logger
return (x, write)
--------------------------------------------------------------------------------
checkDestination :: Checker ()
checkDestination = do
config <- checkerConfig <$> ask
files <- liftIO $ getRecursiveContents
(const $ return False) (destinationDirectory config)
let htmls =
[ destinationDirectory config </> file
| file <- files
, takeExtension file == ".html"
]
forM_ htmls checkFile
--------------------------------------------------------------------------------
checkFile :: FilePath -> Checker ()
checkFile filePath = do
logger <- checkerLogger <$> ask
contents <- liftIO $ readFile filePath
Logger.header logger $ "Checking file " ++ filePath
let urls = getUrls $ TS.parseTags contents
forM_ urls $ \url -> do
Logger.debug logger $ "Checking link " ++ url
checkUrl filePath url
--------------------------------------------------------------------------------
checkUrl :: FilePath -> String -> Checker ()
checkUrl filePath url
| isExternal url = checkExternalUrl url
| hasProtocol url = skip "Unknown protocol, skipping"
| otherwise = checkInternalUrl filePath url
where
validProtoChars = ['A'..'Z'] ++ ['a'..'z'] ++ ['0'..'9'] ++ "+-."
hasProtocol str = case break (== ':') str of
(proto, ':' : _) -> all (`elem` validProtoChars) proto
_ -> False
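    -- For example (illustrative): "mailto:someone@example.org" has a
    -- recognised protocol and is skipped, while "style.css" and
    -- "/docs/index.html" have none and are checked as internal links.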
--------------------------------------------------------------------------------
ok :: String -> Checker ()
ok _ = tell $ mempty {checkerOk = 1}
--------------------------------------------------------------------------------
skip :: String -> Checker ()
skip reason = do
logger <- checkerLogger <$> ask
Logger.debug logger $ reason
tell $ mempty {checkerOk = 1}
--------------------------------------------------------------------------------
faulty :: String -> Checker ()
faulty url = do
logger <- checkerLogger <$> ask
Logger.error logger $ "Broken link to " ++ show url
tell $ mempty {checkerFaulty = 1}
--------------------------------------------------------------------------------
checkInternalUrl :: FilePath -> String -> Checker ()
checkInternalUrl base url = case url' of
"" -> ok url
_ -> do
config <- checkerConfig <$> ask
let dest = destinationDirectory config
dir = takeDirectory base
filePath
| "/" `isPrefixOf` url' = dest ++ url'
| otherwise = dir </> url'
exists <- checkFileExists filePath
if exists then ok url else faulty url
where
url' = stripFragments $ unEscapeString url
--------------------------------------------------------------------------------
checkExternalUrl :: String -> Checker ()
#ifdef CHECK_EXTERNAL
checkExternalUrl url = do
logger <- checkerLogger <$> ask
needsCheck <- (== All) . checkerCheck <$> ask
checked <- (url `S.member`) <$> get
if not needsCheck || checked
then Logger.debug logger "Already checked, skipping"
else do
isOk <- liftIO $ handle (failure logger) $
Http.withManager $ \mgr -> do
request <- Http.parseUrl urlToCheck
response <- Http.http (settings request) mgr
let code = Http.statusCode (Http.responseStatus response)
return $ code >= 200 && code < 300
modify $ if schemeRelative url
then S.insert urlToCheck . S.insert url
else S.insert url
if isOk then ok url else faulty url
where
-- Add additional request info
settings r = r
{ Http.method = "HEAD"
, Http.redirectCount = 10
, Http.requestHeaders = ("User-Agent", ua) : Http.requestHeaders r
}
-- Nice user agent info
ua = fromString $ "hakyll-check/" ++
(intercalate "." $ map show $ versionBranch $ Paths_hakyll.version)
-- Catch all the things except UserInterrupt
failure logger (SomeException e) = case cast e of
Just UserInterrupt -> throw UserInterrupt
_ -> Logger.error logger (show e) >> return False
-- Check scheme-relative links
schemeRelative = isPrefixOf "//"
urlToCheck = if schemeRelative url then "http:" ++ url else url
#else
checkExternalUrl _ = return ()
#endif
--------------------------------------------------------------------------------
-- | Wraps doesFileExist, also checks for index.html
checkFileExists :: FilePath -> Checker Bool
checkFileExists filePath = liftIO $ do
file <- doesFileExist filePath
dir <- doesDirectoryExist filePath
case (file, dir) of
(True, _) -> return True
(_, True) -> doesFileExist $ filePath </> "index.html"
_ -> return False
--------------------------------------------------------------------------------
stripFragments :: String -> String
stripFragments = takeWhile (not . flip elem ['?', '#'])
| Minoru/hakyll | src/Hakyll/Check.hs | bsd-3-clause | 9,141 | 0 | 21 | 2,443 | 2,008 | 1,071 | 937 | 120 | 3 |
module PackageTests.ReexportedModules.Check where
import Data.Version
import PackageTests.PackageTester
import System.FilePath
import Test.Tasty.HUnit
import Data.Maybe
import Data.List
import Control.Monad
import Data.Char
import Text.ParserCombinators.ReadP
orFail :: String -> [(a, String)] -> a
orFail err r = case find (all isSpace . snd) r of
Nothing -> error err
Just (i, _) -> i
find' :: (a -> Bool) -> [a] -> Maybe a
find' = find
suite :: FilePath -> Assertion
suite ghcPath = do
-- ToDo: Turn this into a utility function
(_, _, xs) <- run Nothing ghcPath [] ["--info"]
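    -- Parse the association list printed by `ghc --info`, extract the
    -- "Project version" field, and require at least GHC 7.9 (module
    -- reexports need GHC 7.10 or a late 7.9 development compiler).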
let compat = (>= Version [7,9] [])
. orFail "could not parse version"
. readP_to_S parseVersion
. snd
. fromJust
. find' ((=="Project version").fst)
. orFail "could not parse ghc --info output"
. reads
$ xs
when compat $ do
let spec = PackageSpec
{ directory = "PackageTests" </> "ReexportedModules"
, configOpts = []
, distPref = Nothing
}
result <- cabal_build spec ghcPath
assertBuildSucceeded result
| corngood/cabal | Cabal/tests/PackageTests/ReexportedModules/Check.hs | bsd-3-clause | 1,220 | 0 | 20 | 391 | 350 | 187 | 163 | 35 | 2 |
{-# LANGUAGE ScopedTypeVariables, JavaScriptFFI, ForeignFunctionInterface #-}
module Main where
import Prelude hiding (print, putStrLn)
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import Data.List (intersperse)
import GHCJS.Types
import GHCJS.Concurrent
import GHCJS.Foreign.Callback
import qualified Data.JSString as JSS
---------------------------------------------------------------------------
-- our usual standard io implementation is asynchronous, use this more
-- primitive mechanism to print the results
print :: Show a => a -> IO ()
print = putStrLn . show
putStrLn :: String -> IO ()
putStrLn = js_log . JSS.pack
foreign import javascript unsafe
"console.log($1);"
js_log :: JSString -> IO ()
---------------------------------------------------------------------------
foreign import javascript unsafe
"$1();"
js_runCallback :: Callback a -> IO ()
---------------------------------------------------------------------------
main :: IO ()
main = sequence_ .
intersperse (putStrLn "---------------------------") $
[ synchronously1
, wouldBlock1
, wouldBlock2
, callback1
, callback2
, callback3
]
printIsSync :: IO ()
printIsSync = do
tid <- myThreadId
sync <- isThreadSynchronous tid
ca <- isThreadContinueAsync tid
print (sync, ca)
printMVar :: MVar () -> IO ()
printMVar = print <=< takeMVar
synchronously1 :: IO ()
synchronously1 = do
putStrLn "synchronously1"
printIsSync
synchronously printIsSync
printIsSync
let h x m = m `catch` \(e::SomeException) -> do
putStrLn ("handler: " ++ x)
printIsSync
h "outside" (synchronously $ printIsSync >> error "err")
printIsSync
synchronously (h "inside" (printIsSync >> error "err"))
printIsSync
putStrLn "synchronously1 done"
-- blocking on MVar should give us an exception, the exception can be handled
wouldBlock1 :: IO ()
wouldBlock1 = do
putStrLn "wouldBlock1"
x `catch` \(e::SomeException) ->
do putStrLn "exception caught: outer"
print e
putStrLn "wouldBlock1 done"
where
x = do
mv1 <- newEmptyMVar
printIsSync
synchronously $ do
printIsSync
printMVar mv1 `catch` \(e::SomeException) -> do
putStrLn "exeption caught inner1"
print e
printMVar mv1 `catch` \(e::WouldBlockException) -> do
putStrLn "exeption caught inner2"
print e
putStrLn "ok"
printMVar mv1
putStrLn "unreachable"
printIsSync
-- threadDelay should give us the exception too
wouldBlock2 :: IO ()
wouldBlock2 = do
putStrLn "wouldBlock2"
threadDelay 500000
let x = synchronously $ do
threadDelay 500000 `catch` \(e::WouldBlockException) ->
putStrLn "exception caught: inner"
printIsSync
putStrLn "ok"
threadDelay 500000
putStrLn "unreachable"
x `catch` \(e::WouldBlockException) ->
putStrLn "exception caught: outer"
putStrLn "wouldBlock2 done"
-- synchronous callbacks give us an exception
callback1 :: IO ()
callback1 = do
putStrLn "callback1"
mv1 <- newEmptyMVar
cb1 <- syncCallback ThrowWouldBlock $ do
printIsSync
putStrLn "ok"
printMVar mv1 `catch` \(e::WouldBlockException) ->
putStrLn "exception: would block"
printIsSync
putStrLn "ok"
printMVar mv1
-- thread would block error ends up on stderr
putStrLn "unreachable"
js_runCallback cb1
putStrLn "callback1 finished"
releaseCallback cb1
callback2 :: IO ()
callback2 = do
putStrLn "callback2"
mv1 <- newEmptyMVar
mv2 <- newEmptyMVar
cb1 <- syncCallback ContinueAsync $ do
printIsSync
putStrLn "ok"
printMVar mv1
putStrLn "callback"
printIsSync
putMVar mv2 ()
js_runCallback cb1
putStrLn "main"
putMVar mv1 ()
printMVar mv2
putStrLn "main"
putStrLn "callback2 done"
-- async callbacks are async
callback3 :: IO ()
callback3 = do
putStrLn "callback3"
mv1 <- newEmptyMVar
cb1 <- asyncCallback $ do
putStrLn "async callback"
printIsSync
putMVar mv1 ()
printMVar mv1
putStrLn "callback"
putMVar mv1 ()
js_runCallback cb1
printMVar mv1
putStrLn "main"
putMVar mv1 ()
printMVar mv1
putStrLn "main"
printIsSync
releaseCallback cb1
putStrLn "callback3 done"
| seereason/ghcjs | test/conc/syncThreads.hs | mit | 4,442 | 6 | 16 | 1,059 | 1,131 | 524 | 607 | 141 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleContexts, GADTs #-}
module FDsFromGivens2 where
class C a b | a -> b where
cop :: a -> b -> ()
data KCC where
KCC :: C Char Char => () -> KCC
f :: C Char [a] => a -> a
f = undefined
bar :: KCC -> a -> a
bar (KCC _) = f
| ezyang/ghc | testsuite/tests/typecheck/should_fail/FDsFromGivens2.hs | bsd-3-clause | 297 | 0 | 9 | 72 | 114 | 62 | 52 | 10 | 1 |
{-# Language TypeFamilies #-}
{-# Language GeneralizedNewtypeDeriving #-}
module Main where
import RegExp.AST
import RegExp.Match (match)
import RegExp.BackMatch (backMatch)
import RegExp.Parser (parseRegExp)
import Ersatz
import Booleans
import Control.Monad
import Data.Char
import Data.List (intersperse)
import Data.Map (Map)
import qualified Data.Map as Map
import Prelude hiding ((&&),(||),all,and,any,or,not)
newtype Letter = Letter Bit5
deriving (Equatable, Show)
instance Variable Letter where
literally m = Letter <$> literally m
instance Codec Letter where
type Decoded Letter = Char
encode x = Letter (encode (fromIntegral (ord x - ord 'A')))
decode sol (Letter x) =
do w8 <- decode sol x
return (chr (fromIntegral w8 + ord 'A'))
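-- Illustrative examples: 'A' encodes to 0 and 'Z' to 25 in the underlying
-- 5-bit word, so the upper-case alphabet fits in a Bit5.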
-- | Matching using a faster algorithm unless there are backreferences
smartMatch :: RegExpFull Char -> [Letter] -> Bit
smartMatch regexp =
let regexp' = fmap encode regexp in
case simplify regexp' of
Just s -> match (===) s
Nothing -> backMatch regexp'
main :: IO ()
main =
do Just xs <- getModel problem
putStr (render xs)
problem :: MonadSAT s m => m (Map Pos Letter)
problem =
do cells <- sequence $ Map.fromList [ (x, exists) | x <- [minBound .. maxBound] ]
let r xs reStr = assert (smartMatch re inp)
where
re = either error id (parseRegExp reStr)
inp = map (cells Map.!) xs
r [P00,P01,P02,P03,P04,P05,P06] ".*H.*H.*"
r [P10,P11,P12,P13,P14,P15,P16,P17] "(DI|NS|TH|OM)*"
r [P20,P21,P22,P23,P24,P25,P26,P27,P28] "F.*[AO].*[AO].*"
r [P30,P31,P32,P33,P34,P35,P36,P37,P38,P39] "(O|RHH|MM)*"
r [P40,P41,P42,P43,P44,P45,P46,P47,P48,P49,P4a] ".*"
r [P50,P51,P52,P53,P54,P55,P56,P57,P58,P59,P5a,P5b] "C*MC(CCC|MM)*"
r [P60,P61,P62,P63,P64,P65,P66,P67,P68,P69,P6a,P6b,P6c] "[^C]*[^R]*III.*"
r [P70,P71,P72,P73,P74,P75,P76,P77,P78,P79,P7a,P7b] "(...?)\\1*"
r [P80,P81,P82,P83,P84,P85,P86,P87,P88,P89,P8a] "([^X]|XCC)*"
r [P90,P91,P92,P93,P94,P95,P96,P97,P98,P99] "(RR|HHH)*.?"
r [Pa0,Pa1,Pa2,Pa3,Pa4,Pa5,Pa6,Pa7,Pa8] "N.*X.X.X.*E"
r [Pb0,Pb1,Pb2,Pb3,Pb4,Pb5,Pb6,Pb7] "R*D*M*"
r [Pc0,Pc1,Pc2,Pc3,Pc4,Pc5,Pc6] ".(C|HH)*"
r [P00,P10,P20,P30,P40,P50,P60] "(ND|ET|IN)[^X]*"
r [P01,P11,P21,P31,P41,P51,P61,P70] "[CHMNOR]*I[CHMNOR]*"
r [P02,P12,P22,P32,P42,P52,P62,P71,P80] "P+(..)\\1.*"
r [P03,P13,P23,P33,P43,P53,P63,P72,P81,P90] "(E|CR|MN)*"
r [P04,P14,P24,P34,P44,P54,P64,P73,P82,P91,Pa0] "([^MC]|MM|CC)*"
r [P05,P15,P25,P35,P45,P55,P65,P74,P83,P92,Pa1,Pb0] "[AM]*CM(RC)*R?"
r [P06,P16,P26,P36,P46,P56,P66,P75,P84,P93,Pa2,Pb1,Pc0] ".*"
r [P17,P27,P37,P47,P57,P67,P76,P85,P94,Pa3,Pb2,Pc1] ".*PRR.*DDC.*"
r [P28,P38,P48,P58,P68,P77,P86,P95,Pa4,Pb3,Pc2] "(HHX|[^HX])*"
r [P39,P49,P59,P69,P78,P87,P96,Pa5,Pb4,Pc3] "([^EMC]|EM)*"
r [P4a,P5a,P6a,P79,P88,P97,Pa6,Pb5,Pc4] ".*OXR.*"
r [P5b,P6b,P7a,P89,P98,Pa7,Pb6,Pc5] ".*LR.*RL.*"
r [P6c,P7b,P8a,P99,Pa8,Pb7,Pc6] ".*SE.*UE.*"
r [Pc0,Pb0,Pa0,P90,P80,P70,P60] ".*G.*V.*H.*"
r [Pc1,Pb1,Pa1,P91,P81,P71,P61,P50] "[CR]*"
r [Pc2,Pb2,Pa2,P92,P82,P72,P62,P51,P40] ".*XEXM*"
r [Pc3,Pb3,Pa3,P93,P83,P73,P63,P52,P41,P30] ".*DD.*CCM.*"
r [Pc4,Pb4,Pa4,P94,P84,P74,P64,P53,P42,P31,P20] ".*XHCR.*X.*"
r [Pc5,Pb5,Pa5,P95,P85,P75,P65,P54,P43,P32,P21,P10] ".*(.)(.)(.)(.)\\4\\3\\2\\1.*"
r [Pc6,Pb6,Pa6,P96,P86,P76,P66,P55,P44,P33,P22,P11,P00] ".*(IN|SE|HI)"
r [Pb7,Pa7,P97,P87,P77,P67,P56,P45,P34,P23,P12,P01] "[^C]*MMM[^C]*"
r [Pa8,P98,P88,P78,P68,P57,P46,P35,P24,P13,P02] ".*(.)C\\1X\\1.*"
r [P99,P89,P79,P69,P58,P47,P36,P25,P14,P03] "[CEIMU]*OH[AEMOR]*"
r [P8a,P7a,P6a,P59,P48,P37,P26,P15,P04] "(RX|[^R])*"
r [P7b,P6b,P5a,P49,P38,P27,P16,P05] "[^M]*M[^M]*"
r [P6c,P5b,P4a,P39,P28,P17,P06] "(S|MM|HHH)*"
return cells
data Pos = P00|P01|P02|P03|P04|P05|P06
| P10|P11|P12|P13|P14|P15|P16|P17
| P20|P21|P22|P23|P24|P25|P26|P27|P28
| P30|P31|P32|P33|P34|P35|P36|P37|P38|P39
| P40|P41|P42|P43|P44|P45|P46|P47|P48|P49|P4a
| P50|P51|P52|P53|P54|P55|P56|P57|P58|P59|P5a|P5b
| P60|P61|P62|P63|P64|P65|P66|P67|P68|P69|P6a|P6b|P6c
| P70|P71|P72|P73|P74|P75|P76|P77|P78|P79|P7a|P7b
| P80|P81|P82|P83|P84|P85|P86|P87|P88|P89|P8a
| P90|P91|P92|P93|P94|P95|P96|P97|P98|P99
| Pa0|Pa1|Pa2|Pa3|Pa4|Pa5|Pa6|Pa7|Pa8
| Pb0|Pb1|Pb2|Pb3|Pb4|Pb5|Pb6|Pb7
| Pc0|Pc1|Pc2|Pc3|Pc4|Pc5|Pc6
deriving (Eq, Ord, Bounded, Enum, Read, Show)
render :: Map Pos Char -> String
render m = unlines
[ r 6 [P00,P01,P02,P03,P04,P05,P06]
, r 5 [P10,P11,P12,P13,P14,P15,P16,P17]
, r 4 [P20,P21,P22,P23,P24,P25,P26,P27,P28]
, r 3 [P30,P31,P32,P33,P34,P35,P36,P37,P38,P39]
, r 2 [P40,P41,P42,P43,P44,P45,P46,P47,P48,P49,P4a]
, r 1 [P50,P51,P52,P53,P54,P55,P56,P57,P58,P59,P5a,P5b]
, r 0 [P60,P61,P62,P63,P64,P65,P66,P67,P68,P69,P6a,P6b,P6c]
, r 1 [P70,P71,P72,P73,P74,P75,P76,P77,P78,P79,P7a,P7b]
, r 2 [P80,P81,P82,P83,P84,P85,P86,P87,P88,P89,P8a]
, r 3 [P90,P91,P92,P93,P94,P95,P96,P97,P98,P99]
, r 4 [Pa0,Pa1,Pa2,Pa3,Pa4,Pa5,Pa6,Pa7,Pa8]
, r 5 [Pb0,Pb1,Pb2,Pb3,Pb4,Pb5,Pb6,Pb7]
, r 6 [Pc0,Pc1,Pc2,Pc3,Pc4,Pc5,Pc6]
]
where
r n xs = replicate n ' ' ++ intersperse ' ' (map (m Map.!) xs)
| glguy/5puzzle | RegExp.hs | isc | 6,026 | 0 | 14 | 1,569 | 2,933 | 1,758 | 1,175 | 111 | 2 |
module Bowling (score, BowlingError(..)) where
data BowlingError = IncompleteGame
| InvalidRoll { rollIndex :: Int, rollValue :: Int }
deriving (Eq, Show)
score :: [Int] -> Either BowlingError Int
score rolls = error "You need to implement this function."
| exercism/xhaskell | exercises/practice/bowling/src/Bowling.hs | mit | 279 | 0 | 8 | 62 | 79 | 46 | 33 | 6 | 1 |
{-# LANGUAGE Safe #-}
{-# LANGUAGE DoAndIfThenElse #-}
module Control.Concurrent.Singular.Event.Primitive (
Event (), newEvent, always, sync
) where
import Control.Concurrent.Singular.Event.Status
import Data.List.Util
import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
import Control.Monad (unless)
import Data.Monoid (Monoid, mempty, mappend)
data BaseEvent a = BaseEvent {
commit :: !(IO (Maybe a)),
block :: StatusRef -> (a -> IO ()) -> IO ()
}
instance Functor BaseEvent where
fmap f (BaseEvent commit' block') = BaseEvent {
commit = fmap (fmap f) commit',
block = \status handler -> block' status (handler . f)
}
newtype Event a = Event { unEvent :: [BaseEvent a] }
newEvent :: IO (Maybe a) -> (StatusRef -> (a -> IO ()) -> IO ()) -> Event a
newEvent commit' block' = Event [BaseEvent commit' block']
always :: a -> Event a
always x = newEvent (return $ Just x) (\ _ _ -> return ())
instance Functor Event where
fmap f = Event . map (fmap f) . unEvent
instance Monoid (Event a) where
mempty = Event []
mappend (Event x) (Event y) = Event (x ++ y)
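-- | Synchronize on an event: poll the base events (in shuffled order) for
-- one that can commit immediately; if none can, fall back to blocking by
-- registering a handler on each base event and taking the first value
-- delivered.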
sync :: Event a -> IO a
sync (Event bases) = shuffle bases >>= commit'
where
commit' [] = block'
commit' (x:xs) = commit x >>= maybe (commit' xs) return
block' = do
status <- newStatusRef
output <- newEmptyMVar
let block'' [] = return ()
block'' (x:xs) = do
block x status $ putMVar output
status' <- readStatusRef status
unless (status' == Synced) $
block'' xs
shuffle bases >>= block''
takeMVar output
| YellPika/Hannel | src/Control/Concurrent/Singular/Event/Primitive.hs | mit | 1,667 | 0 | 17 | 445 | 643 | 333 | 310 | 46 | 3 |
{-# LANGUAGE TypeOperators #-}
import Numeric.Noise.Perlin
import System.Random (randomIO,randomRs,newStdGen,mkStdGen)
import Graphics.Gloss
import Graphics.Gloss.Data.Color (makeColor8)
import qualified Data.Array.Repa as R
type Seed = Int
rangeMap :: (Ord a) => b -> [(a,b)] -> a -> b
rangeMap def rs x = case dropWhile (\(a,_) -> x > a) rs of
(_,b):_ -> b
_______ -> def
main = do
heightSeed <- randomIO :: IO Int
treeSeed <- randomIO :: IO Int
let heightP = perlin heightSeed 16 (1/128) (1/2)
treeP = perlin treeSeed 16 (1/128) (1/2)
w = 1024 :: Int
h = 1024 :: Int
shape = (R.Z R.:. w R.:. h)
heightArray <- R.computeP $ R.fromFunction shape
(\(R.Z R.:.x R.:. y) -> ( fromIntegral x
, fromIntegral y
, rangeMap 255 [(-0.9,0),(0.25,130)] $
noiseValue heightP (fromIntegral x, fromIntegral y, 0)
)
) :: IO (R.Array R.U R.DIM2 (Float,Float,Int))
let heightPic = R.map (\(x,y,z) -> scale 1 1
$ translate (x - fromIntegral w / 2) (y - fromIntegral h / 2)
$ color (makeColor8 z z z 255)
$ rectangleSolid 1 1
) heightArray
let trees = randomPerlin heightSeed treeSeed (0,3) (w,h)
treePic = shapeMap (\x y z -> let fx = fromIntegral x
fy = fromIntegral y in
scale 1 1
$ translate (fx - fromIntegral w / 2) (fy - fromIntegral h / 2)
$ (if z>3.25 then color (makeColor8 0 255 0 255) else color (makeColor8 0 0 0 255))
$ rectangleSolid 1 1
) trees
display (FullScreen (1600, 1200)) black $ pictures $ R.toList heightPic
-- display (InWindow "Perlin Test" (1600, 1200) (0, 0)) black $ pictures $ R.toList treePic
shapeMap f array = R.fromFunction (R.extent array) (\sh@(R.Z R.:.x R.:. y) -> f x y $ array R.! sh)
randomPerlin :: Seed -- Perlin Seed
-> Seed -- Random Seed
-> (Double,Double) -- Random Range
-> (Int,Int) -- Matrix Width & Height
-> R.Array R.U R.DIM2 Double
randomPerlin pSeed rSeed range (w,h) = R.fromListUnboxed shape zips
where
perl = perlin pSeed 16 (1/128) (1/2)
shape = R.ix2 w h
rnds = randomRs range $ mkStdGen rSeed
zips = zipWith (\(x,y) rnd -> rnd + noiseValue perl (fromIntegral x, fromIntegral y, 0))
[(x,y) | x <- [0..w-1], y <- [0..h-1]]
rnds | RTS2013/RTS | tests/mapGeneration_test/PerlinNoise/HaskellPerlin.hs | mit | 2,801 | 0 | 22 | 1,089 | 1,036 | 551 | 485 | 52 | 2 |
module Data.BigBunnyAndDeer.DeerInfo
( findDeerEntry
, updateDeerInfo
, writeDeerInfo
, parseRawDeerInfo
, fetchDeerInfo
) where
import Data.Maybe
import Control.Arrow
import Control.Monad.IO.Class
import System.Directory
import qualified System.IO.Strict as SIO
import qualified Data.IntMap as IM
import Data.Default
import Data.Coerce
import Data.BigBunnyAndDeer.Type
deerEntryToPair :: DeerEntry -> (Int, Maybe Integer)
deerEntryToPair (DeerEntry a b) = (a,b)
updateDeerInfo :: DeerId -> Integer -> DeerInfo -> DeerInfo
updateDeerInfo did newTS = coerce $ IM.alter (Just . alterEntry) did
where
alterEntry :: Maybe DeerEntry -> DeerEntry
alterEntry old = case old of
Nothing -> DeerEntry 1 (Just newTS)
Just (DeerEntry tt _) -> DeerEntry (succ tt) (Just newTS)
fetchDeerInfo :: FilePath -> IO DeerInfo
fetchDeerInfo fp = do
b <- liftIO $ doesFileExist fp
if b
then parseRawDeerInfo <$> getRawDeerInfo fp
else return (coerce (def :: IM.IntMap DeerEntry))
getRawDeerInfo :: FilePath -> IO String
getRawDeerInfo = SIO.readFile
parseLine :: String -> (Int, (Int, Maybe Integer))
parseLine = read
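-- For example (illustrative), a persisted line such as
-- "(7,(3,Just 1462345678))" decodes to deer id 7 with an entry that has
-- been used 3 times, most recently at timestamp 1462345678.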
dumpDeerInfo :: DeerInfo -> String
dumpDeerInfo =
coerce >>>
IM.toList >>>
map (second deerEntryToPair >>> show) >>>
unlines
parseRawDeerInfo :: String -> DeerInfo
parseRawDeerInfo =
lines >>>
map (parseLine >>> second (uncurry DeerEntry)) >>>
IM.fromList >>> DeerInfo
findDeerEntry :: DeerInfo -> DeerId -> DeerEntry
findDeerEntry di did = fromMaybe def (IM.lookup did (coerce di))
writeDeerInfo :: FilePath -> DeerInfo -> IO ()
writeDeerInfo fp di = writeFile fp (dumpDeerInfo di)
| Javran/BigBunnyAndDeer | src/Data/BigBunnyAndDeer/DeerInfo.hs | mit | 1,674 | 0 | 13 | 316 | 532 | 283 | 249 | 48 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module System.Directory.Watchman.Expression
( Expression
, renderExpression
, true
, false
, System.Directory.Watchman.Expression.all
, System.Directory.Watchman.Expression.any
, (.&&)
, (.||)
, dirname
, dirname'
, empty
, exists
, match
, match'
, name
, name'
, System.Directory.Watchman.Expression.not
, size
, suffix
, type_
, caseSensitive
, caseInsensitive
, basename
, wholename
, depth
, includeDotFiles
, noEscape
) where
import Data.Foldable (foldl')
import Data.ByteString (ByteString)
import Data.Int
import qualified Data.Map.Strict as M
import qualified Data.Sequence as Seq
import System.Directory.Watchman.FileType
import System.Directory.Watchman.WFilePath
import System.Directory.Watchman.BSER
data CaseSensitivity
= CaseSensitive
| CaseInsensitive
deriving (Show, Eq, Ord)
class HasCaseSensitivityOption a where
setCaseSensitivity :: CaseSensitivity -> a -> a
data PathScope
= BaseName
| WholeName
deriving (Show, Eq, Ord)
class HasPathScopeOption a where
setPathScope :: PathScope -> a -> a
data Expression
= EAllOf ![Expression]
| EAnyOf ![Expression]
| EDirName !WFilePath !DirNameParams
| ETrue
| EFalse
| EEmpty
| EExists
| EMatch !ByteString !MatchParams
| EName ![WFilePath] !NameParams
| ESince () -- TODO !!!
| ENot !Expression
| ESize !(Comparison Int64)
| ESuffix !ByteString
| EType !FileType
deriving (Show, Eq, Ord)
data DirNameParams = DirNameParams
{ _DirNameParams_Depth :: !(Comparison Int)
, _DirNameParams_CaseSensitivity :: !CaseSensitivity
}
deriving (Show, Eq, Ord)
defaultDirNameParams :: DirNameParams
defaultDirNameParams = DirNameParams
{ _DirNameParams_Depth = Ge 0
, _DirNameParams_CaseSensitivity = CaseSensitive
}
instance HasCaseSensitivityOption DirNameParams where
setCaseSensitivity c x = x { _DirNameParams_CaseSensitivity = c }
data MatchParams = MatchParams
{ _MatchParams_CaseSensitivity :: !CaseSensitivity
, _MatchParams_PathScope :: !PathScope
, _MatchParams_IncludeDotFiles :: !Bool
, _MatchParams_NoEscape :: !Bool
}
deriving (Show, Eq, Ord)
defaultMatchParams :: MatchParams
defaultMatchParams = MatchParams
{ _MatchParams_CaseSensitivity = CaseSensitive
, _MatchParams_PathScope = BaseName
, _MatchParams_IncludeDotFiles = False
, _MatchParams_NoEscape = False
}
instance HasCaseSensitivityOption MatchParams where
setCaseSensitivity c x = x { _MatchParams_CaseSensitivity = c }
instance HasPathScopeOption MatchParams where
setPathScope c x = x { _MatchParams_PathScope = c }
data NameParams = NameParams
{ _NameParams_CaseSensitivity :: !CaseSensitivity
, _NameParams_PathScope :: !PathScope
}
deriving (Show, Eq, Ord)
defaultNameParams :: NameParams
defaultNameParams = NameParams
{ _NameParams_CaseSensitivity = CaseSensitive
, _NameParams_PathScope = BaseName
}
instance HasCaseSensitivityOption NameParams where
setCaseSensitivity c x = x { _NameParams_CaseSensitivity = c }
instance HasPathScopeOption NameParams where
setPathScope c x = x { _NameParams_PathScope = c }
true :: Expression
true = ETrue
false :: Expression
false = EFalse
all :: [Expression] -> Expression
all = EAllOf
any :: [Expression] -> Expression
any = EAnyOf
infixr 3 .&&
(.&&) :: Expression -> Expression -> Expression
lhs .&& rhs = EAllOf [lhs, rhs]
infixr 2 .||
(.||) :: Expression -> Expression -> Expression
lhs .|| rhs = EAnyOf [lhs, rhs]
dirname :: WFilePath -> Expression
dirname path = EDirName path defaultDirNameParams
dirname' :: WFilePath -> [DirNameParams -> DirNameParams] -> Expression
dirname' path modifiers = EDirName path (applyModifiers defaultDirNameParams modifiers)
empty :: Expression
empty = EEmpty
exists :: Expression
exists = EExists
match :: ByteString -> Expression
match pattern = EMatch pattern defaultMatchParams
match' :: ByteString -> [MatchParams -> MatchParams] -> Expression
match' pattern modifiers = EMatch pattern (applyModifiers defaultMatchParams modifiers)
name :: [WFilePath] -> Expression
name files = EName files defaultNameParams
name' :: [WFilePath] -> [NameParams -> NameParams] -> Expression
name' files modifiers = EName files (applyModifiers defaultNameParams modifiers)
not :: Expression -> Expression
not = ENot
size :: Comparison Int64 -> Expression
size = ESize
suffix :: ByteString -> Expression
suffix = ESuffix
type_ :: FileType -> Expression
type_ = EType
applyModifiers :: a -> [a -> a] -> a
applyModifiers def modifiers = foldl' (\x f -> f x) def modifiers
caseSensitive :: HasCaseSensitivityOption a => a -> a
caseSensitive = setCaseSensitivity CaseSensitive
caseInsensitive :: HasCaseSensitivityOption a => a -> a
caseInsensitive = setCaseSensitivity CaseInsensitive
basename :: HasPathScopeOption a => a -> a
basename = setPathScope BaseName
wholename :: HasPathScopeOption a => a -> a
wholename = setPathScope WholeName
depth :: Comparison Int -> DirNameParams -> DirNameParams
depth c x = x { _DirNameParams_Depth = c }
includeDotFiles :: MatchParams -> MatchParams
includeDotFiles x = x { _MatchParams_IncludeDotFiles = True }
noEscape :: MatchParams -> MatchParams
noEscape x = x { _MatchParams_NoEscape = True }
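-- An illustrative combination of these builders: match Haskell files
-- case-insensitively against the whole path, requiring that they exist:
--
--   match' "*.hs" [caseInsensitive, wholename] .&& exists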
data Comparison a
= Eq !a -- ^ Equal
| Ne !a -- ^ Not Equal
| Gt !a -- ^ Greater Than
| Ge !a -- ^ Greater Than or Equal
| Lt !a -- ^ Less Than
| Le !a -- ^ Less Than or Equal
deriving (Show, Eq, Ord)
renderPathScope :: PathScope -> BSERValue
renderPathScope BaseName = BSERString "basename"
renderPathScope WholeName = BSERString "wholename"
renderOperator :: Comparison a -> BSERValue
renderOperator (Eq _) = BSERString "eq"
renderOperator (Ne _) = BSERString "ne"
renderOperator (Gt _) = BSERString "gt"
renderOperator (Ge _) = BSERString "ge"
renderOperator (Lt _) = BSERString "lt"
renderOperator (Le _) = BSERString "le"
comparisonValue :: Integral n => Comparison n -> BSERValue
comparisonValue (Eq v) = compactBSERInt v
comparisonValue (Ne v) = compactBSERInt v
comparisonValue (Gt v) = compactBSERInt v
comparisonValue (Ge v) = compactBSERInt v
comparisonValue (Lt v) = compactBSERInt v
comparisonValue (Le v) = compactBSERInt v
renderExpression :: Expression -> BSERValue
renderExpression (EAllOf exprs) =
BSERArray (BSERString "allof" Seq.<| Seq.fromList (map renderExpression exprs))
renderExpression (EAnyOf exprs) =
BSERArray (BSERString "anyof" Seq.<| Seq.fromList (map renderExpression exprs))
renderExpression (EDirName (WFilePath p) (DirNameParams d caseSensitivity)) =
BSERArray (Seq.fromList [BSERString exprName, BSERString p, BSERArray (Seq.fromList [BSERString "depth", renderOperator d, comparisonValue d])])
where
exprName = case caseSensitivity of { CaseSensitive -> "dirname"; CaseInsensitive -> "idirname" }
renderExpression ETrue = BSERString "true"
renderExpression EFalse = BSERString "false"
renderExpression EEmpty = BSERString "empty"
renderExpression EExists = BSERString "exists"
renderExpression (EMatch pattern (MatchParams caseSensitivity pathScope includeDotFiles_ noEscape_)) =
BSERArray (Seq.fromList [BSERString exprName, BSERString pattern, renderPathScope pathScope] Seq.>< flags)
where
exprName = case caseSensitivity of { CaseSensitive -> "match"; CaseInsensitive -> "imatch" }
flagsMap = M.unions
[ if includeDotFiles_ then M.singleton "includedotfiles" (BSERBool True) else M.empty
, if noEscape_ then M.singleton "noescape" (BSERBool True) else M.empty
]
flags = if M.null flagsMap then Seq.empty else Seq.singleton (BSERObject flagsMap)
renderExpression (EName files (NameParams caseSensitivity pathScope)) =
BSERArray (Seq.fromList [BSERString exprName, BSERArray (Seq.fromList (map (BSERString . toByteString) files)), renderPathScope pathScope])
where
exprName = case caseSensitivity of { CaseSensitive -> "name"; CaseInsensitive -> "iname" }
renderExpression (ESince _) = error "TODO 928352935423"
renderExpression (ENot expr) =
BSERArray (Seq.fromList [BSERString "not", renderExpression expr])
renderExpression (ESize s) =
BSERArray (Seq.fromList [BSERString "size", renderOperator s, comparisonValue s])
renderExpression (ESuffix s) =
BSERArray (Seq.fromList [BSERString "suffix", BSERString s])
renderExpression (EType t) =
BSERArray (Seq.fromList [BSERString "type", BSERString (fileTypeChar t)])
| bitc/hs-watchman | src/System/Directory/Watchman/Expression.hs | mit | 8,617 | 0 | 15 | 1,560 | 2,417 | 1,292 | 1,125 | 265 | 7 |
module Problem7 ( flattenList, NestedList(..) ) where
data NestedList a = Elem a | List [NestedList a]
flattenList :: NestedList a -> [a]
flattenList (Elem a) = [a]
flattenList (List a) = concatMap flattenList a | chanind/haskell-99-problems | Problem7.hs | mit | 213 | 0 | 8 | 36 | 88 | 49 | 39 | 5 | 1 |
module Sudoku.Strategy.HiddenSingle where
import Prelude
import Sudoku
import Sudoku.Strategy
import Data.List
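-- Note (added for clarity): the "hidden single" strategy looks at all
-- candidates within a row, column or block; a candidate digit that occurs
-- exactly once in such a unit must go in the cell that holds it. The functions
-- below concatenate the candidates per unit, keep the digits occurring exactly
-- once, and intersect them with each cell's own candidate list.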
concatRowCandidates :: Sudoku -> Int -> String
concatRowCandidates su i = concat [ findCandidates su i j | j<-[0..(columnCount su - 1)] ]
concatColumnCandidates :: Sudoku -> Int -> String
concatColumnCandidates su j = concat [ findCandidates su i j | i<-[0..(rowCount su - 1)] ]
concatBlockCandidates :: Sudoku -> Int -> Int -> String
concatBlockCandidates su i j
= concat [ findCandidates su k l | k<-[i'..i''], l<-[j'..j''] ]
where
h = blockHeight su
w = blockWidth su
i' = (div i h) * h
j' = (div j w) * w
i'' = i' + h - 1
j'' = j' + w - 1
findUniqueRowCandidates :: Sudoku -> Int -> Int -> String
findUniqueRowCandidates su i _ = concat $ filter (\x -> length x == 1) $ group $ sort $ concatRowCandidates su i
findUniqueColumnCandidates :: Sudoku -> Int -> Int -> String
findUniqueColumnCandidates su _ j = concat $ filter (\x -> length x == 1) $ group $ sort $ concatColumnCandidates su j
findUniqueBlockCandidates :: Sudoku -> Int -> Int -> String
findUniqueBlockCandidates su i j = concat $ filter (\x -> length x == 1) $ group $ sort $ concatBlockCandidates su i j
resolveCandidates :: Sudoku -> Int -> Int -> (Char, String)
resolveCandidates su i j | not $ null urci = (s, urci)
| not $ null ucci = (s, ucci)
| not $ null ubci = (s, ubci)
| otherwise = (s, cs)
where
s = su !! i !! j
cs = findCandidates su i j
urci = intersect cs (findUniqueRowCandidates su i j)
ucci = intersect cs (findUniqueColumnCandidates su i j)
ubci = intersect cs (findUniqueBlockCandidates su i j)
resolveAllCandidates :: Sudoku -> [[(Char, String)]]
resolveAllCandidates su = mapWithIndeces su (\i j -> resolveCandidates su i j)
solve :: Sudoku -> Sudoku
solve su = run su resolveAllCandidates
| thomasbrus/sudoku-solver | src/Sudoku/Strategy/HiddenSingle.hs | mit | 2,092 | 0 | 13 | 622 | 766 | 395 | 371 | 38 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module TypePlay.Check.SimplyTyped where
import Prelude hiding (map,foldl,elem)
import Data.List (map,foldl,(\\),union,elem)
import Data.Text (Text)
import qualified Data.Text as T
import Control.Monad (when)
import Control.Monad.Trans.Error (throwError)
import Data.Monoid ((<>))
type Sym = Text
data Expr
= Var Sym
| App Expr Expr
| Lam Sym Type Expr
deriving (Eq, Read, Show)
-- Simple Typing (t -> t B)
data Type
= Base
| Arrow Type Type
deriving (Eq, Read, Show)
-- x e e \x:t.e
-- Type checker will take an expression and return the type.
-- The TC will also need the types of all free vars in the expr.
-- Representing the environment is a list of vars and their respective types.
newtype Env = Env [(Sym, Type)] deriving (Show)
initialEnv :: Env
initialEnv = Env []
extend :: Sym -> Type -> Env -> Env
extend s t (Env r) = Env ((s,t) : r)
-- Type checking will be written using a monadic style
-- so we can have some error handling funnies.
type ErrorMsg = Text
type TC a = Either ErrorMsg a
findVar :: Env -> Sym -> TC Type
findVar (Env r) s =
case lookup s r of
Just t -> return t
Nothing -> throwError $ "Cannot find variable " <> s
tCheck :: Env -> Expr -> TC Type
tCheck r (Var s) =
findVar r s
tCheck r (App f a) = do
tf <- tCheck r f
case tf of
Arrow at rt -> do
ta <- tCheck r a
when (ta /= at) $ throwError "Bad function argument type"
return rt
_ -> throwError "Non-function in application"
tCheck r (Lam s t e) = do
let r' = extend s t r
te <- tCheck r' e
return $ Arrow t te
typeCheck :: Expr -> Type
typeCheck e =
case tCheck initialEnv e of
Left msg -> error ("Type error:\n" <> msg)
Right t -> t
-- (\x:B->B. \y:B. x)(\z:B. z)
-- App (Lam "x" (Arrow Base Base) $ Lam "y" Base $ Var "x") (Lam "z" Base $ Var "z")
-- In beta reduction, on the lambda form is a redex.
-- (\x.e)a reduces to e[x:=a]
-- This means that all (free) occurrences of 'x' in 'e' become 'a'
-- alpha-substitution, which is simply renaming a bound variable.
-- \x.x can be changed to \y.y
-- Start with normal order to WHNF. Weak Head Normal Form.
-- In WHNF - ensure there is no redex along the "spine" of the expr.
-- Starting from the root and following the left branch in applications.
-- Walk down the spine collecting arguments (right branch of App) until
-- we reach a lambda or a variable.
-- If we reach a variable we have WHNF so reconsititute the App again.
-- If we reach a lambda we get to the crux of evaluation
-- We need a Beta-reduction
-- for App (Lam v e) a then v[e:=a]
-- we use the `subst` function for this
whnf :: Expr -> Expr
whnf ee = spine ee []
where
spine (App f a) as = spine f (a:as)
    spine (Lam s _ e) (a:as) = spine (subst s a e) as
spine f as = foldl App f as
-- Free variables are those that occur in an Expr, but are not bound
-- within that Expr. Collect them in a list.
freeVars :: Expr -> [Sym]
freeVars (Var s) = [s]
freeVars (App f a) = freeVars f `union` freeVars a
freeVars (Lam i _ e) = freeVars e \\ [i]
-- Now for substitution
-- subst b[v:=x]
subst :: Sym -> Expr -> Expr -> Expr
subst v x b = sub b
where
sub e@(Var i) = if i == v then x else e
sub (App f a) = App (sub f) (sub a)
    sub (Lam i t e) =
      if v == i then Lam i t e
      else if i `elem` fvx then
        let
          i' = cloneSym e i
          e' = substVar i i' e
        in Lam i' t (sub e')
      else Lam i t (sub e)
fvx = freeVars x
cloneSym e i = loop i
where
        loop i' = if i' `elem` vars then loop (i' <> "'") else i'
vars = fvx <> freeVars e
substVar :: Sym -> Sym -> Expr -> Expr
substVar s s' e = subst s (Var s') e
-- For comparing Exprs without alpha-conversions
alphaEq :: Expr -> Expr -> Bool
alphaEq (Var v) (Var v') = v == v'
alphaEq (App f a) (App f' a') = alphaEq f f' && alphaEq a a'
alphaEq (Lam s _ e) (Lam s' _ e') = alphaEq e (substVar s' s e')
alphaEq _ _ = False
-- Reduction to Normal Form (where no redexes remain)
nf :: Expr -> Expr
nf ee = spine ee []
where
spine (App f a) as = spine f (a:as)
    spine (Lam s t e) [] = Lam s t (nf e)
    spine (Lam s _ e) (a:as) = spine (subst s a e) as
spine f as = app f as
app f as = foldl App f (map nf as)
betaEq :: Expr -> Expr -> Bool
betaEq e1 e2 = alphaEq (nf e1) (nf e2)
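-- A small usage sketch (added for illustration; 'idBase' is not part of the
-- original file): the identity function on the base type checks to
-- Base -> Base, and applying it beta-reduces away.
idBase :: Expr
idBase = Lam "x" Base (Var "x")

_ = typeCheck idBase                          -- Arrow Base Base
_ = betaEq (App idBase (Var "y")) (Var "y")   -- True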
-- Testing !
--[z,s,m,n] = map (Var . (:[])) "zsmn"
--app2 f x y = App (App f x) y
--zero = Lam "s" $ Lam "z" z
--one = Lam "s" $ Lam "z" $ App s z
--two = Lam "s" $ Lam "z" $ App s $ App s z
--three = Lam "s" $ Lam "z" $ App s $ App s $ App s z
--plus = Lam "m" $ Lam "n" $ Lam "s" $ Lam "z" $ app2 m s (app2 n s z) | mankyKitty/TypePlay | src/TypePlay/Check/SimplyTyped.hs | mit | 4,823 | 40 | 15 | 1,397 | 1,505 | 790 | 715 | 92 | 7 |
{-# LANGUAGE OverloadedStrings #-}
import System.Environment
import Globals
import ProcessChain
main :: IO ()
main = do
args <- getArgs
case args of
[] -> statsOn gDefaultUniverse
u:_ -> statsOn u
| mihaimaruseac/petulant-octo-avenger | src/Main.hs | mit | 212 | 0 | 10 | 46 | 66 | 33 | 33 | 10 | 2 |
{-
This file handles the passing of parameters and manages things to do with
the fields within a header comment.
Author(s): Lewis Deane
License: MIT
Last Modified: 20/7/2016
-}
module FieldTools (getFields) where
import Control.Applicative
import Data.List (isPrefixOf, sortBy, elemIndex)
import Data.List.Split (splitOn)
import Data.String.Utils (replace)
import Data.Time.Calendar
import Data.Time.Clock
import qualified Config as C
data Field = Author | Documentation | Email | License | LastModified | Maintainer | Website deriving (Show, Eq)
data State = Visible | Hidden | Custom String deriving (Show, Eq)
data FieldState = FieldState { field :: Field, state :: State } deriving (Show)
type Params = [String]
-- Define the order in which we want the various fields to appear in.
fieldOrder :: [Field]
fieldOrder = [Author, Maintainer, Email, Website, License, Documentation, LastModified]
-- Sort the field order by the order in which we defined above.
sortFields :: [FieldState] -> [FieldState]
sortFields x = sortBy f x
where f a b = compare (k a) (k b)
k y = (field y) `elemIndex` fieldOrder
-- Parse the parameters passed to the function so we can work out what we want with each field.
parseParams :: Params -> [FieldState]
parseParams [] = []
parseParams (x:xs) = if "-" `isPrefixOf` x
then FieldState { field = field', state = Hidden } : (parseParams xs)
else if length xs > 0
then if (f . head) xs
then FieldState { field = field', state = Custom (head xs)} : (parseParams (tail xs))
else FieldState { field = field', state = Visible} : (parseParams xs)
else FieldState { field = field', state = Visible } : (parseParams xs)
where field' = (getFieldFromShortcut . tail) x
f a = not $ any (\x -> x `isPrefixOf` a) ["-", "+"]
-- Merges the default fields with the parameters we have just passed.
mergeWithDefaultFields :: [FieldState] -> IO [FieldState]
mergeWithDefaultFields fields = do
def <- map (\x -> FieldState {field = x, state = Visible}) <$> getDefaultFields
(return . sortFields) $ merge def fields
-- Define what we want to happen when we are merging fields.
merge :: [FieldState] -> [FieldState] -> [FieldState]
merge def fields = foldl f def fields
where f list e = if any (\z -> field z == field e) list then map (m e) list else e : list
m new old = if field new == field old
then new
else old
-- Define what value we want respective fields to take.
fieldValue :: Field -> IO String
fieldValue x | x == Author = author
| x == Documentation = doc
| x == Email = email
| x == Maintainer = maintainer
| x == License = license
| x == LastModified = date
| x == Website = website
| otherwise = error "No such field."
-- Define what title we want respective fields to have.
fieldTitle :: Field -> String
fieldTitle x | x == Author = "Author(s)"
| x == Documentation = "Documentation"
| x == Email = "Email"
| x == Maintainer = "Maintainer(s)"
| x == License = "License"
| x == LastModified = "Last Modified"
| x == Website = "Website"
| otherwise = error "No such field."
-- Gets the default fields.
getDefaultFields :: IO [Field]
getDefaultFields = (map getFieldFromShortcut . splitOn ",") <$> C.readValue "default-fields"
-- Gets the appropriate field from the shortcut.
getFieldFromShortcut :: String -> Field
getFieldFromShortcut x | x == "a" = Author
| x == "d" = Documentation
| x == "e" = Email
| x == "m" = Maintainer
| x == "l" = License
| x == "lm" = LastModified
| x == "w" = Website
| otherwise = error $ x ++ " is not a valid field."
-- Returns a list of fields that we want after filtering out ones not wanted.
getFields :: Params -> IO [(String, String)]
getFields fields = do
merged <- (mergeWithDefaultFields . parseParams) fields
let merged' = filter (not . isHidden) merged
isHidden = (\x -> state x == Hidden)
sequence $ map extractValue merged'
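-- Example (added for illustration; actual values come from the user's config
-- file): getFields ["+a", "-lm", "+l", "MIT"] keeps the author field with its
-- configured value, hides the Last Modified field, and gives the License field
-- the custom value "MIT".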
-- Pairs a field with it value. Uses the default value unless the state is 'Custom', then we take the custom value.
extractValue :: FieldState -> IO (String, String)
extractValue FieldState { field = a, state = b } = if isCustom b
then return $ (fieldTitle a, getCustomValue b)
else fieldValue a >>= (return . (\y -> (fieldTitle a, y)))
-- Decides if the state passed in is 'Custom' or not.
isCustom :: State -> Bool
isCustom (Custom _) = True
isCustom _ = False
-- Gets the value from inside the 'Custom' wrapper.
getCustomValue :: State -> String
getCustomValue (Custom x) = x
getCustomValue _ = error "Custom state not passed to function."
-- Gets the value associated with the 'author' key in the config file.
author :: IO String
author = C.readValue "author"
-- Gets the value associated with the 'documentation' key in the config file.
doc :: IO String
doc = C.readValue "doc"
-- Gets the value associated with the 'email' key in the config file.
email :: IO String
email = C.readValue "email"
-- Gets the value associated with the 'maintainer' key in the config file.
maintainer :: IO String
maintainer = C.readValue "maintainer"
-- Gets the value associated with the 'license' key in the config file.
license :: IO String
license = C.readValue "license"
-- Gets the value associated with the 'website' key in the config file.
website :: IO String
website = C.readValue "website"
-- Gets the value associated with the 'comment-width' key in the config file.
commentWidth :: IO String
commentWidth = C.readValue "comment-width"
-- Gets todays date in the format the user has specificied in the config file.
date :: IO String
date = do
ct <- getCurrentTime
dformat <- C.readValue "date-format"
let f (y, m, d) = (replace "dd" (show d) . replace "mm" (show m) . replace "yy" ((drop 2 . show) y) . replace "yyyy" (show y)) dformat
(return . f . toGregorian . utctDay) ct
| lewisjdeane/Comet | FieldTools.hs | mit | 7,335 | 0 | 19 | 2,676 | 1,691 | 893 | 798 | 104 | 4 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- |
-- The types and functions are trivial and self-descriptive,
-- hence this sentence is the sole documentation you get on them.
module Success.Pure
(
Success,
-- * Creation
nothing,
failure,
success,
-- * Execution
asEither,
asMaybe,
)
where
import Prelude
import Data.Foldable
import Data.Traversable
import Control.Applicative
import Control.Monad
import Control.Monad.Error.Class
newtype Success a b =
Success (Either (Maybe a) b)
deriving (Functor, Applicative, Monad, MonadError (Maybe a), Show, Foldable, Traversable)
instance Alternative (Success a) where
{-# INLINE empty #-}
empty =
Success (Left Nothing)
{-# INLINE (<|>) #-}
(<|>) =
\case
Success (Right x) -> const (Success (Right x))
Success (Left _) -> id
instance MonadPlus (Success a) where
{-# INLINE mzero #-}
mzero =
empty
{-# INLINE mplus #-}
mplus =
(<|>)
{-# INLINE nothing #-}
nothing :: Success a b
nothing =
Success (Left Nothing)
{-# INLINE failure #-}
failure :: a -> Success a b
failure failure =
Success (Left (Just failure))
{-# INLINE success #-}
success :: b -> Success a b
success =
pure
{-# INLINE asEither #-}
asEither :: Success a b -> Either (Maybe a) b
asEither (Success x) =
x
{-# INLINE asMaybe #-}
asMaybe :: Success a b -> Maybe b
asMaybe (Success x) =
case x of
Left _ -> Nothing
Right x -> Just x
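-- Usage sketch (added; not part of the original module):
--
-- >>> asMaybe (failure "boom" <|> success 42)
-- Just 42
--
-- >>> asEither (nothing :: Success String Int)
-- Left Nothing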
| nikita-volkov/success | library/Success/Pure.hs | mit | 1,387 | 0 | 13 | 310 | 417 | 226 | 191 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Static (mkEmbedded) where
import Crypto.Hash.MD5 (hashlazy)
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Network.Mime (MimeType)
import System.FilePath ((</>))
import WaiAppStatic.Storage.Embedded
hash :: BL.ByteString -> T.Text
hash = T.take 8 . T.decodeUtf8 . B64.encode . hashlazy
staticFiles :: [(FilePath, MimeType)]
staticFiles = [
("index.html" , "text/html")
, ("main.js" , "application/x-javascript")
, ("html-sanitizer.js" , "application/x-javascript")
, ("ext.js" , "application/x-javascript")
, ("FileSaver.js" , "application/x-javascript")
, ("main.css" , "text/css")
]
embedFile :: (FilePath, MimeType) -> IO EmbeddableEntry
embedFile (file, mime) = do
f <- BL.readFile $ "static" </> file
return $ EmbeddableEntry {
eLocation = T.pack file
, eMimeType = mime
, eContent = Left (hash f, f)
}
mkEmbedded :: IO [EmbeddableEntry]
mkEmbedded = mapM embedFile staticFiles
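-- Note (added): 'mkEmbedded' is intended to be consumed from a Template
-- Haskell splice in another module, e.g. something like
-- @$(mkSettings mkEmbedded)@ from "WaiAppStatic.Storage.Embedded"; the exact
-- splice name is an assumption, check the wai-app-static version in use.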
| bitemyapp/hterm | Static.hs | mit | 1,326 | 0 | 12 | 410 | 310 | 188 | 122 | 29 | 1 |
add (x,y) = x + y
add' x y = x + y
second xs = head (tail xs)
swap (x, y) = (y, x)
pair x y = (x, y) -- a -> b -> (a, b)
double x = x * 2 -- Num a => a -> a
palindrome xs = reverse xs == xs
-- How is the type of this function determined?
twice f x = f (f x) -- (a -> a) -> a -> a
f xs = take 3 (reverse xs)
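-- Worked examples (added): second [1,2,3] == 2, twice double 3 == 12,
-- f [1..10] == [10,9,8] and twice f [1..10] == [8,9,10].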
| jugalps/edX | FP101x/week2/week2.hs | mit | 314 | 0 | 7 | 96 | 156 | 81 | 75 | 9 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( getApplicationDev
, appMain
, develMain
, makeFoundation
-- * for DevelMain
, getApplicationRepl
, shutdownApp
-- * for GHCI
, handler
, db
) where
import Control.Monad.Logger (liftLoc, runLoggingT)
import Database.Persist.Sqlite (createSqlitePool, runSqlPool,
sqlDatabase, sqlPoolSize)
import Import
import Language.Haskell.TH.Syntax (qLocation)
import Network.Wai.Handler.Warp (Settings, defaultSettings,
defaultShouldDisplayException,
runSettings, setHost,
setOnException, setPort, getPort)
import Network.Wai.Middleware.RequestLogger (Destination (Logger),
IPAddrSource (..),
OutputFormat (..), destination,
mkRequestLogger, outputFormat)
import System.Log.FastLogger (defaultBufSize, newStdoutLoggerSet,
toLogStr)
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Common
import Handler.Home
import Handler.Game
import Handler.GameHome
-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
mkYesodDispatch "App" resourcesApp
-- | This function allocates resources (such as a database connection pool),
-- performs initialization and returns a foundation datatype value. This is also
-- the place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeFoundation :: AppSettings -> IO App
makeFoundation appSettings = do
-- Some basic initializations: HTTP connection manager, logger, and static
-- subsite.
appHttpManager <- newManager
appLogger <- newStdoutLoggerSet defaultBufSize >>= makeYesodLogger
appStatic <-
(if appMutableStatic appSettings then staticDevel else static)
(appStaticDir appSettings)
-- We need a log function to create a connection pool. We need a connection
-- pool to create our foundation. And we need our foundation to get a
-- logging function. To get out of this loop, we initially create a
-- temporary foundation without a real connection pool, get a log function
-- from there, and then create the real foundation.
let mkFoundation appConnPool = App {..}
tempFoundation = mkFoundation $ error "connPool forced in tempFoundation"
logFunc = messageLoggerSource tempFoundation appLogger
-- Create the database connection pool
pool <- flip runLoggingT logFunc $ createSqlitePool
(sqlDatabase $ appDatabaseConf appSettings)
(sqlPoolSize $ appDatabaseConf appSettings)
-- Perform database migration using our application's logging settings.
runLoggingT (runSqlPool (runMigration migrateAll) pool) logFunc
-- Return the foundation
return $ mkFoundation pool
-- | Convert our foundation to a WAI Application by calling @toWaiAppPlain@ and
-- applying some additional middlewares.
makeApplication :: App -> IO Application
makeApplication foundation = do
logWare <- mkRequestLogger def
{ outputFormat =
if appDetailedRequestLogging $ appSettings foundation
then Detailed True
else Apache
(if appIpFromHeader $ appSettings foundation
then FromFallback
else FromSocket)
, destination = Logger $ loggerSet $ appLogger foundation
}
-- Create the WAI application and apply middlewares
appPlain <- toWaiAppPlain foundation
return $ logWare $ defaultMiddlewaresNoLogging appPlain
-- | Warp settings for the given foundation value.
warpSettings :: App -> Settings
warpSettings foundation =
setPort (appPort $ appSettings foundation)
$ setHost (appHost $ appSettings foundation)
$ setOnException (\_req e ->
when (defaultShouldDisplayException e) $ messageLoggerSource
foundation
(appLogger foundation)
$(qLocation >>= liftLoc)
"yesod"
LevelError
(toLogStr $ "Exception from Warp: " ++ show e))
defaultSettings
-- | For yesod devel, return the Warp settings and WAI Application.
getApplicationDev :: IO (Settings, Application)
getApplicationDev = do
settings <- getAppSettings
foundation <- makeFoundation settings
wsettings <- getDevSettings $ warpSettings foundation
app <- makeApplication foundation
return (wsettings, app)
getAppSettings :: IO AppSettings
getAppSettings = loadAppSettings [configSettingsYml] [] useEnv
-- | main function for use by yesod devel
develMain :: IO ()
develMain = develMainHelper getApplicationDev
-- | The @main@ function for an executable running this site.
appMain :: IO ()
appMain = do
-- Get the settings from all relevant sources
settings <- loadAppSettingsArgs
-- fall back to compile-time values, set to [] to require values at runtime
[configSettingsYmlValue]
-- allow environment variables to override
useEnv
-- Generate the foundation from the settings
foundation <- makeFoundation settings
-- Generate a WAI Application from the foundation
app <- makeApplication foundation
-- Run the application with Warp
runSettings (warpSettings foundation) app
--------------------------------------------------------------
-- Functions for DevelMain.hs (a way to run the app from GHCi)
--------------------------------------------------------------
getApplicationRepl :: IO (Int, App, Application)
getApplicationRepl = do
settings <- getAppSettings
foundation <- makeFoundation settings
wsettings <- getDevSettings $ warpSettings foundation
app1 <- makeApplication foundation
return (getPort wsettings, foundation, app1)
shutdownApp :: App -> IO ()
shutdownApp _ = return ()
---------------------------------------------
-- Functions for use in development with GHCi
---------------------------------------------
-- | Run a handler
handler :: Handler a -> IO a
handler h = getAppSettings >>= makeFoundation >>= flip unsafeHandler h
-- | Run DB queries
db :: ReaderT SqlBackend (HandlerT App IO) a -> IO a
db = handler . runDB
| total-git/missingno | yesodMissingNo/Application.hs | mit | 6,654 | 0 | 16 | 1,722 | 1,019 | 545 | 474 | -1 | -1 |
-- Copyright (C) 2008 Diego Souza <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, write to the Free Software Foundation, Inc.,
-- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
module Battleship.Environment (water_drag, water_fudge_factor) where
import Battleship.Vector as V
-- * Drag coefficient (fudge factor)
water_fudge_factor :: (Num a) => a
water_fudge_factor = 100
-- * Calculates the water drag force imposed on a given object.
--
-- The simplified model for this is as follows:
-- $drag = \frac{1}{2} p_f u^2 a_f k$
--
-- Where,
-- $p_f$: Density of water;
-- $a_f$: Frontal area;
-- $k$: Drag coefficient (fudge factor);
water_drag :: V.VectorT Double -> Double -> V.VectorT Double
water_drag u af = V.mscalar (0.5 * af * k) u2
where
u2 = V.vmap (\v -> -1 * (signum v) * (v**2)) u
k = water_fudge_factor
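-- Worked example (added): for a frontal area of 2 m^2 and a velocity component
-- of 1 m/s, the drag magnitude is 0.5 * 2 * 100 * 1^2 = 100, directed against
-- the motion (u2 flips the sign of each component). The fluid density $p_f$
-- from the formula above appears to be absorbed into the fudge factor here.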
-- * Contains all aspects of the simulated world
-- data World = World
-- {
-- -- * The current position in time.
-- --
-- -- The tick is defined as $t = \frac{1}{25} \texttt{seconds}$.
-- -- tick :: Integer
-- }
-- deriving (Show,Eq)
| dgvncsz0f/boats | src/main/haskell/Battleship/Environment.hs | gpl-2.0 | 1,766 | 0 | 13 | 401 | 175 | 111 | 64 | 8 | 1 |
{-#LANGUAGE FlexibleContexts #-}
module Carnap.Calculi.NaturalDeduction.Parser.Hardegree (toProofTreeHardegree, toDeductionHardegree)
where
import Data.Tree
import Data.Typeable
import Data.List (findIndex)
import Text.Parsec
import Carnap.Core.Data.Types
import Carnap.Calculi.Util
import Carnap.Calculi.NaturalDeduction.Syntax
import Carnap.Calculi.NaturalDeduction.Parser.Util
import Carnap.Languages.ClassicalSequent.Syntax
import Carnap.Languages.ClassicalSequent.Parser
parseShowWithLine :: Parsec String u [r] -> Parsec String u (FixLang lex a) -> Parsec String u (DeductionLine r lex a)
parseShowWithLine r f = do dpth <- indent
string "Show" <|> string "show"
optional $ char ':'
spaces
phi <- f
(rule, deps) <- rline r
return $ ShowWithLine phi dpth rule (map (\x->(x,x)) deps)
toDeductionHardegree :: Parsec String () [r] -> Parsec String () (FixLang lex a) -> String
-> Deduction r lex a
toDeductionHardegree r f = toDeduction (parseLine r f)
where parseLine r f = try (parseAssertLine r f)
<|> try (parseShowWithLine r f)
<|> try (parsePartialLine f)
--XXX: need double "try" here to avoid
--throwing away errors if first parser fails
{- |
In a Hardegree deduction, find the prooftree corresponding to
*line n* in ded, where proof line numbers start at 1
-}
toProofTreeHardegree ::
( Inference r lex sem
, Sequentable lex
, Typeable sem
) => Deduction r lex sem -> Int -> Either (ProofErrorMessage lex) (ProofTree r lex sem)
toProofTreeHardegree ded n = case ded !! (n - 1) of
(AssertLine f r dpth depairs) ->
do let deps = map fst depairs
mapM_ checkDep deps
deps' <- mapM (toProofTreeHardegree ded) deps
return $ Node (ProofLine n (SS $ liftToSequent f) r) deps'
where checkDep depline = takeRange depline n >>= scan
(ShowWithLine f d r@(r':_) depairs) ->
do m <- matchShow --the last line of the subproof
let deps = map fst depairs
mapM_ checkDep deps
dp <- case indirectInference r' of
Just (TypedProof prooftype) -> subproofProcess prooftype (n + 1) m
Nothing -> subproofProcess (ProofType 0 . length $ premisesOf r') (n + 1) m --Hardegree allows this, and it's rather nice in his notation
deps' <- mapM (toProofTreeHardegree ded) (dp ++ deps)
return $ Node (ProofLine n (SS $ liftToSequent f) r) deps'
where checkDep depline = takeRange depline n >>= scan
--For this system, extra subproof deps need to occur
--outside of the subproof, i.e. be available from the
--show line. Arbitrary choice on my part.
matchShow = let ded' = drop n ded in
case findIndex (\l -> depth l <= d) ded' of
Nothing -> Right (length ded)
Just m' -> Right (n + m')
-- XXX: since we're looking for the line number, starting at 1,
-- the index (starting at zero) of the next line is actually what we want
(PartialLine _ e _) -> Left $ NoParse e n
where err :: String -> Either (ProofErrorMessage lex) a
err x = Left $ GenericError x n
ln = length ded
--line h is accessible from the end of the chunk if everything in
--the chunk has depth greater than or equal to that of line h,
--and h is not a show line with no matching QED
scan chunk@(h:t) =
if all (\x -> depth h <= depth x) chunk
then case h of
(ShowWithLine _ _ _ _) -> if any (\x -> depth h == depth x) t
                                      then Right True
                                      else err "To cite a show line at this point, the line must be available---the associated subproof must be closed above this point."
_ -> Right True
else err "It looks like you're citing a line which is not in your subproof. If you're not, you may need to tidy up your proof."
takeRange m' n' =
if n' <= m'
then err "Dependency is later than assertion"
else Right $ lineRange m' n'
--sublist, given by line numbers
lineRange m n = drop (m - 1) $ take n ded
subproofProcess (ProofType assumptionNumber conclusionNumber) first last
| length available < (assumptionNumber + conclusionNumber) = err "this subproof doesn't have enough available lines to apply this rule"
| let firstlines = map (\x -> ded !! (x - 1)) (take assumptionNumber available) in
any (not . isAssumptionLine) firstlines =
err $ "this rule requires the first " ++ show assumptionNumber ++ " available lines of the subproof to be assumptions"
| otherwise = return $ take assumptionNumber available ++ drop (length available - conclusionNumber) available
where available = filter (\x -> depth (ded !! (x - 1)) == depth (ded !! (first - 1))) [first .. last]
| opentower/carnap | Carnap/src/Carnap/Calculi/NaturalDeduction/Parser/Hardegree.hs | gpl-3.0 | 5,564 | 0 | 19 | 1,999 | 1,353 | 680 | 673 | 76 | 9 |
{-# LANGUAGE Arrows, ScopedTypeVariables #-}
module Mescaline.Synth.SSF.DiskSampler
where
import Control.Applicative
import Control.Arrow
import Control.CCA.Types
import Data.Accessor ((^.))
import qualified Data.Foldable as Fold
import qualified Data.IntMap as Map
import Data.Monoid
import Mescaline
import qualified Mescaline.Database.Unit as Unit
import qualified Mescaline.Database.SourceFile as SourceFile
import Mescaline.Synth.SSF as SF
import Mescaline.Synth.SSF.BufferCache (Buffer, BufferCache)
import qualified Mescaline.Synth.SSF.BufferCache as BC
import Mescaline.Pattern.Event (SynthParams)
import qualified Mescaline.Pattern.Event as P
import Prelude hiding (init)
import qualified Prelude as P
import Sound.OpenSoundControl (OSC(..), Time(..), immediately)
import Sound.SC3 hiding (constant, free, gate, send, sync)
import Sound.SC3.Lang.Collection (clump)
-- import Sound.SC3.Lang.Pattern
import Sound.SC3.Server.Command.Completion
import Sound.SC3.Server.Notification (n_end)
import qualified Sound.SC3.Server.State as State
data VoiceState = Running | Stopped | Finished deriving (Enum, Eq, Show)
data Voice = Voice {
state :: VoiceState
, nodeId :: State.NodeId
, time :: Double
, unit :: Unit.Unit
, params :: SynthParams
, buffer :: Buffer
} deriving (Eq, Show)
finishVoice :: Voice -> Voice
finishVoice v =
case state v of
Stopped -> v { state = Finished }
_ -> v
diskBufferSize :: Int
diskBufferSize = 8192*8
-- | Attack-sustain-release envelope parameter constructor.
asr :: UGen -> UGen -> UGen -> [EnvCurve] -> [UGen]
asr aT sL rT c = env [0, sL, 0] [aT, rT] c 1 (-1)
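-- For example, @asr 0.01 1 0.5 [EnvLin]@ describes an envelope that ramps from
-- 0 to full level in 10 ms, sustains while the gate stays open (release node 1),
-- and falls back to 0 over 0.5 s once the gate closes.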
voiceGateEnvelope :: Bool
voiceGateEnvelope = False
voiceEnvGate :: UGen
voiceEnvGate = envGen AR gate 1 0 1 RemoveSynth (asr attackTime sustainLevel releaseTime [EnvLin])
where gate = control KR "gate" 1
sustainLevel = control KR "sustainLevel" 1
attackTime = control KR "attackTime" 0
releaseTime = control KR "releaseTime" 0
voiceEnvFixed :: UGen
voiceEnvFixed = envGen AR 1 1 0 1 RemoveSynth (envLinen attackTime dur releaseTime sustainLevel)
where dur = control KR "dur" 0
sustainLevel = control KR "sustainLevel" 1
attackTime = control KR "attackTime" 0
releaseTime = control KR "releaseTime" 0
voiceEnv :: UGen
voiceEnv = if voiceGateEnvelope then voiceEnvGate else voiceEnvFixed
toStereo :: UGen -> UGen
toStereo u =
case mceChannels u of
[x] -> pan2 x pan amp
[x, y] -> balance2 x y pan amp
xs -> sum (map (\[x, y] -> balance2 x y pan amp) $ clump 2 xs)
where
pan = control KR "pan" 0
amp = control KR "amp" 1
output :: UGen -> UGen
output = offsetOut (control KR "out" 0)
voiceDef :: Int -> UGen
voiceDef n = output $ toStereo $ vDiskIn n bufnum rate NoLoop * voiceEnv
where
bufnum = control KR "bufnum" (-1)
rate = bufRateScale KR bufnum
voiceDefMem :: Int -> UGen
voiceDefMem n = output $ toStereo $ playBuf n bufnum rate 1 (control KR "start" (0)) NoLoop DoNothing * voiceEnv
where
bufnum = control KR "bufnum" (-1)
rate = bufRateScale KR bufnum
voiceDefName :: Int -> String
voiceDefName 1 = "es.globero.mescaline.voice_1"
voiceDefName 2 = "es.globero.mescaline.voice_2"
voiceDefName nc = "es.globero.mescaline.voice_" ++ (show nc)
bundle :: Double -> [OSC] -> OSC
bundle = Bundle . UTCr
bundle' :: [OSC] -> OSC
bundle' = Bundle immediately
startVoice :: Voice -> OSC
startVoice voice =
bundle (time voice + params voice ^. P.latency)
[s_new (voiceDefName $ BC.numChannels $ buffer voice) (fromIntegral $ nodeId voice) AddToTail 0
([ ("bufnum", fromIntegral $ BC.uid $ buffer voice),
("attackTime", params voice ^. P.attackTime),
("releaseTime", params voice ^. P.attackTime),
("sustainLevel", params voice ^. P.sustainLevel) ]
++
if voiceGateEnvelope
then []
else [("dur", Unit.duration $ unit voice)])
]
stopVoice :: Voice -> OSC
stopVoice voice =
if voiceGateEnvelope
then bundle (time voice + params voice ^. P.latency)
[n_set1 (fromIntegral $ nodeId voice) "gate" (params voice ^. P.gateLevel)]
else bundle' []
-- Effectful interface
allocVoice :: SF (BufferCache, Event ((Double, Unit.Unit, SynthParams), Voice -> Maybe OSC)) (BufferCache, Event Voice)
allocVoice = proc (cache, e) -> do
e_nid <- alloc_ State.nodeId -< e
(cache', e_buf) <- BC.allocBuffer -< (cache, fmap (\((t, u, p), completion) nid ->
((BC.allocBytes
(SourceFile.numChannels $ Unit.sourceFile u)
diskBufferSize)
, (completion . Voice Running nid t u p))) e <*> e_nid)
returnA -< (cache', fmap (\nid ((t, u, p), _) -> Voice Running nid t u p) e_nid <*> e <*> e_buf)
freeVoices :: SF (BufferCache, Event [Voice]) BufferCache
freeVoices = proc (cache, e_voices) -> do
send >>> sync -< (Bundle immediately . fmap (b_close . fromIntegral . BC.uid . buffer)) <$> e_voices
BC.freeBufferList -< (cache, fmap buffer <$> e_voices)
broadcast :: Functor f => a -> f b -> f (a, b)
broadcast a = fmap ((,) a)
ignore :: Functor f => a -> f b -> f ((), b)
ignore _ = broadcast ()
newtype IL a = IL { unIL :: [(State.NodeId, a)] }
instance Functor IL where
fmap f (IL xs) = IL (fmap (second f) xs)
reduceIL :: forall a d . d -> IL (d -> (a, d)) -> (IL a, d)
reduceIL s0 (IL xs) = first (IL . zip (map fst xs))
$ Fold.foldl'
(\(cs, s) f -> let (c, s') = f s in (c : cs, s'))
([], s0)
(map snd xs)
filterIL :: (a -> Bool) -> IL a -> IL a
filterIL f = IL . P.filter (f . snd) . unIL
type VoiceSF = SF () Voice
type Sampler = IL Voice
data AllocVoice = AllocVoice Double Unit.Unit P.SynthParams
voiceSF :: Voice -> VoiceSF
voiceSF voice =
switch (pure voice &&& switchEvt voice) $ \voice -> pure $ voice { state = Stopped }
where
t = time voice
dur = Unit.duration (unit voice)
switchEvt voice =
once voice
>>> delayEvent_ (t + dur)
>>> arr (fmap stopVoice)
>>> send
>>> waitFor (n_end (nodeId voice))
>>> tag voice
updateSampler :: (Event Voice, IL Voice) -> Event (IL VoiceSF -> IL VoiceSF)
updateSampler (e_voice, (IL voices)) =
Event $
IL
-- Add voice in event to collection
. (event id (\voice -> ((nodeId voice, voiceSF voice):)) e_voice)
-- Filter voices with state Finished
. (P.filter (not . (flip elem finished) . fst))
-- Update state to Finished for voices with state Stopped
. fmap (\(nid, sf) ->
if nid `elem` stopped
then (nid, fmap (\voice -> voice { state = Finished }) sf)
else (nid, sf))
. unIL
where
finished = fmap fst . P.filter ((== Finished) . state . snd) $ voices
stopped = fmap fst . P.filter ((== Stopped) . state . snd) $ voices
samplerCore :: IL VoiceSF -> SF (Event Voice) (Sampler, Event [Voice])
samplerCore vs = core vs
>>> arr (filterIL ((== Running) . state))
&&& arr (ilToEvent . filterIL ((== Finished) . state))
where
core vs = pSwitch
ignore reduceIL vs
(arr updateSampler >>> init NoEvent)
(\vs' f -> core (f vs'))
ilToEvent (IL []) = NoEvent
ilToEvent (IL xs) = Event $ fmap snd xs
sampler :: SF (Event AllocVoice) Sampler
sampler =
proc action -> do
rec
bc <- init BC.newEmpty -< bc''
-- Alloc voice
(bc', e_voice) <- allocVoice -< (bc, fmap (\(AllocVoice t u p) -> ((t, u, p), completion)) action)
-- Free finished voices, fed back from samplerCore's output
bc'' <- freeVoices -< (bc', e_finished)
-- Get running and finished voices
(vs, e_finished) <- samplerCore (IL []) -< e_voice
returnA -< vs
where
-- Hmm, how to generalize from lists?
completion voice =
Just $ b_read'
(startVoice voice)
(fromIntegral $ BC.uid $ buffer voice)
(SourceFile.path sourceFile)
(truncate $ SourceFile.sampleRate sourceFile * Unit.onset (unit voice))
(-1) 0 1
where
sourceFile = Unit.sourceFile (unit voice)
-- voice :: SF (Event (Action, Buffer)) Voice
-- -- voice = undefined
-- voice = proc (cache, e) -> do
--
-- voice@(Voice nid buf) <- allocVoice cache unit (\voice ->
-- Just $ b_read'
-- (startVoice voice unit params t)
-- (fromIntegral $ BC.uid $ buffer voice)
-- (SourceFile.path sourceFile)
-- (truncate $ SourceFile.sampleRate sourceFile * Unit.onset unit)
-- (-1) 0 1)
-- -- tu <- utcr
-- -- FIXME: Why is this necessary?!
-- S.unsafeSync
-- -- C.send conn (startVoice voice t)
-- -- print (t-tu, t+dur-tu)
-- liftIO $ pauseThreadUntil (t + dur)
-- S.send $ stopVoice voice params (t + dur)
-- S.waitFor $ n_end nid
-- -- tu' <- utcr
-- -- liftIO $ putStrLn ("node end: " ++ show voice)
-- freeVoice cache voice
-- return ()
-- where
-- dur = Unit.duration unit
-- sourceFile = Unit.sourceFile unit
-- data Sampler = Sampler Connection BufferCache
--
-- initSampler :: Server BufferCache
-- initSampler = do
-- S.sync $ bundle' [ d_recv (synthdef (voiceDefName 1) (voiceDef 1)),
-- d_recv (synthdef (voiceDefName 2) (voiceDef 2)) ]
-- BC.newWith (replicate 4 (BC.allocBytes 1 diskBufferSize) ++ replicate 4 (BC.allocBytes 1 diskBufferSize))
--
-- newSampler :: IO Sampler
-- newSampler = do
-- let s = State.new Process.defaultServerOptions
-- t <- Process.openTransport opts "127.0.0.1" :: IO UDP
-- conn <- Conn.new s t
-- cache <- runServer initSampler conn
-- return (Sampler conn cache)
-- where
-- opts = Process.defaultRTOptionsUDP
--
-- freeSampler :: Sampler -> IO ()
-- freeSampler (Sampler conn cache) = flip runServer conn $ do
-- S.send (g_freeAll [0])
-- BC.free cache
--
-- playEvent :: Sampler -> P.SynthEvent -> IO ThreadId
-- playEvent (Sampler conn cache) e = runServer (fork $ playUnit cache (e ^. P.unit) (e ^. P.synth) (e ^. P.time)) conn
--
-- runSampler :: Sampler -> Chan P.SynthEvent -> IO ()
-- runSampler s c = loop
-- where
-- loop = do
-- e <- readChan c
-- -- print (t, e)
-- -- runServer (fork $ playUnit cache (e ^. P.unit) (e ^. P.synth) (e ^. P.time)) conn
-- playEvent s e
-- runSampler s c
--
-- -- Disk based sampler
-- playPatternDisk :: Double -> P.Pattern -> Chan P.Input -> Sampler -> IO ()
-- playPatternDisk tick pattern ichan (Sampler conn cache) = do
-- ochan <- newChan
-- forkIO $ loop ochan
-- P.execute tick pattern ichan ochan
-- where
-- loop c = readChan c >>= f >> loop c
-- f (_, SF.NoEvent) = return ()
-- f (_, SF.Event e) = runServer (fork (playUnit cache (e ^. P.unit) (e ^. P.synth) (e ^. P.time)) >> return ()) conn
| kaoskorobase/mescaline | lib/mescaline/Mescaline/Synth/SSF/DiskSampler.hs | gpl-3.0 | 11,868 | 3 | 21 | 3,638 | 3,081 | 1,699 | 1,382 | 186 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.AnalyticsReporting.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.AnalyticsReporting.Types
(
-- * Service Configuration
analyticsReportingService
-- * OAuth Scopes
, analyticsScope
, analyticsReadOnlyScope
-- * MetricFilter
, MetricFilter
, metricFilter
, mfNot
, mfOperator
, mfMetricName
, mfComparisonValue
-- * EventData
, EventData
, eventData
, edEventCategory
, edEventCount
, edEventValue
, edEventLabel
, edEventAction
-- * MetricFilterOperator
, MetricFilterOperator (..)
-- * ReportRow
, ReportRow
, reportRow
, rrMetrics
, rrDimensions
-- * PivotHeaderEntry
, PivotHeaderEntry
, pivotHeaderEntry
, pheDimensionValues
, pheDimensionNames
, pheMetric
-- * PivotValueRegion
, PivotValueRegion
, pivotValueRegion
, pvrValues
-- * ReportRequest
, ReportRequest
, reportRequest
, rMetrics
, rPivots
, rCohortGroup
, rSamplingLevel
, rIncludeEmptyRows
, rDateRanges
, rViewId
, rHideTotals
, rHideValueRanges
, rMetricFilterClauses
, rSegments
, rPageToken
, rDimensions
, rDimensionFilterClauses
, rPageSize
, rOrderBys
, rFiltersExpression
-- * MetricHeaderEntry
, MetricHeaderEntry
, metricHeaderEntry
, mheName
, mheType
-- * GoalSetData
, GoalSetData
, goalSetData
, gsdGoals
-- * CohortGroup
, CohortGroup
, cohortGroup
, cgCohorts
, cgLifetimeValue
-- * SimpleSegment
, SimpleSegment
, simpleSegment
, ssOrFiltersForSegment
-- * DimensionFilterClause
, DimensionFilterClause
, dimensionFilterClause
, dfcOperator
, dfcFilters
-- * DimensionFilterClauseOperator
, DimensionFilterClauseOperator (..)
-- * CohortType
, CohortType (..)
-- * OrderBy
, OrderBy
, orderBy
, obOrderType
, obSortOrder
, obFieldName
-- * Dimension
, Dimension
, dimension
, dName
, dHistogramBuckets
-- * SearchUserActivityRequestActivityTypesItem
, SearchUserActivityRequestActivityTypesItem (..)
-- * DateRangeValues
, DateRangeValues
, dateRangeValues
, drvPivotValueRegions
, drvValues
-- * PivotHeader
, PivotHeader
, pivotHeader
, phTotalPivotGroupsCount
, phPivotHeaderEntries
-- * SearchUserActivityRequest
, SearchUserActivityRequest
, searchUserActivityRequest
, suarViewId
, suarDateRange
, suarUser
, suarPageToken
, suarPageSize
, suarActivityTypes
-- * MetricHeader
, MetricHeader
, metricHeader
, mhPivotHeaders
, mhMetricHeaderEntries
-- * SegmentFilter
, SegmentFilter
, segmentFilter
, sfNot
, sfSimpleSegment
, sfSequenceSegment
-- * OrderByOrderType
, OrderByOrderType (..)
-- * DateRange
, DateRange
, dateRange
, drEndDate
, drStartDate
-- * Report
, Report
, report
, rNextPageToken
, rData
, rColumnHeader
-- * PageviewData
, PageviewData
, pageviewData
, pdPageTitle
, pdPagePath
-- * EcommerceDataActionType
, EcommerceDataActionType (..)
-- * ReportData
, ReportData
, reportData
, rdMinimums
, rdRows
, rdTotals
, rdSamplesReadCounts
, rdMaximums
, rdDataLastRefreshed
, rdRowCount
, rdSamplingSpaceSizes
, rdIsDataGolden
-- * SegmentFilterClause
, SegmentFilterClause
, segmentFilterClause
, sfcMetricFilter
, sfcNot
, sfcDimensionFilter
-- * SegmentSequenceStep
, SegmentSequenceStep
, segmentSequenceStep
, sssMatchType
, sssOrFiltersForSegment
-- * OrFiltersForSegment
, OrFiltersForSegment
, orFiltersForSegment
, offsSegmentFilterClauses
-- * SegmentDefinition
, SegmentDefinition
, segmentDefinition
, sdSegmentFilters
-- * OrderBySortOrder
, OrderBySortOrder (..)
-- * User
, User
, user
, uUserId
, uType
-- * Pivot
, Pivot
, pivot
, pStartGroup
, pMetrics
, pMaxGroupCount
, pDimensions
, pDimensionFilterClauses
-- * TransactionData
, TransactionData
, transactionData
, tdTransactionId
, tdTransactionTax
, tdTransactionShipping
, tdTransactionRevenue
-- * SequenceSegment
, SequenceSegment
, sequenceSegment
, ssFirstStepShouldMatchFirstHit
, ssSegmentSequenceSteps
-- * Metric
, Metric
, metric
, mFormattingType
, mAlias
, mExpression
-- * SegmentMetricFilterOperator
, SegmentMetricFilterOperator (..)
-- * MetricHeaderEntryType
, MetricHeaderEntryType (..)
-- * SegmentMetricFilter
, SegmentMetricFilter
, segmentMetricFilter
, smfOperator
, smfMetricName
, smfMaxComparisonValue
, smfScope
, smfComparisonValue
-- * ReportRequestSamplingLevel
, ReportRequestSamplingLevel (..)
-- * Xgafv
, Xgafv (..)
-- * GetReportsRequest
, GetReportsRequest
, getReportsRequest
, grrUseResourceQuotas
, grrReportRequests
-- * UserActivitySession
, UserActivitySession
, userActivitySession
, uasPlatform
, uasDeviceCategory
, uasActivities
, uasSessionDate
, uasDataSource
, uasSessionId
-- * SegmentSequenceStepMatchType
, SegmentSequenceStepMatchType (..)
-- * GoalData
, GoalData
, goalData
, gdGoalPreviousStep2
, gdGoalName
, gdGoalPreviousStep3
, gdGoalIndex
, gdGoalCompletionLocation
, gdGoalPreviousStep1
, gdGoalCompletions
, gdGoalValue
-- * Activity
, Activity
, activity
, aEvent
, aHostname
, aActivityType
, aMedium
, aPageview
, aCampaign
, aGoals
, aChannelGrouping
, aAppview
, aSource
, aActivityTime
, aEcommerce
, aCustomDimension
, aKeyword
, aLandingPagePath
-- * Cohort
, Cohort
, cohort
, cDateRange
, cName
, cType
-- * SegmentMetricFilterScope
, SegmentMetricFilterScope (..)
-- * EcommerceData
, EcommerceData
, ecommerceData
, edEcommerceType
, edTransaction
, edProducts
, edActionType
-- * EcommerceDataEcommerceType
, EcommerceDataEcommerceType (..)
-- * DimensionFilter
, DimensionFilter
, dimensionFilter
, dfNot
, dfOperator
, dfExpressions
, dfDimensionName
, dfCaseSensitive
-- * GetReportsResponse
, GetReportsResponse
, getReportsResponse
, grrReports
, grrResourceQuotasRemaining
, grrQueryCost
-- * DimensionFilterOperator
, DimensionFilterOperator (..)
-- * ActivityActivityType
, ActivityActivityType (..)
-- * MetricFormattingType
, MetricFormattingType (..)
-- * CustomDimension
, CustomDimension
, customDimension
, cdValue
, cdIndex
-- * Segment
, Segment
, segment
, sDynamicSegment
, sSegmentId
-- * ProductData
, ProductData
, productData
, pdProductName
, pdProductSKU
, pdItemRevenue
, pdProductQuantity
-- * SegmentDimensionFilterOperator
, SegmentDimensionFilterOperator (..)
-- * ResourceQuotasRemaining
, ResourceQuotasRemaining
, resourceQuotasRemaining
, rqrHourlyQuotaTokensRemaining
, rqrDailyQuotaTokensRemaining
-- * SegmentDimensionFilter
, SegmentDimensionFilter
, segmentDimensionFilter
, sdfOperator
, sdfMinComparisonValue
, sdfMaxComparisonValue
, sdfExpressions
, sdfDimensionName
, sdfCaseSensitive
-- * DynamicSegment
, DynamicSegment
, dynamicSegment
, dsUserSegment
, dsName
, dsSessionSegment
-- * MetricFilterClause
, MetricFilterClause
, metricFilterClause
, mfcOperator
, mfcFilters
-- * MetricFilterClauseOperator
, MetricFilterClauseOperator (..)
-- * UserType
, UserType (..)
-- * ColumnHeader
, ColumnHeader
, columnHeader
, chMetricHeader
, chDimensions
-- * SearchUserActivityResponse
, SearchUserActivityResponse
, searchUserActivityResponse
, suarNextPageToken
, suarSampleRate
, suarSessions
, suarTotalRows
-- * ScreenviewData
, ScreenviewData
, screenviewData
, sdMobileDeviceModel
, sdMobileDeviceBranding
, sdAppName
, sdScreenName
) where
import Network.Google.AnalyticsReporting.Types.Product
import Network.Google.AnalyticsReporting.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v4' of the Analytics Reporting API. This contains the host and root path used as a starting point for constructing service requests.
analyticsReportingService :: ServiceConfig
analyticsReportingService
= defaultService (ServiceId "analyticsreporting:v4")
"analyticsreporting.googleapis.com"
-- | View and manage your Google Analytics data
analyticsScope :: Proxy '["https://www.googleapis.com/auth/analytics"]
analyticsScope = Proxy
-- | See and download your Google Analytics data
analyticsReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/analytics.readonly"]
analyticsReadOnlyScope = Proxy
| brendanhay/gogol | gogol-analyticsreporting/gen/Network/Google/AnalyticsReporting/Types.hs | mpl-2.0 | 9,799 | 0 | 7 | 2,617 | 1,170 | 811 | 359 | 323 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
-- Module : Network.AWS.Auth
-- Copyright : (c) 2013 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
-- |
module Network.AWS.Auth where
import Control.Applicative
import Control.Concurrent
import Control.Error
import Control.Monad
import Control.Monad.IO.Class
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.IORef
import Data.Monoid
import Data.String
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import Data.Time
import Network.AWS.EC2.Metadata
import Network.AWS.Internal hiding (Env)
import System.Environment
data Credentials
= AuthBasic Text Text
-- ^ Basic credentials containing an access key and a secret key.
| AuthSession Text Text Text
-- ^ Session credentials containing access key, secret key, and a security token.
| AuthProfile Text
-- ^ A specific IAM Profile name to query the local instance-data for credentials.
| AuthEnv Text Text
-- ^ Environment variable names to read for the access and secret keys.
| AuthDiscover
-- ^ Attempt to read the default access and secret keys from the environment,
-- falling back to the first available IAM profile if they are not set.
--
-- This attempts to resolve <http://instance-data> rather than directly
-- retrieving <http://169.254.169.254> for IAM profile information to ensure
-- the request terminates promptly if not running on EC2.
deriving (Eq, Ord)
instance Show Credentials where
show (AuthBasic a _) = Text.unpack $ Text.concat ["Basic ", a, " ****"]
show (AuthSession a _ _) = Text.unpack $ Text.concat ["Session ", a, " **** ****"]
show (AuthProfile n) = Text.unpack $ "Profile " <> n
show (AuthEnv a s) = Text.unpack $ Text.concat ["Env ", a, " ", s]
show AuthDiscover = "Discover"
-- | Default access key environment variable: 'AWS_ACCESS_KEY'
accessKey :: Text
accessKey = "AWS_ACCESS_KEY"
-- | Default secret key environment variable: 'AWS_SECRET_KEY'
secretKey :: Text
secretKey = "AWS_SECRET_KEY"
credentials :: (Applicative m, MonadIO m)
=> Credentials
-> EitherT String m (IORef Auth)
credentials = mk
where
mk (AuthBasic a s) = ref $ Auth a s Nothing Nothing
mk (AuthSession a s t) = ref $ Auth a s (Just t) Nothing
mk (AuthProfile n) = fromProfile n
mk (AuthEnv a s) = fromKeys a s
mk AuthDiscover = fromKeys accessKey secretKey
<|> (defaultProfile >>= fromProfile)
fromKeys a s = Auth <$> key a <*> key s <*> pure Nothing <*> pure Nothing
>>= ref
key (Text.unpack -> k) = fmapLT (fromString . show) (syncIO $ lookupEnv k)
>>= failWith (fromString $ "Unable to read ENV variable: " ++ k)
>>= return . Text.pack
ref = liftIO . newIORef
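-- Usage sketch (added for illustration; the surrounding program and error
-- handling are assumptions, not part of this module):
--
-- main :: IO ()
-- main = do
--     r <- runEitherT (credentials AuthDiscover)
--     case r of
--         Left err  -> putStrLn ("could not obtain credentials: " ++ err)
--         Right ref -> do
--             auth <- readIORef ref
--             -- hand 'auth' (or the IORef itself) to the request-signing code
--             return ()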
defaultProfile :: (Applicative m, MonadIO m) => EitherT String m Text
defaultProfile = do
ls <- BS.lines <$> meta (IAM $ SecurityCredentials Nothing)
p <- tryHead "Unable to get default IAM Profile from metadata" ls
return $ Text.decodeUtf8 p
-- | The IORef wrapper + timer is designed so that multiple concurrent
-- accesses of 'Auth' from the 'AWS' environment are not required to calculate
-- expiry and sequentially queue to update it.
--
-- The forked timer ensures a singular owner and pre-emptive refresh of the
-- temporary session credentials.
fromProfile :: (Applicative m, MonadIO m)
=> Text
-> EitherT String m (IORef Auth)
fromProfile name = do
!a@Auth{..} <- auth
fmapLT show . syncIO . liftIO $ do
ref <- newIORef a
start ref expiration
return ref
where
auth :: (Applicative m, MonadIO m) => EitherT String m Auth
auth = do
m <- LBS.fromStrict <$> meta (IAM . SecurityCredentials $ Just name)
hoistEither $ Aeson.eitherDecode m
start ref = maybe (return ()) (timer ref <=< delay)
delay n = truncate . diffUTCTime n <$> getCurrentTime
-- FIXME: guard against a lower expiration than the -60
-- remove the error . show shenanigans
timer ref n = void . forkIO $ do
threadDelay $ (n - 60) * 1000000
!a@Auth{..} <- eitherT (error . show) return auth
atomicWriteIORef ref a
start ref expiration
| brendanhay/amazonka-limited | src/Network/AWS/Auth.hs | mpl-2.0 | 5,104 | 0 | 14 | 1,378 | 1,070 | 572 | 498 | 81 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.RegionDisks.SetIAMPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Sets the access control policy on the specified resource. Replaces any
-- existing policy.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.regionDisks.setIamPolicy@.
module Network.Google.Resource.Compute.RegionDisks.SetIAMPolicy
(
-- * REST Resource
RegionDisksSetIAMPolicyResource
-- * Creating a Request
, regionDisksSetIAMPolicy
, RegionDisksSetIAMPolicy
-- * Request Lenses
, rdsipProject
, rdsipPayload
, rdsipResource
, rdsipRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.regionDisks.setIamPolicy@ method which the
-- 'RegionDisksSetIAMPolicy' request conforms to.
type RegionDisksSetIAMPolicyResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"disks" :>
Capture "resource" Text :>
"setIamPolicy" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] RegionSetPolicyRequest :>
Post '[JSON] Policy
-- | Sets the access control policy on the specified resource. Replaces any
-- existing policy.
--
-- /See:/ 'regionDisksSetIAMPolicy' smart constructor.
data RegionDisksSetIAMPolicy =
RegionDisksSetIAMPolicy'
{ _rdsipProject :: !Text
, _rdsipPayload :: !RegionSetPolicyRequest
, _rdsipResource :: !Text
, _rdsipRegion :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RegionDisksSetIAMPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rdsipProject'
--
-- * 'rdsipPayload'
--
-- * 'rdsipResource'
--
-- * 'rdsipRegion'
regionDisksSetIAMPolicy
:: Text -- ^ 'rdsipProject'
-> RegionSetPolicyRequest -- ^ 'rdsipPayload'
-> Text -- ^ 'rdsipResource'
-> Text -- ^ 'rdsipRegion'
-> RegionDisksSetIAMPolicy
regionDisksSetIAMPolicy pRdsipProject_ pRdsipPayload_ pRdsipResource_ pRdsipRegion_ =
RegionDisksSetIAMPolicy'
{ _rdsipProject = pRdsipProject_
, _rdsipPayload = pRdsipPayload_
, _rdsipResource = pRdsipResource_
, _rdsipRegion = pRdsipRegion_
}
-- | Project ID for this request.
rdsipProject :: Lens' RegionDisksSetIAMPolicy Text
rdsipProject
= lens _rdsipProject (\ s a -> s{_rdsipProject = a})
-- | Multipart request metadata.
rdsipPayload :: Lens' RegionDisksSetIAMPolicy RegionSetPolicyRequest
rdsipPayload
= lens _rdsipPayload (\ s a -> s{_rdsipPayload = a})
-- | Name or id of the resource for this request.
rdsipResource :: Lens' RegionDisksSetIAMPolicy Text
rdsipResource
= lens _rdsipResource
(\ s a -> s{_rdsipResource = a})
-- | The name of the region for this request.
rdsipRegion :: Lens' RegionDisksSetIAMPolicy Text
rdsipRegion
= lens _rdsipRegion (\ s a -> s{_rdsipRegion = a})
instance GoogleRequest RegionDisksSetIAMPolicy where
type Rs RegionDisksSetIAMPolicy = Policy
type Scopes RegionDisksSetIAMPolicy =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient RegionDisksSetIAMPolicy'{..}
= go _rdsipProject _rdsipRegion _rdsipResource
(Just AltJSON)
_rdsipPayload
computeService
where go
= buildClient
(Proxy :: Proxy RegionDisksSetIAMPolicyResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/RegionDisks/SetIAMPolicy.hs | mpl-2.0 | 4,444 | 0 | 18 | 1,039 | 549 | 326 | 223 | 90 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE FlexibleInstances, FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
-- ** `Practical' denotational semantics
module Tutorial1_Orig where
-- Consider a small subset of Haskell
-- (or a pure functional subset of another suitable language)
-- as an `executable Math'
-- How to deal with side-effects, like input?
-- Simple sub-language of effectful integer
-- expressions, embedded in Haskell
-- Define by denotational semantics: giving meaning to each
-- expression and how to compose them
-- Don't commit to a particular domain yet
class ReaderLang d where
int :: Int -> d -- Int literals
add :: d -> d -> d
ask :: d
-- Sample expression
rlExp = add ask (add ask (int 1))
-- What should be that d?
-- EDLS, pp 2 and 6
-- EDLS, p7
-- * Implementing Math
data CRead = CRVal Int | Get0 (Int -> CRead)
instance ReaderLang CRead where
int x = CRVal x
ask = Get0 CRVal
-- t1 = ask + ask
-- p9
add (CRVal x) (CRVal y) = CRVal (x+y)
add (Get0 k) y = Get0 (\x -> add (k x) y)
add x (Get0 k) = Get0 (\y -> add x (k y))
-- The meaning of rlExp in that domain
_ = rlExp :: CRead
-- Need authority (admin)!
-- p 11
runReader0 :: Int -> CRead -> Int
runReader0 e (CRVal x) = x
runReader0 e (Get0 k) = runReader0 e $ k e
_ = runReader0 2 rlExp
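-- For rlExp above, the environment 2 is supplied at every `ask', so
-- runReader0 2 rlExp = 2 + (2 + 1) = 5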
-- CRead is too particular a semantic domain: nothing but Int-valued
-- computations
-- Need something more general
-- Again, p7
{-
data Comp req a where
Val :: a -> Comp req a
E :: _1 -> (_2 -> Comp req a) -> Comp req a
-- Effect signature
data Get x where
Get :: Get Int
instance ReaderLang (Comp Get Int) where
int x = Val x
ask = E Get Val
add (Val x) (Val y) = Val (x+y)
add (E r k) y = E r (\x -> add (k x) y)
add x (E r k) = E r (\y -> add x (k y))
-- How to extend to other types of env?
runReader :: Int -> Comp Get a -> a
runReader e (Val x) = x
runReader e (E Get k) = runReader e $ k e
_ = runReader 2 rlExp :: Int
-- If we need subtraction, should we write the last two clauses
-- over again?
-- Generalizing even more
-- p7, Fig 3
inV :: a -> Comp req a
inV = Val
bind :: Comp req a -> (a -> Comp req b) -> Comp req b
bind (Val x) f = f x
bind (E r k) f = E r (\x -> bind (k x) f)
-- We can easily write even richer Reader languages, uniformly
rlExp2 =
bind ask $ \x ->
bind ask $ \y ->
Val (x + y + 1)
-- with multiplication, subtraction, and re-using previous expressions
rlExp3 =
bind rlExp2 $ \x ->
bind ask $ \y ->
Val (x * y - 1)
_ = runReader 2 rlExp3 :: Int
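-- With environment 2: rlExp2 evaluates to 2 + 2 + 1 = 5, and rlExp3 to 5 * 2 - 1 = 9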
-}
| haroldcarr/learn-haskell-coq-ml-etc | haskell/conference/2017-09-cufp-effects/src/Tutorial1_Orig.hs | unlicense | 2,612 | 0 | 11 | 685 | 324 | 179 | 145 | 21 | 1 |
{-
Copyright (c) Facebook, Inc. and its affiliates.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
module Thrift.Transport.Handle
( module Thrift.Transport
, Port
, HandleSource(..)
) where
import Control.Exception ( catch, throw )
import Data.ByteString.Internal (c2w)
#if __GLASGOW_HASKELL__ < 710
import Data.Functor
#endif
#if MIN_VERSION_network(2,7,0)
import Data.Maybe
import Network.Socket
#else
import Network
#endif
import System.IO
import System.IO.Error ( isEOFError )
import Thrift.Transport
import qualified Data.ByteString.Lazy as LBS
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid
#endif
instance Transport Handle where
tIsOpen = hIsOpen
tClose = hClose
tRead h n = LBS.hGet h n `catch` handleEOF mempty
tPeek h = (Just . c2w <$> hLookAhead h) `catch` handleEOF Nothing
tWrite = LBS.hPut
tFlush = hFlush
-- | Type class for all types that can open a Handle. This class is used to
-- replace tOpen in the Transport type class.
class HandleSource s where
hOpen :: s -> IO Handle
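-- A quick usage sketch for the two instances below; the path, host and port
-- are made-up values:
--
-- > h1 <- hOpen "/var/tmp/thrift.bin" -- FilePath instance
-- > h2 <- hOpen ("localhost", "9090") -- (HostName, Port) instance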
instance HandleSource FilePath where
hOpen s = openFile s ReadWriteMode
type Port = String
instance HandleSource (HostName, Port) where
#if MIN_VERSION_network(2,7,0)
hOpen (h,p) = do
let hints = defaultHints{addrFamily = AF_INET}
addr <- fromMaybe (error "getAddrInfo") . listToMaybe <$>
getAddrInfo (Just hints) (Just h) (Just p)
s <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
connect s $ addrAddress addr
socketToHandle s ReadWriteMode
#else
hOpen (h,p) = connectTo h (PortNumber $ read p)
#endif
handleEOF :: a -> IOError -> IO a
handleEOF a e = if isEOFError e
then return a
else throw $ TransportExn "TChannelTransport: Could not read" TE_UNKNOWN
| facebook/fbthrift | thrift/lib/hs/Thrift/Transport/Handle.hs | apache-2.0 | 3,315 | 0 | 13 | 632 | 467 | 266 | 201 | 37 | 2 |
module CourseStitch.Models (
module CourseStitch.Models.Tables,
module CourseStitch.Models.Types,
module CourseStitch.Models.RunDB,
module CourseStitch.Models.Queries,
module CourseStitch.Models.Models
) where
import CourseStitch.Models.Tables
import CourseStitch.Models.Types
import CourseStitch.Models.RunDB
import CourseStitch.Models.Queries
import CourseStitch.Models.Models
| coursestitch/coursestitch-api | lib/CourseStitch/Models.hs | apache-2.0 | 400 | 0 | 5 | 46 | 73 | 50 | 23 | 11 | 0 |
{-# LANGUAGE TupleSections #-}
module Handler.DownloadFeeds where
import qualified Data.Text as T
import Data.Maybe
import Data.Time (getCurrentTimeZone)
import Data.Default (def)
import Blaze.ByteString.Builder
import qualified Model
import Import
typeTorrent :: T.Text
typeTorrent = "application/x-bittorrent"
nsAtom :: T.Text
nsAtom = "http://www.w3.org/2005/Atom"
torrentLink :: Download -> Route UIApp
torrentLink d = TorrentFileR
(downloadUser d)
(downloadSlug d)
(TorrentName $ downloadName d)
data FeedParameters = Parameters {
pTitle :: T.Text,
pImage :: T.Text,
pLink :: Route UIApp
}
class RepFeed c where
renderFeed :: FeedParameters -> [Item] -> Handler c
renderFeed' :: FeedParameters -> [Download] -> Handler c
renderFeed' params downloads = renderFeed params $
groupDownloads downloads
withXmlDecl :: Content -> Content
withXmlDecl (ContentBuilder b _) =
flip ContentBuilder Nothing $
fromByteString "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\r\n" `mappend`
b
withXmlDecl c = c
newtype RepRss = RepRss Content
instance HasReps RepRss where
chooseRep (RepRss content) _cts =
return (typeRss, withXmlDecl content)
instance RepFeed RepRss where
renderFeed params items = do
url <- getFullUrlRender
let image = pImage params
RepRss `fmap` hamletToContent [xhamlet|
<rss version="2.0"
xmlns:atom=#{nsAtom}>
<channel>
<title>#{pTitle params}
<link>#{url $ pLink params}
$if not (T.null image)
<image>
<url>#{image}
$forall item <- items
<item>
<title>#{itemTitle item}
<link>#{itemLink url item}
$if not (T.null $ fromMaybe "" $ itemLang item)
<language>#{fromMaybe "" $ itemLang item}
$maybe summary <- itemSummary item
<description>#{summary}
<guid isPermaLink="true">#{itemLink url item}
<pubDate>#{rfc822 (itemPublished item)}
$if not (T.null $ itemImage item)
<image>
<url>#{itemImage item}
$if not (T.null $ itemPayment item)
<atom:link rel="payment"
href=#{itemPayment item}
>
$forall d <- itemDownloads item
<enclosure type="#{typeTorrent}"
length="#{downloadSize d}"
url="#{url $ torrentLink d}">
|]
newtype RepAtom = RepAtom Content
instance HasReps RepAtom where
chooseRep (RepAtom content) _cts =
return (typeAtom, withXmlDecl content)
instance RepFeed RepAtom where
renderFeed params items = do
let image = pImage params
url <- getFullUrlRender
tz <- liftIO getCurrentTimeZone
RepAtom `fmap` hamletToContent [xhamlet|
<feed version="1.0"
xmlns=#{nsAtom}>
<title>#{pTitle params}
<link rel="alternate"
type="text/html"
href=#{url $ pLink params}
>
<id>#{url $ pLink params}
$if not (T.null image)
<link rel="icon"
href=#{image}
>
$forall item <- items
<entry xml:lang="#{fromMaybe "" $ itemLang item}">
<title>#{itemTitle item}
<link rel="alternate"
type="text/html"
href=#{itemLink url item}
>
<id>#{itemLink url item}
<published>#{iso8601 $ localTimeToZonedTime tz $ itemPublished item}
$maybe summary <- itemSummary item
<summary>#{summary}
$if not (T.null $ itemImage item)
<link rel="icon"
href=#{itemImage item}
>
$if not (T.null $ itemPayment item)
<link rel="payment"
href=#{itemPayment item}
>
$forall d <- itemDownloads item
<link rel="enclosure"
type=#{typeTorrent}
size=#{downloadSize d}
href=#{url $ torrentLink d}
>
|]
itemLink urlRender item =
urlRender (UserFeedR (itemUser item) (itemSlug item)) `T.append`
"#" `T.append`
itemId item
getNew :: RepFeed a => Handler a
getNew = withDB (Model.recentDownloads def) >>=
renderFeed' Parameters {
pTitle = "Bitlove: New",
pLink = NewR,
pImage = ""
}
getNewRssR :: Handler RepRss
getNewRssR = getNew
getNewAtomR :: Handler RepAtom
getNewAtomR = getNew
getTop :: RepFeed a => Handler a
getTop = withDB (Model.popularDownloads def) >>=
renderFeed' Parameters {
pTitle = "Bitlove: Top",
pLink = TopR,
pImage = ""
}
getTopRssR :: Handler RepRss
getTopRssR = getTop
getTopAtomR :: Handler RepAtom
getTopAtomR = getTop
getTopDownloaded :: RepFeed a => Period -> Handler a
getTopDownloaded period =
let (period_days, period_title) =
case period of
PeriodDays 1 -> (1, "1 day")
PeriodDays days -> (days, T.pack $ show days ++ " days")
PeriodAll -> (10000, "all time")
in withDB (Model.mostDownloaded period_days def) >>=
renderFeed' Parameters {
pTitle = "Bitlove: Top Downloaded in " `T.append` period_title,
pLink = TopDownloadedR period,
pImage = ""
}
getTopDownloadedRssR :: Period -> Handler RepRss
getTopDownloadedRssR = getTopDownloaded
getTopDownloadedAtomR :: Period -> Handler RepAtom
getTopDownloadedAtomR = getTopDownloaded
getUserDownloads :: RepFeed a => UserName -> Handler a
getUserDownloads user = do
(details, downloads) <- withDB $ \db -> do
details <- Model.userDetailsByName user db
downloads <- Model.userDownloads user def db
return (details, downloads)
case details of
[] ->
notFound
(details':_) ->
renderFeed' Parameters {
pTitle = userName user `T.append` " on Bitlove",
pLink = UserR user,
pImage = userImage details'
} downloads
getUserDownloadsRssR :: UserName -> Handler RepRss
getUserDownloadsRssR = getUserDownloads
getUserDownloadsAtomR :: UserName -> Handler RepAtom
getUserDownloadsAtomR = getUserDownloads
getUserFeed :: RepFeed a => UserName -> Text -> Handler a
getUserFeed user slug = do
mFeedDownloads <- withDB $ \db -> do
feeds <- Model.userFeedInfo user slug db
case feeds of
[] ->
return Nothing
(feed:_) ->
(Just . (feed, )) `fmap`
Model.feedDownloads (feedUrl feed) def db
case mFeedDownloads of
Nothing ->
notFound
Just (feed, downloads) ->
renderFeed' Parameters
{ pTitle = feedTitle feed `T.append` " on Bitlove"
, pLink = UserFeedR user slug
, pImage = feedImage feed
} downloads
getUserFeedRssR :: UserName -> Text -> Handler RepRss
getUserFeedRssR = getUserFeed
getUserFeedAtomR :: UserName -> Text -> Handler RepAtom
getUserFeedAtomR = getUserFeed
| jannschu/bitlove-ui | Handler/DownloadFeeds.hs | bsd-2-clause | 7,345 | 0 | 18 | 2,430 | 1,328 | 695 | 633 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
module Feldspar.Core.Constructs.Tuple
( module Language.Syntactic.Constructs.Tuple
) where
import Data.Tuple.Select
import Language.Syntactic
import Language.Syntactic.Constructs.Binding
import Language.Syntactic.Constructs.Binding.HigherOrder (CLambda)
import Language.Syntactic.Constructs.Tuple
import Feldspar.Core.Types
import Feldspar.Core.Interpretation
import Feldspar.Core.Constructs.Binding
instance Sharable Tuple
instance SizeProp (Tuple :|| Type)
where
sizeProp (C' Tup2) (a :* b :* Nil)
| WrapFull ia <- a
, WrapFull ib <- b
= (infoSize ia, infoSize ib)
sizeProp (C' Tup3) (a :* b :* c :* Nil)
| WrapFull ia <- a
, WrapFull ib <- b
, WrapFull ic <- c
= ( infoSize ia
, infoSize ib
, infoSize ic
)
sizeProp (C' Tup4) (a :* b :* c :* d :* Nil)
| WrapFull ia <- a
, WrapFull ib <- b
, WrapFull ic <- c
, WrapFull id <- d
= ( infoSize ia
, infoSize ib
, infoSize ic
, infoSize id
)
sizeProp (C' Tup5) (a :* b :* c :* d :* e :* Nil)
| WrapFull ia <- a
, WrapFull ib <- b
, WrapFull ic <- c
, WrapFull id <- d
, WrapFull ie <- e
= ( infoSize ia
, infoSize ib
, infoSize ic
, infoSize id
, infoSize ie
)
sizeProp (C' Tup6) (a :* b :* c :* d :* e :* g :* Nil)
| WrapFull ia <- a
, WrapFull ib <- b
, WrapFull ic <- c
, WrapFull id <- d
, WrapFull ie <- e
, WrapFull ig <- g
= ( infoSize ia
, infoSize ib
, infoSize ic
, infoSize id
, infoSize ie
, infoSize ig
)
sizeProp (C' Tup7) (a :* b :* c :* d :* e :* g :* h :* Nil)
| WrapFull ia <- a
, WrapFull ib <- b
, WrapFull ic <- c
, WrapFull id <- d
, WrapFull ie <- e
, WrapFull ig <- g
, WrapFull ih <- h
= ( infoSize ia
, infoSize ib
, infoSize ic
, infoSize id
, infoSize ie
, infoSize ig
, infoSize ih
)
instance Sharable Select
where
sharable _ = False
sel1Size :: (Sel1' a ~ b) => TypeRep a -> Size a -> Size b
sel1Size Tup2Type{} = sel1
sel1Size Tup3Type{} = sel1
sel1Size Tup4Type{} = sel1
sel1Size Tup5Type{} = sel1
sel1Size Tup6Type{} = sel1
sel1Size Tup7Type{} = sel1
sel2Size :: (Sel2' a ~ b) => TypeRep a -> (Size a -> Size b)
sel2Size Tup2Type{} = sel2
sel2Size Tup3Type{} = sel2
sel2Size Tup4Type{} = sel2
sel2Size Tup5Type{} = sel2
sel2Size Tup6Type{} = sel2
sel2Size Tup7Type{} = sel2
sel3Size :: (Sel3' a ~ b) => TypeRep a -> (Size a -> Size b)
sel3Size Tup3Type{} = sel3
sel3Size Tup4Type{} = sel3
sel3Size Tup5Type{} = sel3
sel3Size Tup6Type{} = sel3
sel3Size Tup7Type{} = sel3
sel4Size :: (Sel4' a ~ b) => TypeRep a -> (Size a -> Size b)
sel4Size Tup4Type{} = sel4
sel4Size Tup5Type{} = sel4
sel4Size Tup6Type{} = sel4
sel4Size Tup7Type{} = sel4
sel5Size :: (Sel5' a ~ b) => TypeRep a -> (Size a -> Size b)
sel5Size Tup5Type{} = sel5
sel5Size Tup6Type{} = sel5
sel5Size Tup7Type{} = sel5
sel6Size :: (Sel6' a ~ b) => TypeRep a -> (Size a -> Size b)
sel6Size Tup6Type{} = sel6
sel6Size Tup7Type{} = sel6
sel7Size :: (Sel7' a ~ b) => TypeRep a -> (Size a -> Size b)
sel7Size Tup7Type{} = sel7
instance SizeProp (Select :|| Type)
where
sizeProp (C' Sel1) (WrapFull ia :* Nil) =
sel1Size (infoType ia) (infoSize ia)
sizeProp (C' Sel2) (WrapFull ia :* Nil) =
sel2Size (infoType ia) (infoSize ia)
sizeProp (C' Sel3) (WrapFull ia :* Nil) =
sel3Size (infoType ia) (infoSize ia)
sizeProp (C' Sel4) (WrapFull ia :* Nil) =
sel4Size (infoType ia) (infoSize ia)
sizeProp (C' Sel5) (WrapFull ia :* Nil) =
sel5Size (infoType ia) (infoSize ia)
sizeProp (C' Sel6) (WrapFull ia :* Nil) =
sel6Size (infoType ia) (infoSize ia)
sizeProp (C' Sel7) (WrapFull ia :* Nil) =
sel7Size (infoType ia) (infoSize ia)
-- | Compute a witness that a symbol and an expression have the same result type
tupEq :: Type (DenResult a) =>
sym a -> ASTF (Decor Info dom) b -> Maybe (TypeEq (DenResult a) b)
tupEq _ b = typeEq typeRep (infoType $ getInfo b)
instance
( (Tuple :|| Type) :<: dom
, (Select :|| Type) :<: dom
, OptimizeSuper dom
) =>
Optimize (Tuple :|| Type) dom
where
constructFeatOpt _ (C' tup@Tup2) (s1 :* s2 :* Nil)
| (prjF -> Just (C' Sel1)) :$ a <- s1
, (prjF -> Just (C' Sel2)) :$ b <- s2
, alphaEq a b
, Just TypeEq <- tupEq tup a
= return a
constructFeatOpt _ (C' tup@Tup3) (s1 :* s2 :* s3 :* Nil)
| (prjF -> Just (C' Sel1)) :$ a <- s1
, (prjF -> Just (C' Sel2)) :$ b <- s2
, (prjF -> Just (C' Sel3)) :$ c <- s3
, alphaEq a b
, alphaEq a c
, Just TypeEq <- tupEq tup a
= return a
constructFeatOpt _ (C' tup@Tup4) (s1 :* s2 :* s3 :* s4 :* Nil)
| (prjF -> Just (C' Sel1)) :$ a <- s1
, (prjF -> Just (C' Sel2)) :$ b <- s2
, (prjF -> Just (C' Sel3)) :$ c <- s3
, (prjF -> Just (C' Sel4)) :$ d <- s4
, alphaEq a b
, alphaEq a c
, alphaEq a d
, Just TypeEq <- tupEq tup a
= return a
constructFeatOpt _ (C' tup@Tup5) (s1 :* s2 :* s3 :* s4 :* s5 :* Nil)
| (prjF -> Just (C' Sel1)) :$ a <- s1
, (prjF -> Just (C' Sel2)) :$ b <- s2
, (prjF -> Just (C' Sel3)) :$ c <- s3
, (prjF -> Just (C' Sel4)) :$ d <- s4
, (prjF -> Just (C' Sel5)) :$ e <- s5
, alphaEq a b
, alphaEq a c
, alphaEq a d
, alphaEq a e
, Just TypeEq <- tupEq tup a
= return a
constructFeatOpt _ (C' tup@Tup6) (s1 :* s2 :* s3 :* s4 :* s5 :* s6 :* Nil)
| (prjF -> Just (C' Sel1)) :$ a <- s1
, (prjF -> Just (C' Sel2)) :$ b <- s2
, (prjF -> Just (C' Sel3)) :$ c <- s3
, (prjF -> Just (C' Sel4)) :$ d <- s4
, (prjF -> Just (C' Sel5)) :$ e <- s5
, (prjF -> Just (C' Sel6)) :$ f <- s6
, alphaEq a b
, alphaEq a c
, alphaEq a d
, alphaEq a e
, alphaEq a f
, Just TypeEq <- tupEq tup a
= return a
constructFeatOpt _ (C' tup@Tup7) (s1 :* s2 :* s3 :* s4 :* s5 :* s6 :* s7 :* Nil)
| (prjF -> Just (C' Sel1)) :$ a <- s1
, (prjF -> Just (C' Sel2)) :$ b <- s2
, (prjF -> Just (C' Sel3)) :$ c <- s3
, (prjF -> Just (C' Sel4)) :$ d <- s4
, (prjF -> Just (C' Sel5)) :$ e <- s5
, (prjF -> Just (C' Sel6)) :$ f <- s6
, (prjF -> Just (C' Sel7)) :$ g <- s7
, alphaEq a b
, alphaEq a c
, alphaEq a d
, alphaEq a e
, alphaEq a f
, alphaEq a g
, Just TypeEq <- tupEq tup a
= return a
constructFeatOpt opts feat args = constructFeatUnOpt opts feat args
constructFeatUnOpt opts x@(C' _) = constructFeatUnOptDefault opts x
instance
( (Select :|| Type) :<: dom
, CLambda Type :<: dom
, (Tuple :|| Type) :<: dom
, Let :<: dom
, (Variable :|| Type) :<: dom
, OptimizeSuper dom
) =>
Optimize (Select :|| Type) dom
where
constructFeatOpt opts s@(C' Sel1) (t :* Nil)
| ((prjF -> Just (C' Tup2)) :$ a :$ _) <- t = return a
| ((prjF -> Just (C' Tup3)) :$ a :$ _ :$ _) <- t = return a
| ((prjF -> Just (C' Tup4)) :$ a :$ _ :$ _ :$ _) <- t = return a
| ((prjF -> Just (C' Tup5)) :$ a :$ _ :$ _ :$ _ :$ _) <- t = return a
| ((prjF -> Just (C' Tup6)) :$ a :$ _ :$ _ :$ _ :$ _ :$ _) <- t = return a
| ((prjF -> Just (C' Tup7)) :$ a :$ _ :$ _ :$ _ :$ _ :$ _ :$ _) <- t = return a
| ((prj -> Just Let) :$ a :$ (lam :$ b)) <- t
, (Just v@(SubConstr2 (Lambda {}))) <- prjLambda lam
= do s' <- constructFeatOpt opts s (b :* Nil)
b' <- constructFeatOpt opts (reuseCLambda v) (s' :* Nil)
constructFeatOpt opts Let (a :* b' :* Nil)
constructFeatOpt opts s@(C' Sel2) (t :* Nil)
| ((prjF -> Just (C' Tup2)) :$ _ :$ a) <- t = return a
| ((prjF -> Just (C' Tup3)) :$ _ :$ a :$ _) <- t = return a
| ((prjF -> Just (C' Tup4)) :$ _ :$ a :$ _ :$ _) <- t = return a
| ((prjF -> Just (C' Tup5)) :$ _ :$ a :$ _ :$ _ :$ _) <- t = return a
| ((prjF -> Just (C' Tup6)) :$ _ :$ a :$ _ :$ _ :$ _ :$ _) <- t = return a
| ((prjF -> Just (C' Tup7)) :$ _ :$ a :$ _ :$ _ :$ _ :$ _ :$ _) <- t = return a
| ((prj -> Just Let) :$ a :$ (lam :$ b)) <- t
, (Just v@(SubConstr2 (Lambda {}))) <- prjLambda lam
= do s' <- constructFeatOpt opts s (b :* Nil)
b' <- constructFeatOpt opts (reuseCLambda v) (s' :* Nil)
constructFeatOpt opts Let (a :* b' :* Nil)
constructFeatOpt opts s@(C' Sel3) (t :* Nil)
| ((prjF -> Just (C' Tup3)) :$ _ :$ _ :$ a) <- t = return a
| ((prjF -> Just (C' Tup4)) :$ _ :$ _ :$ a :$ _) <- t = return a
| ((prjF -> Just (C' Tup5)) :$ _ :$ _ :$ a :$ _ :$ _) <- t = return a
| ((prjF -> Just (C' Tup6)) :$ _ :$ _ :$ a :$ _ :$ _ :$ _) <- t = return a
| ((prjF -> Just (C' Tup7)) :$ _ :$ _ :$ a :$ _ :$ _ :$ _ :$ _) <- t = return a
| ((prj -> Just Let) :$ a :$ (lam :$ b)) <- t
, (Just v@(SubConstr2 (Lambda {}))) <- prjLambda lam
= do s' <- constructFeatOpt opts s (b :* Nil)
b' <- constructFeatOpt opts (reuseCLambda v) (s' :* Nil)
constructFeatOpt opts Let (a :* b' :* Nil)
constructFeatOpt opts s@(C' Sel4) (t :* Nil)
| ((prjF -> Just (C' Tup4)) :$ _ :$ _ :$ _ :$ a) <- t = return a
| ((prjF -> Just (C' Tup5)) :$ _ :$ _ :$ _ :$ a :$ _) <- t = return a
| ((prjF -> Just (C' Tup6)) :$ _ :$ _ :$ _ :$ a :$ _ :$ _) <- t = return a
| ((prjF -> Just (C' Tup7)) :$ _ :$ _ :$ _ :$ a :$ _ :$ _ :$ _) <- t = return a
| ((prj -> Just Let) :$ a :$ (lam :$ b)) <- t
, (Just v@(SubConstr2 (Lambda {}))) <- prjLambda lam
= do s' <- constructFeatOpt opts s (b :* Nil)
b' <- constructFeatOpt opts (reuseCLambda v) (s' :* Nil)
constructFeatOpt opts Let (a :* b' :* Nil)
constructFeatOpt opts s@(C' Sel5) (t :* Nil)
| ((prjF -> Just (C' Tup5)) :$ _ :$ _ :$ _ :$ _ :$ a) <- t = return a
| ((prjF -> Just (C' Tup6)) :$ _ :$ _ :$ _ :$ _ :$ a :$ _) <- t = return a
| ((prjF -> Just (C' Tup7)) :$ _ :$ _ :$ _ :$ _ :$ a :$ _ :$ _) <- t = return a
| ((prj -> Just Let) :$ a :$ (lam :$ b)) <- t
, (Just v@(SubConstr2 (Lambda {}))) <- prjLambda lam
= do s' <- constructFeatOpt opts s (b :* Nil)
b' <- constructFeatOpt opts (reuseCLambda v) (s' :* Nil)
constructFeatOpt opts Let (a :* b' :* Nil)
constructFeatOpt opts s@(C' Sel6) (t :* Nil)
| ((prjF -> Just (C' Tup6)) :$ _ :$ _ :$ _ :$ _ :$ _ :$ a) <- t = return a
| ((prjF -> Just (C' Tup7)) :$ _ :$ _ :$ _ :$ _ :$ _ :$ a :$ _) <- t = return a
| ((prj -> Just Let) :$ a :$ (lam :$ b)) <- t
, (Just v@(SubConstr2 (Lambda {}))) <- prjLambda lam
= do s' <- constructFeatOpt opts s (b :* Nil)
b' <- constructFeatOpt opts (reuseCLambda v) (s' :* Nil)
constructFeatOpt opts Let (a :* b' :* Nil)
constructFeatOpt opts s@(C' Sel7) (t :* Nil)
| ((prjF -> Just (C' Tup7)) :$ _ :$ _ :$ _ :$ _ :$ _ :$ _ :$ a) <- t = return a
| ((prj -> Just Let) :$ a :$ (lam :$ b)) <- t
, (Just v@(SubConstr2 (Lambda {}))) <- prjLambda lam
= do s' <- constructFeatOpt opts s (b :* Nil)
b' <- constructFeatOpt opts (reuseCLambda v) (s' :* Nil)
constructFeatOpt opts Let (a :* b' :* Nil)
constructFeatOpt opts feat args = constructFeatUnOpt opts feat args
constructFeatUnOpt opts x@(C' _) = constructFeatUnOptDefault opts x
| rCEx/feldspar-lang-small | src/Feldspar/Core/Constructs/Tuple.hs | bsd-3-clause | 14,139 | 0 | 17 | 4,712 | 6,067 | 3,010 | 3,057 | 284 | 1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.KHR.ParallelShaderCompile
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.KHR.ParallelShaderCompile (
-- * Extension Support
glGetKHRParallelShaderCompile,
gl_KHR_parallel_shader_compile,
-- * Enums
pattern GL_COMPLETION_STATUS_KHR,
pattern GL_MAX_SHADER_COMPILER_THREADS_KHR,
-- * Functions
glMaxShaderCompilerThreadsKHR
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/KHR/ParallelShaderCompile.hs | bsd-3-clause | 813 | 0 | 5 | 105 | 62 | 46 | 16 | 10 | 0 |
{-# LANGUAGE OverloadedStrings #-}
import Language.Swift.Quote.Syntax
import Language.Swift.Quote.Pretty
import Control.Arrow (left, right)
import Language.Swift.Quote.Parser
import qualified Data.ByteString.Lazy.Char8 as C
import Data.Either
import qualified Data.Text.Lazy as L
import qualified Data.Text as T
import qualified Data.Text.IO as DTI
import Debug.Trace
import System.IO
import System.FilePath
import Test.Tasty
import Test.Tasty.Golden
import Test.Tasty.HUnit
import qualified Text.Parsec.Text as P
import qualified Text.ParserCombinators.Parsec as PC
import Text.PrettyPrint.Mainland (Pretty, ppr, prettyLazyText)
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
hSetBuffering stderr NoBuffering
swifts <- findByExtension [".swift"] "tests/golden"
defaultMain $ testGroup "Unit"
[ operatorTests
, src2ast
, src2ast2src
, swifts2Goldens swifts
]
swifts2Goldens :: [FilePath] -> TestTree
swifts2Goldens paths = testGroup "Golden" $ map swift2Golden paths
where
swift2Golden s = mkTest s (s <.> "golden")
mkTest s g = goldenVsStringDiff (dropExtension s) diffCmd g (file2ast2bytestring s)
diffCmd ref new = ["diff", "--unified=5", ref, new]
text2ast2string :: T.Text -> String
text2ast2string input = case parse input of
(Left err) -> err
(Right m) -> L.unpack $ prettyPrint (trace ("\n\n" ++ show m ++ "\n\n") m)
file2ast2bytestring :: String -> IO C.ByteString
file2ast2bytestring fileName = do
contents <- DTI.readFile fileName
return (C.pack (text2ast2string contents))
litIntExp :: Integer -> Expression
litIntExp i = litExp (NumericLiteral (show i))
litStrExp :: String -> Expression
litStrExp s = litExp (StringLiteral (StaticStringLiteral s))
parserToEither :: P.Parser a -> T.Text -> Either String a
parserToEither p input = left show (PC.parse p "<stdin>" input)
goodOperator :: String -> T.Text -> TestTree
goodOperator o input = testCase ("good operator: " ++ wrap o ++ " input " ++ wrap (T.unpack input)) $
parserToEither (op o) input @?= Right ()
badOperator :: String -> T.Text -> TestTree
badOperator o input = testCase ("bad operator: " ++ wrap o ++ " input " ++ wrap (T.unpack input)) $
assertBool ("Expected left, got " ++ show e) (isLeft e)
where
e = parserToEither (op o) input
operatorTests :: TestTree
operatorTests = testGroup "Operator"
[ goodOperator "&" "&"
, goodOperator "&" "& "
, goodOperator "&" " &"
, goodOperator "&" " & "
, goodOperator "<" "<"
, goodOperator ">" ">"
, goodOperator ">" ">"
, goodOperator "+" "+"
, goodOperator "-" "-"
, goodOperator "*" "*"
, goodOperator "/" "/"
, goodOperator "!" "!"
, goodOperator "?" "?"
, goodOperator "++" "++"
, goodOperator "->" "->"
, goodOperator "=" "="
, badOperator ":" ":"
, badOperator "." "."
, badOperator ".." ".."
, badOperator "..." "..."
, badOperator "_" "_"
, badOperator "," ","
, badOperator "(" "("
, badOperator ")" ")"
, badOperator "a" "a"
, badOperator "foo" "foo"
]
src2ast :: TestTree
src2ast = testGroup "src2ast"
[ expressionTest "1" $ litIntExp 1
, expressionTest "-1" $ Expression Nothing (PrefixExpression (Just "-") (PostfixPrimary (PrimaryLiteral (RegularLiteral (NumericLiteral "1"))))) []
, expressionTest " 2" $ litIntExp 2
, expressionTest "3 " $ litIntExp 3
, expressionTest " 10 " $ litIntExp 10
, expressionTest "1234567890" $ litIntExp 1234567890
, expressionTest "1.0" $ litExp (NumericLiteral "1.0")
, expressionTest "1.1234567890" $ litExp (NumericLiteral "1.1234567890")
, expressionTest "0xb10101111" $ litExp (NumericLiteral "0xb10101111")
, expressionTest "0xCAFEBABE" $ litExp (NumericLiteral "0xCAFEBABE")
, expressionTest "0o12345670" $ litExp (NumericLiteral "0o12345670")
, expressionTest "\"Hello\"" $ litStrExp "Hello"
, expressionTest " \"Hello\"" $ litStrExp "Hello"
, expressionTest "\"Hello\" " $ litStrExp "Hello"
, expressionTest " \"Hello\" " $ litStrExp "Hello"
, expressionTest "true" $ litExp (BooleanLiteral True)
, expressionTest "false" $ litExp (BooleanLiteral False)
, expressionTest " true" $ litExp (BooleanLiteral True)
, expressionTest "true " $ litExp (BooleanLiteral True)
, expressionTest " true " $ litExp (BooleanLiteral True)
, expressionTest " false" $ litExp (BooleanLiteral False)
, expressionTest "false " $ litExp (BooleanLiteral False)
, expressionTest " false " $ litExp (BooleanLiteral False)
, expressionTest "&a" $ Expression Nothing (InOutExpression "a") []
, expressionTest "& b" $ Expression Nothing (InOutExpression "b") []
, expressionTest "self" $ self Self
, expressionTest "self.a" $ self (SelfMethod "a")
, expressionTest "self. a" $ self (SelfMethod "a")
, expressionTest "self . a" $ self (SelfMethod "a")
, expressionTest " self . a" $ self (SelfMethod "a")
, expressionTest " self . a " $ self (SelfMethod "a")
, expressionTest "self[1]" $ self (SelfSubscript [litIntExp 1])
, expressionTest "self[1,2]" $ self (SelfSubscript [litIntExp 1, litIntExp 2])
, expressionTest "self[1, 2]" $ self (SelfSubscript [litIntExp 1, litIntExp 2])
, expressionTest "self [1, 2]" $ self (SelfSubscript [litIntExp 1, litIntExp 2])
, expressionTest "self [ 1, 2 ]" $ self (SelfSubscript [litIntExp 1, litIntExp 2])
, expressionTest "self.init" $ self SelfInit
, expressionTest "a.123" $ Expression Nothing (PrefixExpression Nothing (ExplicitMemberExpressionDigits (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "a", idgGenericArgs = []}))) "123")) []
, expressionTest "a.b" $ Expression Nothing (PrefixExpression Nothing (ExplicitMemberExpressionIdentifier (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "a", idgGenericArgs = []}))) (IdG {idgIdentifier = "b", idgGenericArgs = []}))) []
, expressionTest ".b" $ Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryImplicitMember "b"))) []
, expressionTest "foo" $ primary1 "foo"
, expressionTest "a" $ primary1 "a"
, expressionTest "a1" $ primary1 "a1"
, expressionTest "xs" $ primary1 "xs"
, expressionTest "1 is Int" $ typeCastExp (NumericLiteral "1") "is" (TypeIdentifierType (TypeIdentifier [("Int",[])]))
, expressionTest "200 as Double" $ typeCastExp (NumericLiteral "200") "as" (TypeIdentifierType (TypeIdentifier [("Double",[])]))
, expressionTest "\"s\" as? String" $ typeCastExp (StringLiteral (StaticStringLiteral "s")) "as?" (TypeIdentifierType (TypeIdentifier [("String",[])]))
, expressionTest "\"s\" as! String" $ typeCastExp (StringLiteral (StaticStringLiteral "s")) "as!" (TypeIdentifierType (TypeIdentifier [("String",[])]))
, expressionTest "a++" $ Expression Nothing (PrefixExpression Nothing
(PostfixOperator (PostfixPrimary (PrimaryExpression1
(IdG {idgIdentifier = "a", idgGenericArgs = []}))) "++")) []
, expressionTest "foo()" $ (Expression Nothing (PrefixExpression Nothing (FunctionCallE (FunctionCall (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "foo", idgGenericArgs = []}))) [] Nothing))) [])
, expressionTest "foo(1)" $ Expression Nothing (PrefixExpression Nothing (FunctionCallE (FunctionCall (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "foo", idgGenericArgs = []}))) [ExpressionElement Nothing (Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryLiteral (RegularLiteral (NumericLiteral "1"))))) [])] Nothing))) []
, expressionTest "foo(1, 2)" $ Expression Nothing (PrefixExpression Nothing (FunctionCallE (FunctionCall (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "foo", idgGenericArgs = []}))) [ExpressionElement Nothing (Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryLiteral (RegularLiteral (NumericLiteral "1"))))) []),ExpressionElement Nothing (Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryLiteral (RegularLiteral (NumericLiteral "2"))))) [])] Nothing))) []
, expressionTest "foo(1, 2, isBlue: false)" $ Expression Nothing (PrefixExpression Nothing (FunctionCallE (FunctionCall (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "foo", idgGenericArgs = []}))) [ExpressionElement Nothing (Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryLiteral (RegularLiteral (NumericLiteral "1"))))) []),ExpressionElement Nothing (Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryLiteral (RegularLiteral (NumericLiteral "2"))))) []),ExpressionElement (Just "isBlue") (Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryLiteral (RegularLiteral (BooleanLiteral False))))) [])] Nothing))) []
, expressionTest "1.init" $ Expression Nothing (PrefixExpression Nothing (PostfixExpression4Initalizer (PostfixPrimary (PrimaryLiteral (RegularLiteral (NumericLiteral "1")))))) []
, expressionTest "a.init" $ Expression Nothing (PrefixExpression Nothing (PostfixExpression4Initalizer (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "a", idgGenericArgs = []}))))) []
, expressionTest "foo.init" $ Expression Nothing (PrefixExpression Nothing (PostfixExpression4Initalizer (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "foo", idgGenericArgs = []}))))) []
, expressionTest "a.self" $ Expression Nothing (PrefixExpression Nothing (PostfixSelf (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "a", idgGenericArgs = []}))))) []
, expressionTest "a.dynamicType" $ Expression Nothing (PrefixExpression Nothing (PostfixDynamicType (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "a", idgGenericArgs = []}))))) []
, expressionTest "a!" $ Expression Nothing (PrefixExpression Nothing (PostfixForcedValue (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "a", idgGenericArgs = []}))))) []
, expressionTest "a?" $ Expression Nothing (PrefixExpression Nothing (PostfixOptionChaining (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "a", idgGenericArgs = []}))))) []
, expressionTest "a[1]" $ Expression Nothing (PrefixExpression Nothing (Subscript (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "a", idgGenericArgs = []}))) [Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryLiteral (RegularLiteral (NumericLiteral "1"))))) []])) []
, expressionTest "try someThrowingFunction() + anotherThrowingFunction()" $ Expression (Just "try") (PrefixExpression Nothing (FunctionCallE (FunctionCall (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "someThrowingFunction", idgGenericArgs = []}))) [] Nothing))) [BinaryExpression1 {beOperator = "+", bePrefixExpression = PrefixExpression Nothing (FunctionCallE (FunctionCall (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "anotherThrowingFunction", idgGenericArgs = []}))) [] Nothing))}]
, expressionTest "(try someThrowingFunction()) + anotherThrowingFunction()" $
Expression Nothing (PrefixExpression Nothing (PostfixPrimary
(PrimaryParenthesized
[ExpressionElement Nothing
(Expression (Just "try") (PrefixExpression Nothing
(FunctionCallE (FunctionCall (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "someThrowingFunction", idgGenericArgs = []}))) [] Nothing))) [])
]))) [BinaryExpression1 {beOperator = "+", bePrefixExpression = PrefixExpression Nothing
(FunctionCallE (FunctionCall (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "anotherThrowingFunction", idgGenericArgs = []}))) [] Nothing))}]
, moduleTest "import foo" $ singleImport Nothing ["foo"]
, moduleTest "import foo.math.BitVector" $ singleImport Nothing ["foo", "math", "BitVector"]
, moduleTest "import typealias foo.a.b" $ singleImport (Just "typealias") ["foo", "a", "b"]
, moduleTest "print(\"Hello world\\n\")" $ Module [ExpressionStatement (Expression Nothing (PrefixExpression Nothing (FunctionCallE (FunctionCall (PostfixPrimary (PrimaryExpression1 (IdG {idgIdentifier = "print", idgGenericArgs = []}))) [ExpressionElement Nothing (Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryLiteral (RegularLiteral (StringLiteral (StaticStringLiteral"Hello world\n")))))) [])] Nothing))) [])]
, moduleTest "let n = 1" $ Module [DeclarationStatement (ConstantDeclaration [] [] [PatternInitializer (IdentifierPattern "n" Nothing) (Just (Initializer (Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryLiteral (RegularLiteral (NumericLiteral "1"))))) [])))])]
, moduleTest "var d = 1.0" $ Module [DeclarationStatement (DeclVariableDeclaration (VarDeclPattern [] [] [PatternInitializer (IdentifierPattern "d" Nothing) (Just (Initializer (Expression Nothing (PrefixExpression Nothing (PostfixPrimary (PrimaryLiteral (RegularLiteral (NumericLiteral "1.0"))))) [])))]))]
, moduleTest "typealias TypeAliasName = String" $ Module [DeclarationStatement (TypeAlias [] Nothing "TypeAliasName" (TypeIdentifierType (TypeIdentifier [("String",[])])))]
]
singleImport :: Maybe ImportKind -> [String] -> Module
singleImport optImportKind imports = Module [DeclarationStatement (import_ optImportKind (map ImportIdentifier imports))]
import_ :: Maybe ImportKind -> ImportPath -> Declaration
import_ = ImportDeclaration []
src2ast2src :: TestTree
src2ast2src = testGroup "src2ast2src"
[ ppExpTest "1" "1"
, ppExpTest "2 " "2"
, ppExpTest " 3" "3"
, ppExpTest " 4 " "4"
, ppExpTest "\"Hello\"" "\"Hello\""
, ppExpTest "\"foo\"" "\"foo\""
, ppExpTest " \"x\"" "\"x\""
, ppExpTest " \"y\" " "\"y\""
, ppExpTest " true " "true"
, ppExpTest "true" "true"
, ppExpTest " \t false " "false"
, ppExpTest "self" "self"
, ppExpTest "self . id" "self.id"
, ppExpTest "self . init" "self.init"
, ppExpTest "self [1,2, 3] " "self[1, 2, 3]"
, ppExpTest "foo()" "foo()"
, ppExpTest "foo( ) " "foo()"
, ppExpTest "foo ( ) " "foo()"
, ppExpTest "foo (false ) " "foo(false)"
, ppExpTest "foo (a ) " "foo(a)"
, ppExpTest "foo ( 1, 2 , isFred : true)" "foo(1, 2, isFred: true)"
]
primary1 :: String -> Expression
primary1 ident =
Expression Nothing
(PrefixExpression Nothing
(PostfixPrimary
(PrimaryExpression1 (IdG ident [])))) []
typeCastExp :: Literal -> String -> Type -> Expression
typeCastExp lit typeCastKind t =
Expression Nothing
(PrefixExpression Nothing
(PostfixPrimary
(PrimaryLiteral
(RegularLiteral lit)))) [BinaryExpression4 typeCastKind t]
litExp :: Literal -> Expression
litExp lit =
Expression Nothing
(PrefixExpression Nothing
(PostfixPrimary
(PrimaryLiteral
(RegularLiteral lit)))) []
self :: SelfExpression -> Expression
self se =
Expression Nothing
(PrefixExpression Nothing
(PostfixPrimary
(PrimarySelf
se))) []
wrap :: String -> String
wrap s = "[[" ++ s ++ "]]"
moduleTest :: T.Text -> Module -> TestTree
moduleTest input m = testCase ("module: " ++ wrap (T.unpack input)) $
parse input @?= Right m
expressionTest :: T.Text -> Expression -> TestTree
expressionTest input e = testCase ("expression: " ++ wrap (T.unpack input)) $
parseExpression input @?= Right e
pp :: Pretty pretty => Either d pretty -> Either d L.Text
pp = right (prettyLazyText 100 . ppr)
ppExpTest :: T.Text -> String -> TestTree
ppExpTest input s = testCase ("expression " ++ wrap (T.unpack input) ++ " => " ++ wrap s) $
sosrc @?= Right s
where ast = parseExpression input
osrc = pp ast
sosrc = fmap L.unpack osrc
| steshaw/language-swift-quote | tests/unit/Main.hs | bsd-3-clause | 15,523 | 0 | 34 | 2,460 | 5,015 | 2,558 | 2,457 | 234 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Network.Socks5
import Network.Socks5.Types
import Network.Socks5.Lowlevel
import Network.Socket hiding (recv, recvFrom, sendTo)
import Network.Socket.ByteString
import Control.Concurrent
import Data.List (find)
import Data.Foldable (for_)
import Data.Serialize (encode,decode)
import Control.Monad
import Control.Exception
import System.IO
import System.IO.Error
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString as B
import Data.ByteString (ByteString)
import BinaryHandle
------------------------------------------------------------------------
-- Configuration
------------------------------------------------------------------------
data Configuration = Configuration
{ listenHost :: HostName
, listenService :: ServiceName
, debugLevel :: Verbosity
, logLock :: MVar ()
, configUser :: ByteString
, configPass :: ByteString
, authPreference :: [SocksMethod]
, bindAddress :: SockAddr
, forceLearningMode :: Bool
}
data Verbosity = VInfo | VDebug
deriving (Read, Show, Eq, Ord)
getConfiguration = do
logMutex <- newMVar ()
return (Configuration "" "2080" VDebug logMutex "emertens" "paswerd" [SocksMethodUsernamePassword,SocksMethodNone]
(SockAddrInet aNY_PORT iNADDR_ANY) True)
------------------------------------------------------------------------
-- Logging
------------------------------------------------------------------------
logMsg :: Verbosity -> Configuration -> String -> IO ()
logMsg level config msg
= when (level <= debugLevel config)
$ do threadId <- myThreadId
let msg' = drop 9 (show threadId) ++ ": " ++ msg
withMVar (logLock config) (const (hPutStrLn stderr msg'))
info :: Configuration -> String -> IO ()
info = logMsg VInfo
debug :: Configuration -> String -> IO ()
debug = logMsg VDebug
------------------------------------------------------------------------
-- Main
------------------------------------------------------------------------
main :: IO ()
main = withSocketsDo $ do
config <- getConfiguration
let hints = tcpHints { addrFlags = [AI_PASSIVE, AI_ADDRCONFIG] }
ais <- getAddrInfo (Just hints) (Just (listenHost config)) (Just (listenService config))
when (null ais) (fail "Failed to resolve listening address")
done <- newEmptyMVar
for_ ais $ \ai ->
forkIO (listenerLoop config ai `finally` putMVar done ())
takeMVar done
------------------------------------------------------------------------
-- Top-level listener
------------------------------------------------------------------------
listenerLoop :: Configuration -> AddrInfo -> IO ()
listenerLoop config ai =
withTcpSocket (addrFamily ai) $ \s -> do
setSocketOption s ReuseAddr 1
-- We support binding on multiple addresses. Keeping them separate
-- will help later when we're making additional connections for UDP.
when (addrFamily ai == AF_INET6) (setSocketOption s IPv6Only 1)
bind s (addrAddress ai)
info config ("Listening on " ++ show (addrAddress ai))
listen s maxListenQueue
forever $ do
(c,who) <- accept s
cxt <- newGetContext
info config ("Connection accepted from " ++ show who)
forkIO (handleClientHello config c cxt who `finally` sClose c)
------------------------------------------------------------------------
-- Client startup
------------------------------------------------------------------------
handleClientHello :: Configuration -> Socket -> GetContext -> SockAddr -> IO ()
handleClientHello config s cxt who = do
debug config ("Client thread started for " ++ show who)
SocksHello authTypes <- recvGet s cxt
debug config ("Client proposed " ++ show authTypes)
debug config ("Server supports " ++ show (authPreference config))
case find (`elem` authTypes) (authPreference config) of
Just SocksMethodNone ->
do debug config "No authentication selected"
sendSerialized s (SocksHelloResponse SocksMethodNone)
readyForClientRequest config s cxt who
Just SocksMethodUsernamePassword ->
do debug config "Username/password authentication selected"
sendSerialized s (SocksHelloResponse SocksMethodUsernamePassword)
login <- recvGet s cxt
if configUser config == plainUsername login &&
configPass config == plainPassword login
then do debug config "Authentication succeeded"
sendSerialized s SocksPlainLoginSuccess
readyForClientRequest config s cxt who
else do debug config "Authentication failed"
sendSerialized s SocksPlainLoginFailure
_ -> do debug config "Authentication failed"
sendSerialized s (SocksHelloResponse SocksMethodNotAcceptable)
------------------------------------------------------------------------
-- Post authentication
------------------------------------------------------------------------
readyForClientRequest :: Configuration -> Socket -> GetContext -> SockAddr -> IO ()
readyForClientRequest config s cxt who = do
req <- recvGet s cxt
mbDst <- resolveSocksAddress config (requestDst req)
case mbDst of
Nothing -> do info config "Connection failed"
sendSerialized s (errorResponse SocksErrorHostUnreachable)
Just dst -> do handleClientRequest (requestCommand req) config s who dst
------------------------------------------------------------------------
-- Request modes
------------------------------------------------------------------------
handleClientRequest :: SocksCommand -> Configuration -> Socket -> SockAddr -> SockAddr -> IO ()
handleClientRequest SocksCommandConnect config s who dst =
flip finally (debug config "Thread complete") $
withTcpSocket (sockAddrFamily dst) $ \c ->
do
debug config ("Connecting to " ++ show dst)
connectResult <- tryIOError (connect c dst)
case connectResult of
Left err -> do
info config ("Connect failed with " ++ show err)
sendSerialized s (errorResponse SocksErrorConnectionRefused)
Right () -> do
info config ("Connected to " ++ show dst)
localAddr <- sockAddrToSocksAddress `fmap` getSocketName c
sendSerialized s (SocksResponse SocksReplySuccess localAddr)
tcpRelay config s c
handleClientRequest SocksCommandBind config s who dst =
flip finally (info config "Thread Complete") $
withTcpSocket (sockAddrFamily dst) $ \c ->
do
debug config "Binding TCP socket"
bind c (bindAddress config)
listen c 0
boundAddr <- getSocketName c
info config ("Socket bound to " ++ show boundAddr)
sendSerialized s (SocksResponse SocksReplySuccess (sockAddrToSocksAddress boundAddr))
bracket (accept c) (sClose.fst) $ \(c1,who) ->
do debug config ("Connection received from " ++ show who)
sendSerialized s (SocksResponse SocksReplySuccess (sockAddrToSocksAddress who))
tcpRelay config s c1
handleClientRequest SocksCommandUdpAssociate config s who dst =
flip finally (info config "Thread Complete") $
getSocketName s >>= \localAddr ->
withUdpSocket (sockAddrFamily localAddr) $ \c1 ->
withUdpSocket (sockAddrFamily localAddr) $ \c2 ->
do
debug config "Associating UDP socket"
debug config ("Client UDP address: " ++ show dst)
bind c1 (setPort 0 localAddr)
localDataAddr <- getSocketName c1
debug config ("Server UDP address: " ++ show localDataAddr)
bind c2 (wildAddress (sockAddrFamily localAddr))
remoteDataAddr <- getSocketName c2
info config ("UDP outgoing socket bound to " ++ show remoteDataAddr)
sendSerialized s (SocksResponse SocksReplySuccess (sockAddrToSocksAddress localDataAddr))
let relay = if forceLearningMode config || isWild dst then learningUdpRelay else udpRelay
relay config c1 c2 dst
-- UDP connections are preserved until the control connection goes down
setSocketOption s KeepAlive 1
recv s 1
return ()
handleClientRequest cmd config s who _ = do
info config ("Unsupported command " ++ show cmd)
sendSerialized s (errorResponse SocksErrorCommandNotSupported)
isWild (SockAddrInet p h ) = p == aNY_PORT || h == iNADDR_ANY
isWild (SockAddrInet6 p _ h _) = p == aNY_PORT || h == iN6ADDR_ANY
isWild (SockAddrUnix _ ) = error "isWild: SockAddrUnix not supported"
discardIOErrors m = catchIOError m (const (return ()))
learningUdpRelay :: Configuration -> Socket -> Socket -> SockAddr -> IO ()
learningUdpRelay config c1 c2 dst =
do debug config "UDP address learning mode"
forkIO (discardIOErrors handleFirst)
return ()
where
handleFirst = do
(bs,src) <- recvFrom c1 4096
debug config ("-> client " ++ show src ++ " (" ++ show (B.length bs) ++ ")")
case decode bs of
Right udp | udpFragment udp == 0 -> do
mbAddr <- resolveSocksAddress config (udpRemoteAddr udp)
case mbAddr of
Nothing -> do debug config ("Dropping unresolvable packet: " ++ show (udpRemoteAddr udp))
Just addr -> do let cnts = udpContents udp
debug config ("<- remote " ++ show addr ++ " (" ++ show (B.length cnts) ++ ")")
sendTo c2 cnts addr
return ()
udpRelay config c1 c2 src
Left err -> do debug config ("Ignoring malformed UDP packet: " ++ err)
handleFirst
udpRelay :: Configuration -> Socket -> Socket -> SockAddr -> IO ()
udpRelay config c1 c2 dst = do
_forwardThread <- forkIO $ discardIOErrors $ forever $ do
(bs,src) <- recvFrom c1 4096
debug config ("-> client " ++ show src ++ " (" ++ show (B.length bs) ++ ")")
case (src == dst, decode bs) of
(False,_) -> debug config "Dropping UDP packet due to source mismatch"
(_,Left err) -> debug config ("Dropping malformed UDP packet: " ++ err)
(_,Right udp) | udpFragment udp /= 0 -> debug config ("Dropping fragmented UDP packet: " ++ show (udpFragment udp))
(True, Right udp) -> do
mbAddr <- resolveSocksAddress config (udpRemoteAddr udp)
case mbAddr of
Nothing -> do debug config "Dropping unresolvable packet"
Just addr -> do let cnts = udpContents udp
debug config ("<- remote " ++ show addr ++ " (" ++ show (B.length cnts) ++ ")")
sendTo c2 cnts addr
return ()
_backwardThread <- forkIO $ discardIOErrors $ forever $ do
(msg, remote) <- recvFrom c2 4096
debug config ("-> remote " ++ show remote ++ " (" ++ show (B.length msg) ++ ")")
let cnts = encode (SocksUdpEnvelope 0 (sockAddrToSocksAddress remote) msg)
debug config ("<- client " ++ show dst ++ " (" ++ show (B.length cnts) ++ ")")
sendTo c1 cnts dst
return ()
------------------------------------------------------------------------
-- TCP Proxy
------------------------------------------------------------------------
tcpRelay :: Configuration -> Socket -> Socket -> IO ()
tcpRelay config s c = do
done <- newEmptyMVar
t1 <- forkIO (shuttle config s c `finally` putMVar done ())
t2 <- forkIO (shuttle config c s `finally` putMVar done ())
takeMVar done
takeMVar done
shuttle :: Configuration -> Socket -> Socket -> IO ()
shuttle config source sink = loop `finally` cleanup
where
cleanup = do _ <- tryIOError (shutdown source ShutdownReceive)
_ <- tryIOError (shutdown sink ShutdownSend)
return ()
loop = do
sourcePeer <- getPeerName source
sourceName <- getSocketName source
sinkName <- getSocketName sink
sinkPeer <- getPeerName sink
bs <- recv source (8*4096)
unless (B.null bs) $ do
sendAll sink bs
debug config (show sourcePeer ++ " -> " ++ show sourceName
++ " : "
++ show sinkName ++ " -> " ++ show sinkPeer
++ " (" ++ show (B.length bs) ++ ")")
loop
------------------------------------------------------------------------
-- Address utilities
------------------------------------------------------------------------
sockAddrToSocksAddress :: SockAddr -> SocksAddress
sockAddrToSocksAddress (SockAddrInet p h ) = SocksAddress (SocksAddrIPV4 h) p
sockAddrToSocksAddress (SockAddrInet6 p _ h _) = SocksAddress (SocksAddrIPV6 h) p
sockAddrToSocksAddress (SockAddrUnix _ ) = error "sockAddrToSocksAddress: Unix sockets not supported"
sockAddrFamily :: SockAddr -> Family
sockAddrFamily SockAddrInet {} = AF_INET
sockAddrFamily SockAddrInet6 {} = AF_INET6
sockAddrFamily SockAddrUnix {} = AF_UNIX
errorResponse :: SocksError -> SocksResponse
errorResponse err = (SocksResponse (SocksReplyError err) (SocksAddress (SocksAddrIPV4 iNADDR_ANY) aNY_PORT))
resolveSocksAddress :: Configuration -> SocksAddress -> IO (Maybe SockAddr)
resolveSocksAddress config (SocksAddress host port) =
case host of
SocksAddrIPV4 a -> return (Just (SockAddrInet port a ))
SocksAddrIPV6 a -> return (Just (SockAddrInet6 port 0 a 0))
SocksAddrDomainName str -> do
let hostname = B8.unpack str
ais <- getAddrInfo (Just tcpHints) (Just hostname) (Just (show port))
case ais of
ai : _ -> do let addr = addrAddress ai
debug config ("Resolved " ++ hostname ++ " to " ++ show addr)
return (Just addr)
[] -> do debug config ("Unable to resolve " ++ B8.unpack str)
return Nothing
tcpHints :: AddrInfo
tcpHints = defaultHints
{ addrSocketType = Stream
, addrFlags = [AI_ADDRCONFIG]
}
setPort :: PortNumber -> SockAddr -> SockAddr
setPort port (SockAddrInet _ host ) = SockAddrInet port host
setPort port (SockAddrInet6 _ flow host scope) = SockAddrInet6 port flow host scope
setPort _ SockAddrUnix {} = error "unix sockets don't have ports"
getPort :: SockAddr -> PortNumber
getPort (SockAddrInet port _ ) = port
getPort (SockAddrInet6 port _ _ _) = port
getPort SockAddrUnix {} = error "seriously, stop using unix sockets"
wildAddress :: Family -> SockAddr
wildAddress AF_INET = SockAddrInet aNY_PORT iNADDR_ANY
wildAddress AF_INET6 = SockAddrInet6 aNY_PORT 0 iN6ADDR_ANY 0
withTcpSocket :: Family -> (Socket -> IO a) -> IO a
withTcpSocket family = bracket (socket family Stream defaultProtocol) sClose
withUdpSocket :: Family -> (Socket -> IO a) -> IO a
withUdpSocket family = bracket (socket family Datagram defaultProtocol) sClose
| glguy/s5s | Main.hs | bsd-3-clause | 14,540 | 0 | 28 | 3,153 | 4,201 | 2,017 | 2,184 | 269 | 5 |
{-# LANGUAGE ScopedTypeVariables, DeriveDataTypeable #-}
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import Data.Typeable
import Control.Concurrent (forkIO, myThreadId, threadDelay)
import qualified Control.Exception as E
import Control.Exception (Exception, IOException, throwTo)
import Control.Monad
import Control.Monad.Trans.Class
import Control.Pipe
import Control.Pipe.Combinators
import Control.Pipe.Exception
import qualified Control.Pipe.Binary as PB
import System.IO
import Prelude hiding (catch)
-- line-by-line reader with verbose initializer and finalizer
reader :: FilePath -> Producer B.ByteString IO ()
reader fp = fReader >+> PB.lines
where
fReader = bracket open close PB.handleReader
open = do
putStrLn $ "opening file " ++ show fp ++ " for reading"
openFile fp ReadMode
close h = do
hClose h
putStrLn $ "closed file " ++ show fp
-- line-by-line writer with verbose initializer and finalizer
writer :: FilePath -> Consumer B.ByteString IO ()
writer fp = pipe (`BC.snoc` '\n') >+> fWriter
where
fWriter = await >>= \x -> feed x (bracket open close PB.handleWriter)
open = do
putStrLn $ "opening file " ++ show fp ++ " for writing"
openFile fp WriteMode
close h = do
hClose h
putStrLn $ "closed file " ++ show fp
-- interactive pipe
prompt :: Pipe String String IO ()
prompt = forever $ await >>= \q -> do
lift $ putStr $ q ++ ": "
r <- lift getLine
yield r
-- copy "/etc/motd" to "/tmp/x"
ex1 :: Pipeline IO ()
ex1 = reader "/etc/motd" >+> writer "/tmp/x"
{-
opening file "/etc/motd" for reading
opening file "/tmp/x" for writing
closed file "/etc/motd"
closed file "/tmp/x"
-}
-- note that the files are not closed in LIFO order
-- output error
ex2 :: Pipeline IO ()
ex2 = reader "/etc/motd" >+> writer "/unopenable"
{-
opening file "/etc/motd" for reading
opening file "/unopenable" for writing
closed file "/etc/motd"
*** Exception: /unopenable: openFile: permission denied (Permission denied)
-}
-- note that the input file was automatically closed before the exception
-- terminated the pipeline
-- joining two files
ex3 :: Pipeline IO ()
ex3 = (reader "/etc/motd" >> reader "/usr/share/dict/words") >+>
writer "/tmp/x"
{-
opening file "/etc/motd" for reading
opening file "/tmp/x" for writing
closed file "/etc/motd"
opening file "/usr/share/dict/words" for reading
closed file "/usr/share/dict/words"
closed file "/tmp/x"
-}
-- recovering from exceptions
ex4 :: Pipeline IO ()
ex4 = (safeReader "/etc/motd" >> safeReader "/nonexistent") >+>
writer "/tmp/x"
where
safeReader fp = catch (reader fp) $ \(e :: IOException) ->
lift $ putStrLn $ "exception " ++ show e
{-
opening file "/etc/motd" for reading
opening file "/tmp/x" for writing
closed file "/etc/motd"
opening file "/nonexistent" for reading
exception /nonexistent: openFile: does not exist (No such file or directory)
closed file "/tmp/x"
-}
data Timeout = Timeout
deriving (Show, Typeable)
instance Exception Timeout
-- recovering from asynchronous exceptions
ex5 :: Pipeline IO ()
ex5 = questions >+> safePrompt >+> pipe BC.pack >+> writer "/tmp/x"
where
questions = do
yield "Project name"
yield "Version"
yield "Description"
lift $ E.throwIO Timeout
timeout t = lift $ do
tid <- myThreadId
forkIO $ do
threadDelay (t * 1000000)
throwTo tid Timeout
safePrompt = catch (timeout 5 >> prompt) $ \(_ :: Timeout) ->
lift $ putStrLn "timeout"
{-
Project name: test
opening file "/tmp/x" for writing
Version: timeout
closed file "/tmp/x"
-}
| pcapriotti/pipes-extra | examples/finalizers/simple.hs | bsd-3-clause | 3,698 | 0 | 15 | 768 | 806 | 417 | 389 | 67 | 1 |
module Main where
import Zice
main = do
ziceMain
| zachk/zice | src/Main.hs | bsd-3-clause | 57 | 0 | 6 | 17 | 15 | 9 | 6 | 4 | 1 |
import Data.List
data EncodeType a = Single a | Multiple Int a deriving (Show)
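-- Ninety-Nine Haskell Problems, P11/P12: run-length encode a list into
-- Single/Multiple constructors and decode it back. Illustrative behaviour
-- (example values assumed, not part of the original file):
--   encodeModified "aaabccd" == [Multiple 3 'a',Single 'b',Multiple 2 'c',Single 'd']
--   decodeModified (encodeModified "aaabccd") == "aaabccd"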
encode lst = map (\ x -> (length x, head x)) (Data.List.group lst)
encodeModified :: Eq a => [a] -> [EncodeType a]
encodeModified = map help . encode
where
help (1, x) = Single x
help (n, x) = Multiple n x
decodeModified :: [EncodeType b] -> [b]
decodeModified = concatMap help
where
help (Single x) = [x]
help (Multiple n x) = take n (repeat x)
| m00nlight/99-problems | haskell/p-12.hs | bsd-3-clause | 462 | 0 | 9 | 118 | 219 | 115 | 104 | 11 | 2 |
module Main where
import Finance.Quote.Yahoo
import Data.Time.Calendar
import Data.Map
quoteSymbolList = ["YHOO"] :: [QuoteSymbol]
quoteFieldsList = ["s","l1","c"] :: [QuoteField]
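-- The field codes are listed here only for illustration: "s" is commonly the
-- ticker symbol, "l1" the last trade price and "c" the change, as usually
-- documented for the old Yahoo quotes API (meanings assumed, not taken from
-- this file).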
main = do
q <- getQuote quoteSymbolList quoteFieldsList
case q of
Nothing -> error "no map"
Just m -> case (Data.Map.lookup ("YHOO","l1") m) of
Nothing -> print "no match"
Just a -> print a
let startDate = Data.Time.Calendar.fromGregorian 2013 07 01
let endDate = Data.Time.Calendar.fromGregorian 2013 08 01
h <- getHistoricalQuote (head quoteSymbolList) startDate endDate Monthly
case h of
Nothing -> error "no historical"
Just l -> sequence $ Prelude.map print l
return ()
| bradclawsie/haskell-Finance.Quote.Yahoo | quotes.hs | bsd-3-clause | 727 | 0 | 13 | 165 | 245 | 125 | 120 | 20 | 4 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS -fno-warn-name-shadowing #-}
-- | Track clocking in and out of work.
module Clockin where
import Control.Monad
import Control.Monad.Trans.Resource
import Data.Aeson
import qualified Data.ByteString.Lazy as L
import Data.Conduit
import qualified Data.Conduit.Binary as C
import qualified Data.Conduit.List as CL
import Data.List
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Time
import Data.Time.Lens
import GHC.Generics
import System.Directory
import System.FilePath
import Data.Time.Locale.Compat
import Text.Printf
-- | Configuration for the clocking in setup.
data Config = Config
{ configFilePath :: !FilePath
, configHoursPerDay :: Int
} deriving (Show)
-- | An entry in the clocking log.
data Entry
= In !ClockIn
| Out !ClockOut
deriving (Show,Generic)
instance ToJSON Entry
instance FromJSON Entry
-- | Clocking in.
data ClockIn = ClockIn
{ inProject :: !Text
, inTask :: !(Maybe Text)
, inTime :: !Time
} deriving (Show,Generic)
instance ToJSON ClockIn
instance FromJSON ClockIn
-- | Clocking out.
data ClockOut = ClockOut
{ outProject :: !Text
, outTask :: !(Maybe Text)
, outReason :: !(Maybe Text)
, outTime :: !Time
} deriving (Show,Generic)
instance ToJSON ClockOut
instance FromJSON ClockOut
-- | A status report for the current log.
data Status = Status
{ statusIn :: !Bool -- ^ Am I clocked in?
, statusCurTimePeriod :: !NominalDiffTime -- ^ How long have I been clocked in/clocked out for?
, statusInToday :: !NominalDiffTime -- ^ How long have I been in today?
, statusRemainingToday :: !Remaining -- ^ How long left today?
} deriving (Show)
-- | How much time remaining.
data Remaining
= Credit NominalDiffTime
| Due NominalDiffTime
deriving (Eq,Show)
-- | Get a default config.
getClockinConfig :: IO Config
getClockinConfig =
do dir <- getHomeDirectory
return (Config (dir </> ".clockin.log")
8)
-- | Clock into something.
clockIn :: Config -- ^ Config.
-> Text -- ^ Project.
-> Maybe Text -- ^ Task.
-> IO ()
clockIn config project task =
do now <- getLocalTime
clock config (In (ClockIn project task (Time now)))
-- | Clock out of something.
clockOut :: Config -- ^ Config.
-> Text -- ^ Project.
-> Maybe Text -- ^ Task.
-> Maybe Text -- ^ Reason.
-> IO ()
clockOut config project task reason =
do now <- getLocalTime
clock config (Out (ClockOut project task reason (Time now)))
-- | Clock in or out.
clock :: Config -> Entry -> IO ()
clock config entry =
L.appendFile (configFilePath config)
(encode entry <> "\n")
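-- Typical use of clockIn, assuming a default config (example project and task
-- names, not from the original source):
--   getClockinConfig >>= \c -> clockIn c "website" (Just "refactor")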
-- | Print out a status string.
printClockinStatus :: Config -> IO ()
printClockinStatus config =
do entries <- readClockinEntries config
now <- getLocalTime
T.putStrLn (describeStatus now (clockinStatus config now entries) entries)
-- | Print hours worked per day in a format spark can consume.
printSparkDays :: Config -> IO ()
printSparkDays config =
do entries <- readClockinEntries config
now <- getLocalTime
forM_ (days now entries)
(\day ->
let diff =
-1 * (statusInToday
(clockinStatus config
day
(filter ((<=day).entryTime) entries)))
in putStrLn (formatTime defaultTimeLocale "%F" day ++ " " ++ T.unpack (hoursIn day diff)))
where days now =
nub . map (min now . modL seconds (subtract 1) . modL day (+1) . startOfDay . entryTime)
-- | Print minutes.
printMinutes :: Config -> IO ()
printMinutes config =
do entries <- readClockinEntries config
now <- getLocalTime
forM_ (days now entries)
(\day ->
let diff =
-1 * (statusInToday
(clockinStatus config
day
(filter ((<=day).entryTime) entries)))
in putStrLn (formatTime defaultTimeLocale "%F" day ++ "\t" ++ show (minutesIn day diff)))
where days now =
nub . map (min now . modL seconds (subtract 1) . modL day (+1) . startOfDay . entryTime)
-- | Print hours.
printHours :: Config -> IO ()
printHours config =
do entries <- readClockinEntries config
now <- getLocalTime
forM_ (days now entries)
(\day ->
let diff =
-1 * (statusInToday
(clockinStatus config
day
(filter ((<=day).entryTime) entries)))
in putStrLn (formatTime defaultTimeLocale "%F" day ++ "\t" ++ show (hoursCountIn day diff)))
where days now =
nub . map (min now . modL seconds (subtract 1) . modL day (+1) . startOfDay . entryTime)
-- | Read in the log entries from file.
readClockinEntries :: Config -> IO [Entry]
readClockinEntries config =
runResourceT (C.sourceFile (configFilePath config) $=
C.lines $=
CL.mapMaybe (decode . L.fromStrict) $$
CL.consume)
-- | Make a human-readable representation of the status.
describeStatus :: LocalTime -> Status -> [Entry] -> Text
describeStatus now status entries =
T.intercalate
"\n"
["Current time is: " <> T.pack (formatTime defaultTimeLocale "%F %R" now)
,"Currently clocked " <> (if statusIn status
then "IN "
else "OUT ")
<> (if statusCurTimePeriod status == 0 && not (statusIn status)
then ""
else "(" <>
diffTime (-1 * (statusCurTimePeriod status)) True <>
")")
,"Time spent today: " <> hoursIn now (-1 * (statusInToday status))
,"Remaining: " <> hoursRemaining now (statusRemainingToday status)
,"Log today:\n" <> T.intercalate "\n" (map describeEntry
(filter ((>startOfDay now).entryTime) entries))
,if statusIn status then T.pack (formatTime defaultTimeLocale "%R (now)" now) else ""
]
-- | Describe an entry.
describeEntry :: Entry -> Text
describeEntry (In (ClockIn _ _ t)) = T.pack (formatTime defaultTimeLocale "%R" t) <> " in"
describeEntry (Out (ClockOut _ _ _ t)) = T.pack (formatTime defaultTimeLocale "%R" t) <> " out"
-- | Hours remaining.
hoursRemaining now (Due h) = hoursIn now h
hoursRemaining now (Credit h) = "-" <> hoursIn now h
-- | Show the number of hours in (or out, really).
hoursIn :: LocalTime -> NominalDiffTime -> Text
hoursIn now =
T.pack .
formatTime defaultTimeLocale "%R" .
(`addLocalTime` startOfDay now)
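-- For illustration, hoursIn renders a span of two and a half hours as "02:30".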
-- | Show the number of minutes in (or out, really).
minutesIn :: LocalTime -> NominalDiffTime -> Int
minutesIn now =
getL minutes .
(`addLocalTime` startOfDay now)
-- | Show the number of hours in (or out, really).
hoursCountIn :: LocalTime -> NominalDiffTime -> Int
hoursCountIn now =
getL hours .
(`addLocalTime` startOfDay now)
-- | Make a short human-readable representation of the status, on one line.
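-- Illustrative result (times assumed): "06:30/01:30 (in)" after six and a
-- half hours of an eight-hour working day.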
onelinerStatus :: LocalTime -> Status -> Text
onelinerStatus now status =
hoursIn now (-1 * (statusInToday status)) <>
"/" <>
hoursRemaining now (statusRemainingToday status) <>
" (" <>
(if statusIn status then "in" else "out") <>
")"
-- | Generate a status report of the current log.
clockinStatus :: Config -> LocalTime -> [Entry] -> Status
clockinStatus config now entries =
Status clockedIn
curPeriod
todayDiff
remaining
where remaining
| -1 * todayDiff < goalDiff =
Due (diffLocalTime (addLocalTime goalDiff
midnight)
(addLocalTime (-1 * todayDiff)
midnight))
| otherwise =
Credit (diffLocalTime (addLocalTime (-1 * todayDiff)
midnight)
(addLocalTime goalDiff
midnight))
goalDiff = (60 * 60 * fromIntegral (configHoursPerDay config))
todayDiff = inToday now descending
clockedIn =
fromMaybe False
(fmap (\i -> case i of
In{} -> True
_ -> False)
current)
curPeriod = maybe 0 (diffLocalTime now . entryTime) current
current = listToMaybe descending
descending = reverse entries
midnight = startOfDay now
-- | Get the time, if any, of an entry's clocking in.
entryTime :: Entry -> LocalTime
entryTime (In i) = timeLocalTime (inTime i)
entryTime (Out o) = timeLocalTime (outTime o)
-- | Get the project of the entry.
entryProject :: Entry -> Text
entryProject (In i) = inProject i
entryProject (Out o) = outProject o
-- | Get the starting time of the day of the given time.
startOfDay :: LocalTime -> LocalTime
startOfDay (LocalTime day _) = LocalTime day midnight
-- | How much time clocked in today? Expects a DESCENDING entry list.
--
-- If the clocking in time was yesterday, then don't include the work
-- from yesterday, only include the work for today starting from
-- midnight.
--
-- Stops traversing the list if it reaches an entry from yesterday.
inToday :: LocalTime -> [Entry] -> NominalDiffTime
inToday now = go now 0 . zip [0::Int ..]
where go last total (((i,x):xs)) =
case x of
In{} | today -> go this (total + diffLocalTime this last) xs
| otherwise -> go this (total + diffLocalTime (startOfDay now) last) []
Out{} | today -> go this total xs
| i == 0 -> go this total []
| otherwise -> go this total []
where this = entryTime x
today = localDay this == localDay now
go _ total _ = total
-- | Display a time span as one time relative to another.
diffTime :: NominalDiffTime
-> Bool -- ^ Display 'in/ago'?
-> Text -- ^ Example: '3 seconds ago', 'in three days'.
diffTime span' fix = T.pack $ maybe "unknown" format $ find (\(s,_,_) -> abs span'>=s) $ reverse ranges where
minute = 60; hour = minute * 60; day = hour * 24;
week = day * 7; month = day * 30; year = month * 12
format range =
(if fix && span'>0 then "in " else "")
++ case range of
(_,str,0) -> str
(_,str,base) -> printf str (abs $ round (span' / base) :: Integer)
++ (if fix && span'<0 then " ago" else "")
ranges = [(0,"%d seconds",1)
,(minute,"a minute",0)
,(minute*2,"%d minutes",minute)
,(minute*30,"half an hour",0)
,(minute*31,"%d minutes",minute)
,(hour,"an hour",0)
,(hour*2,"%d hours",hour)
,(hour*3,"a few hours",0)
,(hour*4,"%d hours",hour)
,(day,"a day",0)
,(day*2,"%d days",day)
,(week,"a week",0)
,(week*2,"%d weeks",week)
,(month,"a month",0)
,(month*2,"%d months",month)
,(year,"a year",0)
,(year*2,"%d years",year)
]
-- Local time operations
getLocalTime :: IO LocalTime
getLocalTime = fmap zonedTimeToLocalTime getZonedTime
diffLocalTime :: LocalTime -> LocalTime -> NominalDiffTime
diffLocalTime t1 t2 = diffUTCTime (localTimeToUTC utc t1)
(localTimeToUTC utc t2)
addLocalTime :: NominalDiffTime -> LocalTime -> LocalTime
addLocalTime d = tmap (addUTCTime d)
tmap :: (UTCTime -> UTCTime) -> LocalTime -> LocalTime
tmap f = utcToLocalTime utc . f . localTimeToUTC utc
-- | A local time. The only reason for this is to avoid the orphan instance.
newtype Time = Time
{ timeLocalTime :: LocalTime }
deriving (Show,ParseTime,FormatTime,Generic)
instance ToJSON Time where
toJSON =
toJSON . formatTime defaultTimeLocale "%F %T"
instance FromJSON Time where
parseJSON s =
do s <- parseJSON s
case parseTime defaultTimeLocale "%F %T" s of
Nothing -> fail "Couldn't parse local time."
Just t -> return t
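-- Both Time instances use the "%F %T" format, so a stored value looks like
-- "2014-03-01 09:15:00" (illustrative timestamp).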
| chrisdone/clockin | src/Clockin.hs | bsd-3-clause | 12,791 | 0 | 23 | 4,029 | 3,463 | 1,831 | 1,632 | 309 | 4 |
module Data.Text.Aux where
import Control.Monad (liftM2)
import qualified Data.Text as T (Text, append, cons, intercalate, lines, pack, snoc, unlines, unwords, words)
import Language.C.Pretty (Pretty, pretty)
import Data.Text.Lazy (toStrict)
import Data.Text.Lazy.Builder (toLazyText)
import Data.Text.Lazy.Builder.Int (decimal)
import Text.PrettyPrint.HughesPJ.Ext
import TextShow (showt)
-- | `showInt` should be equivalent to `pack . show` for the `Int` type, except
-- a bit faster since it uses `Text` library functions
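--
-- A small illustration (assumed output):
--
-- >>> showInt 42
-- "42"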
showInt :: Int -> T.Text
showInt = toStrict . toLazyText . decimal
-- | Example:
--
-- >>> textAp "Hello" "World"
-- "Hello (World)"
--
textAp :: String -> T.Text -> T.Text
textAp ap = T.append (flip T.snoc ' ' . T.pack $ ap) . parens
-- | Example:
--
-- >>> wrapText '|' "text"
-- "|text|"
--
wrapText :: Char -> T.Text -> T.Text
wrapText = liftM2 (.) T.cons (flip T.snoc)
-- | Example:
--
-- >>> addArrows . map T.pack ["Bool", "String", "Int"]
-- "Bool -> String -> Int"
--
addArrows :: [T.Text] -> T.Text
addArrows = T.intercalate (T.pack " -> ")
-- | Example:
--
-- >>> unwords2 (T.pack "Hello") (T.pack "World")
-- "Hello World"
--
unwords2 :: T.Text -> T.Text -> T.Text
unwords2 = appendAfter (T.pack " ")
-- | Example:
--
-- >>> appendAfter ", " "Hello" "World"
-- "Hello, World"
--
appendAfter :: T.Text -> T.Text -> T.Text -> T.Text
appendAfter = (T.append .) . T.append
-- | Add parentheses around a `Text` object
parens :: T.Text -> T.Text
parens = (T.cons '(') . (flip T.snoc ')')
-- | `wordLines` converts a Text input to a list of lists, where
-- `wordLines text !! i !! j` is the jth word of the ith line of `text`.
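--
-- For illustration (assumed example):
--
-- >>> wordLines (T.pack "a b\nc d")
-- [["a","b"],["c","d"]]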
wordLines :: T.Text -> [[T.Text]]
wordLines = map T.words . T.lines
-- | See `wordLines`
unWordLines :: [[T.Text]] -> T.Text
unWordLines = T.unlines . map T.unwords
-- | Shortcut for
--
-- > unlines . map pack
--
packUnlines :: [String] -> T.Text
packUnlines = T.unlines . map T.pack
-- | Shortcut for
--
-- > unwords . map pack
--
packUnwords :: [String] -> T.Text
packUnwords = T.unwords . map T.pack
-- | Convert a pretty-printable object to `Text`
prettyShowt :: Pretty p => p -> T.Text
prettyShowt = showt . pretty
| michaeljklein/CPlug | src/Data/Text/Aux.hs | bsd-3-clause | 2,199 | 0 | 11 | 392 | 589 | 347 | 242 | 33 | 1 |
--
-- Types.hs
-- Copyright (C) 2017 jragonfyre <jragonfyre@jragonfyre>
--
-- Distributed under terms of the MIT license.
--
module Game.Types where
--(
--) where
import qualified Data.HashMap.Lazy as M
import qualified Data.Yaml as Y
import Data.Yaml (FromJSON (..), (.:), (.!=), (.:?), (.=), ToJSON (..))
import Control.Applicative ((<$>),(<*>),(<|>))
import qualified Data.Maybe as May
import qualified Data.List as L
import qualified Data.Text as T
import Data.Text (Text)
import Data.Aeson.Types (typeMismatch)
import Data.Vector (toList)
import Game.BaseTypes
import Game.MaterialTypes
import Game.RelationTypes
import Game.ObjectTypes
import Game.SpaceTypes
import ParseUtilities
data World = World
{ registeredRelations :: M.HashMap Identifier Relation
, registeredMaterials :: M.HashMap Identifier Material
, registeredSpaceClasses :: M.HashMap Identifier SpaceClass
, registeredObjectClasses :: M.HashMap Identifier ObjectClass
}
type Space = SpaceData
-- dynamic data associated to a space
data SpaceData =
SpData
{ spaceId :: Identifier
, spaceDescription :: Description
, spaceClass :: Identifier
, contents :: [Object]
, subspaces :: [Space]
, spaceInstanceProperties :: [SpaceProperty]
}
deriving (Show, Read, Eq)
instance FromJSON SpaceData where
  parseJSON (Y.Object v) = SpData
    <$> v .: "space"
    <*> v .: "description"
    <*> v .: "space-class"
    <*> v .:? "contents" .!= []
    <*> v .:? "subspaces" .!= []
    <*> v .:? "properties" .!= []
  -- fall through for non-object values instead of a pattern-match failure
  parseJSON invalid = typeMismatch "SpaceData" invalid
instance ToJSON SpaceData where
toJSON spdata = Y.object
[ "space" .= spaceId spdata
, "description" .= spaceDescription spdata
, "space-class" .= spaceClass spdata
, "contents" .= toJSONList (contents spdata)
, "subspaces" .= toJSONList (subspaces spdata)
, "properties" .= toJSONList (spaceInstanceProperties spdata)
]
{-
-- describes a space
data Space where
-- Space with an identifier and certain relations to subspaces
NamedSpace :: Identifier -> SpaceClass -> SpaceData -> Space
-- subspace of some parent space with the relation
Subspace :: Relation -> SpaceClass -> SpaceData -> Space
deriving (Show, Read, Eq)
-}
type Object = ObjectData
data ObjectData =
ObData
{ objectId :: Identifier
, objectDescription :: Description
, objectClass :: Identifier
, objectSpaces :: [Space]
, objectInstanceProperties :: [ObjectProperty]
}
deriving (Show, Read, Eq)
instance FromJSON ObjectData where
  parseJSON (Y.Object v) = ObData
    <$> v .: "object"
    <*> v .: "description"
    <*> v .: "object-class"
    <*> v .:? "spaces" .!= []
    <*> v .:? "properties" .!= []
  -- fall through for non-object values instead of a pattern-match failure
  parseJSON invalid = typeMismatch "ObjectData" invalid
instance ToJSON ObjectData where
toJSON obdata = Y.object
[ "object" .= objectId obdata
, "description" .= objectDescription obdata
, "object-class" .= objectClass obdata
, "spaces" .= toJSONList (objectSpaces obdata)
, "properties" .= toJSONList (objectInstanceProperties obdata)
]
{-
data Object where
Object :: ObjectClass -> ObjectData -> Object
deriving (Show, Read, Eq)
-}
| jragonfyre/TRPG | src/Game/Types.hs | bsd-3-clause | 3,090 | 0 | 20 | 622 | 730 | 422 | 308 | 71 | 0 |
module Main where
import qualified Data.ByteString.Lazy as LazyBS
import Data.Time.Clock
import Data.Time.Format
import Data.Time.LocalTime
import System.IO
--import System.Locale
import CSVPlayer
import Types
import NameCluster
import NameGender
import OptApplicative
import Utils
-- |Clusterifies the specified names and converts the result into a String.
stringifyClusters :: [String] -> String
stringifyClusters ns = (show . clusterify) ns
{-|
Clusterifies the specified names and encodes the result into a CSV formatted
String.
-}
stringifyCSV :: [String] -> String
stringifyCSV ns = (show . convertClustersToCSVString . clusterify) ns
-- TEST
hardcodedValues :: [String]
hardcodedValues = words "Armand Theo Alex Adrien Tristan Alexandra \
\ Arnaud Julien Juliette Ilhem Alyx Tristan Alexis Alexandre Theo Thibault Thomas \
\ Armande Armando Alexia"
-- TEST
displayHardCodedValues :: IO ()
displayHardCodedValues = (putStrLn . show . stringifyClusters) hardcodedValues
-- |Uses input as a list of names for clusterification.
displayInputValues :: IO ()
displayInputValues =
do
putStrLn "List your names separated by spaces: "
interact (show . stringifyClusters . words)
-- |Clusterifies input values and writes the result to a new CSV file.
writeCSVFileInputValues :: IO ()
writeCSVFileInputValues =
do
putStrLn "List your names separated by spaces: "
names <- getLine
writeCSVFile "clusters" (toClusterRecordsAll . clusterify . words $ names)
-- TEST
writeCSVFileHardCodedValues :: IO ()
writeCSVFileHardCodedValues =
do
filename <- generateFilenameFromTime
writeCSVFile filename (toClusterRecordsAll . clusterify $ hardcodedValues)
-- |Generates a file name from the current time.
generateFilenameFromTime :: IO String
generateFilenameFromTime =
do
timezone <- getCurrentTimeZone
utcTime <- getCurrentTime
-- Getting current time for filename
let localTime = utcToLocalTime timezone utcTime
-- Formatting time for filename
let filename = formatTime defaultTimeLocale "%Y-%m-%d_%H-%M-%S" localTime
return filename
findGender :: IO ()
findGender =
do
let basePath = "data/db_all_names.csv"
let errorMsg = "Could not load gendered name base at path " ++ show basePath
let ns = ["Alexandre", "Diamant", "ChouDeBruxelles41", "Jackie"]
mbBase <- loadGenderedBase basePath True
case mbBase of
Nothing -> putStrLn errorMsg
Just base -> (putStrLn . show) (findGenderBase base ns)
-- |Dispatches the "voici-les-femmes" command. The original body was left
-- unfinished; as a sketch it simply runs the gender lookup demo and ignores
-- its options.
callVlfFunc :: VLFOptions -> IO ()
callVlfFunc _vlfOpts = findGender
{-|
Distributes calls so that the right function is called based on the typed
command.
-}
callMainFuncs :: GlobalOptions -> IO ()
callMainFuncs opts =
  let cmd = optCommand opts
      -- optCsvFlag opts (Csv vs Normal) is meant to select the output format;
      -- that part was never written in the original source, so it is ignored
      -- here.
      _format = optCsvFlag opts
  in case cmd of
       VoiciLesFemmes vlfOpts -> callVlfFunc vlfOpts
       -- No dedicated handler existed for the grouping command; dispatch to
       -- the existing CSV writer as a placeholder.
       GroupeLesNoms _glnOpts -> writeCSVFileInputValues
-- |Parses all the options and call the entry function.
optMain :: IO ()
optMain = execParser opts >>= callMainFuncs
where
opts = info (helper <*> globalOptions)
( fullDesc
<> progDesc "Multiple tools for name classification purposes"
<> header "ou-sont-les-femmes-? - name classification tools")
main :: IO ()
main = optMain
| Tydax/ou-sont-les-femmes | app/OuSontLesFemmes.hs | bsd-3-clause | 3,360 | 0 | 12 | 653 | 676 | 348 | 328 | -1 | -1 |
{-# LANGUAGE QuasiQuotes #-}
module AltParsing where
import Control.Applicative
import Text.Trifecta
import Text.RawString.QQ
type NumberOrString = Either Integer String
a = "blah"
b = "123"
c = "123blah789"
eitherOr :: String
eitherOr = [r|
123
abc
456
def
|]
parseNos :: Parser NumberOrString
parseNos = do
skipMany (oneOf "\n")
v <- (Left <$> integer) <|> (Right <$> some letter)
skipMany (oneOf "\n")
return v
main = do
print $ parseString (some letter) mempty a
print $ parseString integer mempty b
print $ parseString parseNos mempty a
print $ parseString parseNos mempty b
print $ parseString (many parseNos) mempty c
print $ parseString (some parseNos) mempty c
print $ parseString (some parseNos) mempty eitherOr
| chengzh2008/hpffp | src/ch24-Parser/altParsing.hs | bsd-3-clause | 751 | 0 | 11 | 142 | 259 | 129 | 130 | 25 | 1 |
{-# LANGUAGE PatternGuards, RecordWildCards #-}
module Website where
import Data.Char (toLower)
import Data.Function (on)
import Data.List (find, groupBy, nub, sort)
import Data.Version (showVersion, versionBranch)
import Development.Shake
import Text.Hastache
import Text.Hastache.Context
import Dirs
import Paths
import PlatformDB
import Releases
import ReleaseFiles
import Templates
import Types
websiteRules :: FilePath -> Rules ()
websiteRules templateSite = do
websiteDir %/> \dst -> do
bcCtx <- buildConfigContext
let rlsCtx = releasesCtx
ctx = ctxConcat [rlsCtx, historyCtx, bcCtx, currentPlatformCtx, errorCtx]
copyExpandedDir ctx templateSite dst
currentPlatformCtx :: Monad m => MuContext m
currentPlatformCtx = mkStrContext ctx
where
ctx "freezeConfig" = mapListStrContext go freezeIncludes
ctx _ = MuNothing
go x "name" = MuVariable $ pkgName x
go x "version" = MuVariable . showVersion $ pkgVersion x
go _ _ = MuNothing
freezeIncludes = map snd . filter filt . allRelIncludes $ head (reverse releases)
filt x = not (fst x `elem` [IncGHC, IncGHCLib, IncGHCTool, IncTool])
fileCtx :: (Monad m) => FileInfo -> MuContext m
fileCtx (dist, url, mHash, isFull) = mkStrContext ctx
where
ctx "osNameAndArch" = MuVariable $ distName dist
ctx "url" = MuVariable url
ctx "mHash" = maybe (MuBool False) MuVariable mHash
ctx "archBits"
| DistBinary _ arch <- dist = MuVariable $ archBits arch
| otherwise = MuNothing
ctx "isOSX" = MuBool $ distIsFor OsOSX dist
ctx "isWindows" = MuBool $ distIsFor OsWindows dist
ctx "isLinux" = MuBool $ distIsFor OsLinux dist
ctx "isSource" = MuBool $ dist == DistSource
ctx "isFull" = MuBool $ isFull
ctx _ = MuNothing
releaseCtx :: (Monad m) => ReleaseFiles -> MuContext m
releaseCtx (ver, (month, year), files) = mkStrContext ctx
where
ctx "version" = MuVariable ver
ctx "year" = MuVariable $ show year
ctx "month" = MuVariable $ monthName month
ctx "files" = mapListContext fileCtx files
ctx _ = MuNothing
releasesCtx :: (Monad m) => MuContext m
releasesCtx = mkStrContext ctx
where
ctx "years" = mapListStrContext yearCtx years
ctx "current" = MuList [releaseCtx currentFiles]
ctx _ = MuNothing
yearCtx [] _ = MuBool False
yearCtx (r0:_) "year" = MuVariable $ show $ releaseYear r0
yearCtx rs "releases" = mapListContext releaseCtx rs
yearCtx _ _ = MuNothing
years = groupBy ((==) `on` releaseYear) priorFiles
releaseYear :: ReleaseFiles -> Int
releaseYear (_ver, (_month, year), _files) = year
monthName :: Int -> String
monthName i = maybe (show i) id $ lookup i monthNames
where
monthNames = zip [1..] $
          words "January February March April May June \
\July August September October November December"
historyCtx :: (Monad m) => MuContext m
historyCtx = mkStrContext outerCtx
where
outerCtx "history" = MuList [mkStrContext ctx]
outerCtx _ = MuNothing
ctx "hpReleases" = mapListStrContext rlsCtx releasesNewToOld
ctx "ncols" = MuVariable $ length releasesNewToOld + 1
ctx "sections" = MuList
[ sectionCtx "Compiler" [isGhc, not . isLib]
, sectionCtx "Core Libraries, provided with GHC" [isGhc, isLib]
, sectionCtx "Additional Minimal Platform Libraries" [not . isGhc, isLib]
, sectionCtx "Programs and Tools" [isTool]
, extendedCtx "Libraries with Full Platform"
]
ctx _ = MuNothing
rlsCtx rls "hpVersion" = MuVariable . showVersion . hpVersion . relVersion $ rls
rlsCtx _ _ = MuNothing
sectionCtx :: (Monad m) => String -> [IncludeType -> Bool] -> MuContext m
sectionCtx name tests = mkStrContext ctx
where
ctx "name" = MuVariable name
ctx "components" = mapListStrContext pCtx packages
ctx _ = MuNothing
packages = sortOnLower . nub . map pkgName . concat $
map (packagesByIncludeFilter (\i -> all ($i) tests) False)
releasesNewToOld
sortOnLower = map snd . sort . map (\s -> (map toLower s, s))
pCtx pName "package" = MuVariable pName
pCtx pName "hackageUrl" =
MuVariable $ "http://hackage.haskell.org/package/" ++ pName
pCtx pName "releases" = mapListStrContext pvCtx $ packageVersionInfo False pName
pCtx _ _ = MuNothing
pvCtx (c, _) "class" = MuVariable c
pvCtx (_, v) "version" = MuVariable v
pvCtx _ _ = MuNothing
extendedCtx :: (Monad m) => String -> MuContext m
extendedCtx name = mkStrContext ctx
where
ctx "name" = MuVariable name
ctx "components" = mapListStrContext pCtx packages
ctx _ = MuNothing
packages = sortOnLower . nub . map pkgName . concat $
map (map snd . relIncludes)
releasesNewToOld
sortOnLower = map snd . sort . map (\s -> (map toLower s, s))
pCtx pName "package" = MuVariable pName
pCtx pName "hackageUrl" =
MuVariable $ "http://hackage.haskell.org/package/" ++ pName
pCtx pName "releases" = mapListStrContext pvCtx $ packageVersionInfo True pName
pCtx _ _ = MuNothing
pvCtx (c, _) "class" = MuVariable c
pvCtx (_, v) "version" = MuVariable v
pvCtx _ _ = MuNothing
packageVersionInfo :: Bool -> String -> [(String, String)]
packageVersionInfo searchFull pName = curr $ zipWith comp vers (drop 1 vers ++ [Nothing])
where
comp Nothing _ = ("missing", "-")
comp (Just v) Nothing = ("update", showVersion v)
comp (Just v) (Just w) | maj v == maj w = ("same", showVersion v)
| otherwise = ("update", showVersion v)
maj = take 3 . versionBranch
curr ((c, v) : cvs) = (c ++ " current", v) : cvs
curr [] = []
vers = map (fmap pkgVersion . find ((==pName) . pkgName) . map snd . (if searchFull then allRelIncludes else relMinimalIncludes))
releasesNewToOld
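-- Each pair returned by packageVersionInfo is a (CSS class, version text)
-- cell for one release column, newest release first; e.g.
-- ("update current","7.10.2") would mark a version bump in the most recent
-- release (example version assumed).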
releasesNewToOld :: [Release]
releasesNewToOld = reverse releases
| gbaz/haskell-platform | hptool/src/Website.hs | bsd-3-clause | 6,097 | 0 | 15 | 1,551 | 1,950 | 997 | 953 | 131 | 10 |
module Karamaan.Plankton.Date where
import Data.Time.Calendar (Day, addDays, fromGregorian, toGregorian)
import Data.Time.LocalTime (ZonedTime, zonedTimeToLocalTime, getZonedTime, localDay)
import Data.Time.Format (formatTime)
import System.Locale (defaultTimeLocale)
import qualified Data.Time.Parse
import qualified Data.Time.LocalTime
import Karamaan.Plankton ((.:))
usDateFormatString :: String
usDateFormatString = "%m-%d-%Y"
isoDateFormatString :: String
isoDateFormatString = "%Y-%m-%d"
monthNameFormatString :: String
monthNameFormatString = "%B %e, %Y"
ymdNoSpaceFormatString :: String
ymdNoSpaceFormatString = "%Y%m%d"
usDateFormat :: Day -> String
usDateFormat = formatTimeDefault usDateFormatString
isoDateFormat :: Day -> String
isoDateFormat = formatTimeDefault isoDateFormatString
dayToSQL :: Day -> String
dayToSQL = formatTimeDefault isoDateFormatString
monthNameFormat :: Day -> String
monthNameFormat = formatTimeDefault monthNameFormatString
formatTimeDefault :: String -> Day -> String
formatTimeDefault = formatTime defaultTimeLocale
zonedTimeToDay :: ZonedTime -> Day
zonedTimeToDay = localDay . zonedTimeToLocalTime
todayLocal :: IO Day
todayLocal = (return . zonedTimeToDay) =<< getZonedTime
yesterdayLocal :: IO Day
yesterdayLocal = (return . dayBefore . zonedTimeToDay) =<< getZonedTime
dayBefore :: Day -> Day
dayBefore = addDays (-1)
dayOrTodayLocal :: Maybe Day -> IO Day
dayOrTodayLocal = maybe todayLocal return
dayOrYesterdayLocal :: Maybe Day -> IO Day
dayOrYesterdayLocal = maybe yesterdayLocal return
-- TODO: this accepts "2013-07-123" and returns 'Just 2013-07-12'.
-- Seems like a bug
parseDate :: String -> Maybe Day
parseDate = fmap (Data.Time.LocalTime.localDay . fst)
. Data.Time.Parse.strptime isoDateFormatString
-- ^^ Here we use Data.Time.Parse.strptime rather than parseTime because
-- Massy wants '2013-5-28' to be valid
-- (Message-Id: <[email protected]>)
-- parseTime doesn't support that because parseValue's definition for the
-- 'm' character is 'digits 2', rather than 'spdigits 2'
parseDateG :: String -> String -> Maybe Day
parseDateG = fmap (Data.Time.LocalTime.localDay . fst)
.: Data.Time.Parse.strptime
firstDayOfYear :: Day -> Day
firstDayOfYear day = fromGregorian year 1 1
where (year, _, _) = toGregorian day
firstDayOfQuarter :: Day -> Day
firstDayOfQuarter day = fromGregorian year quarterMonth 1
where (year, month, _) = toGregorian day
quarterMonth = ((+1) . (*3) . (`div` 3) . (subtract 1)) month
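-- Worked example for firstDayOfQuarter: month 5 (May) gives
-- ((5-1) `div` 3) * 3 + 1 = 4, so the quarter containing May starts on the
-- 1st of April.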
firstDayOfMonth :: Day -> Day
firstDayOfMonth day = fromGregorian year month 1
where (year, month, _) = toGregorian day
| karamaan/karamaan-plankton | Karamaan/Plankton/Date.hs | bsd-3-clause | 2,680 | 0 | 12 | 384 | 632 | 356 | 276 | 54 | 1 |