column                type
------                ----
repo_name             string
path                  string
license               string
full_code             string
full_size             int64
uncommented_code      string
uncommented_size      int64
function_only_code    string
function_only_size    int64
is_commented          bool
is_signatured         bool
n_ast_errors          int64
ast_max_depth         int64
n_whitespaces         int64
n_ast_nodes           int64
n_ast_terminals       int64
n_ast_nonterminals    int64
loc                   int64
cycloplexity          int64
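Each example row below lists its values in the column order above; the three *_code columns hold Haskell source whose original line breaks appear collapsed into single spaces by the preview. As a reading aid, here is a minimal sketch of one row as a Haskell record. The record type, its field names, and the choice of String/Int are illustrative assumptions only and are not part of the dataset; the comments describe how the columns appear to behave in the sample rows.

-- Minimal sketch (illustration only) of one dataset row as a Haskell record.
-- Field names mirror the columns above; loc and cycloplexity are null in
-- every sample row shown here, so they are modelled as Maybe Int.
data Row = Row
  { repoName         :: String      -- repo_name
  , path              :: String      -- path within the repository
  , license           :: String      -- license identifier, e.g. "bsd-3-clause"
  , fullCode          :: String      -- full_code: snippet with comments and signature
  , fullSize          :: Int         -- full_size: appears to be the character count of full_code
  , uncommentedCode   :: String      -- uncommented_code: appears to be full_code with comments stripped
  , uncommentedSize   :: Int
  , functionOnlyCode  :: String      -- function_only_code: appears to be the body without the type signature
  , functionOnlySize  :: Int
  , isCommented       :: Bool
  , isSignatured      :: Bool
  , nAstErrors        :: Int
  , astMaxDepth       :: Int
  , nWhitespaces      :: Int
  , nAstNodes         :: Int
  , nAstTerminals     :: Int
  , nAstNonterminals  :: Int
  , loc               :: Maybe Int   -- null in all sample rows shown here
  , cycloplexity      :: Maybe Int   -- null in all sample rows shown here
  } deriving Show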
j-mueller/mtg-builder
src/Main.hs
bsd-3-clause
playTurn :: ( Monad m , HasGameState a , HasLibrary a , HasTurn a , MonadState a m , MonadRandom m , MonadReader a m , HasHand a , HasBattlefield a , MonadWriter DeckStatistics m , MonadPlus m ) => m () playTurn = do turn %= succ draw 1 logLandsInHand logConvertedManaCost logCardsInHand playLand' logAvgManaCurve -- | Play a number of times and aggregate the results -- | TODO: Return type should not be IO
479
playTurn :: ( Monad m , HasGameState a , HasLibrary a , HasTurn a , MonadState a m , MonadRandom m , MonadReader a m , HasHand a , HasBattlefield a , MonadWriter DeckStatistics m , MonadPlus m ) => m () playTurn = do turn %= succ draw 1 logLandsInHand logConvertedManaCost logCardsInHand playLand' logAvgManaCurve -- | Play a number of times and aggregate the results -- | TODO: Return type should not be IO
479
playTurn = do turn %= succ draw 1 logLandsInHand logConvertedManaCost logCardsInHand playLand' logAvgManaCurve -- | Play a number of times and aggregate the results -- | TODO: Return type should not be IO
219
false
true
0
7
146
121
58
63
null
null
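The numeric and boolean columns look derived from the code columns: in the sample rows the *_size values match the character counts of the corresponding *_code values (for instance, the esScriptOpen row further down has full_size 50 for a 50-character snippet), and is_signatured tracks whether the snippet carries a top-level type signature. The following is a hedged sketch of that apparent convention, written for illustration; it is not the dataset's actual generation code, and hasSignature is only a rough textual approximation.

import Data.List (findIndex, isPrefixOf, tails)

-- The *_size columns appear to be plain character counts (newlines included)
-- of the corresponding *_code columns.
sizeOf :: String -> Int
sizeOf = length

-- is_signatured appears to mean "the snippet starts with a top-level type
-- signature"; for these single-declaration snippets that shows up as a
-- " :: " occurring before the first " = ".
hasSignature :: String -> Bool
hasSignature code =
  case (indexOf " :: " code, indexOf " = " code) of
    (Just sig, Just eq) -> sig < eq
    _                   -> False
  where
    indexOf needle hay = findIndex (needle `isPrefixOf`) (tails hay)

-- e.g. sizeOf       "esScriptOpen :: String\nesScriptOpen = esActionOpen" == 50
--      hasSignature "esScriptOpen :: String\nesScriptOpen = esActionOpen" == True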
keera-studios/hsQt
Qtc/Enums/Gui/QTabletEvent.hs
bsd-2-clause
ePuck :: TabletDevice ePuck = ieTabletDevice $ 1
50
ePuck :: TabletDevice ePuck = ieTabletDevice $ 1
50
ePuck = ieTabletDevice $ 1
28
false
true
0
6
9
22
9
13
null
null
geophf/1HaskellADay
exercises/HAD/Y2017/M05/D22/Solution.hs
mit
{-- We're going to examine language this week. Not the Haskell language, but spoken languages (again, not Haskell). So, looking at this on wikidata.org: https://www.wikidata.org/wiki/Q13284 We see the Sora language (Q13284). That's not what interests me, however. What interests me is the general structure of how languages are classified in Wikidata. A language, such as Sora, is an instance of the class: Language, or: https://www.wikidata.org/wiki/Q34770 (that's the 'q-name' for language) And has a parent class of a language-root, in Sora's case is: https://www.wikidata.org/wiki/Q33199 or Austro-Asiatic languages But this language-family, itself, has a class: https://www.wikidata.org/wiki/Q25295 (That is the 'q-name' of language family) And has a 'number of speakers' property: https://www.wikidata.org/wiki/Property:P1098 So, cool. Let's generalize from more than just the Sora language to all spoken languages Let's query wikidata and see what spoken languages there are, what they are subclasses of, and how many people speak those languages. So here is my SPARQL query to ask this question: --} languagesSpeakersQuery :: String languagesSpeakersQuery = unlines [ "SELECT ?language ?languageLabel ?root ?rootLabel ?nspeakers WHERE {", "?language wdt:P31 wd:Q34770.", "?language wdt:P279 ?root.", "?language wdt:P1098 ?nspeakers.", "SERVICE wikibase:label {", "bd:serviceParam wikibase:language \"en\" .", "} } ORDER BY ?nspeakers"]
1,482
languagesSpeakersQuery :: String languagesSpeakersQuery = unlines [ "SELECT ?language ?languageLabel ?root ?rootLabel ?nspeakers WHERE {", "?language wdt:P31 wd:Q34770.", "?language wdt:P279 ?root.", "?language wdt:P1098 ?nspeakers.", "SERVICE wikibase:label {", "bd:serviceParam wikibase:language \"en\" .", "} } ORDER BY ?nspeakers"]
357
languagesSpeakersQuery = unlines [ "SELECT ?language ?languageLabel ?root ?rootLabel ?nspeakers WHERE {", "?language wdt:P31 wd:Q34770.", "?language wdt:P279 ?root.", "?language wdt:P1098 ?nspeakers.", "SERVICE wikibase:label {", "bd:serviceParam wikibase:language \"en\" .", "} } ORDER BY ?nspeakers"]
324
true
true
0
6
231
36
22
14
null
null
dblia/nosql-ganeti
src/Ganeti/Query/Server.hs
gpl-2.0
-- | Helper for classic queries. handleClassicQuery :: ConfigData -- ^ Cluster config -> Qlang.ItemType -- ^ Query type -> [Either String Integer] -- ^ Requested names -- (empty means all) -> [String] -- ^ Requested fields -> Maybe FilterConstructor -- ^ the filter algorithm -- to be used, defaults to -- makeSimpleFilter -> Bool -- ^ Whether to do sync queries or not -> IO (GenericResult GanetiException JSValue) handleClassicQuery _ _ _ _ _ True = return . Bad $ OpPrereqError "Sync queries are not allowed" ECodeInval
800
handleClassicQuery :: ConfigData -- ^ Cluster config -> Qlang.ItemType -- ^ Query type -> [Either String Integer] -- ^ Requested names -- (empty means all) -> [String] -- ^ Requested fields -> Maybe FilterConstructor -- ^ the filter algorithm -- to be used, defaults to -- makeSimpleFilter -> Bool -- ^ Whether to do sync queries or not -> IO (GenericResult GanetiException JSValue) handleClassicQuery _ _ _ _ _ True = return . Bad $ OpPrereqError "Sync queries are not allowed" ECodeInval
767
handleClassicQuery _ _ _ _ _ True = return . Bad $ OpPrereqError "Sync queries are not allowed" ECodeInval
108
true
true
3
14
365
104
54
50
null
null
brendanhay/gogol
gogol-analytics/gen/Network/Google/Resource/Analytics/Management/CustomDimensions/Get.hs
mpl-2.0
-- | Web property ID for the custom dimension to retrieve. mcdgWebPropertyId :: Lens' ManagementCustomDimensionsGet Text mcdgWebPropertyId = lens _mcdgWebPropertyId (\ s a -> s{_mcdgWebPropertyId = a})
209
mcdgWebPropertyId :: Lens' ManagementCustomDimensionsGet Text mcdgWebPropertyId = lens _mcdgWebPropertyId (\ s a -> s{_mcdgWebPropertyId = a})
150
mcdgWebPropertyId = lens _mcdgWebPropertyId (\ s a -> s{_mcdgWebPropertyId = a})
88
true
true
0
8
34
43
22
21
null
null
grayjay/json-rpc-client
tests/Tests.hs
mit
missingMethodSig :: Signature () Rational missingMethodSig = Signature "f" ()
77
missingMethodSig :: Signature () Rational missingMethodSig = Signature "f" ()
77
missingMethodSig = Signature "f" ()
35
false
true
0
6
9
25
12
13
null
null
sethfowler/pygmalion
indexers/Pygmalion/Index/Source.hs
bsd-3-clause
getSemanticScope :: C.Cursor s' -> BU.ByteString -> Analysis s BU.ByteString getSemanticScope c name | C.isInvalid (C.getKind c) = return name | isScopeCursorKind (C.getKind c) = do semanticParent <- C.getSemanticParent c scopeName <- cursorName c getSemanticScope semanticParent $ scopeName <::> name | otherwise = do semanticParent <- C.getSemanticParent c getSemanticScope semanticParent name
431
getSemanticScope :: C.Cursor s' -> BU.ByteString -> Analysis s BU.ByteString getSemanticScope c name | C.isInvalid (C.getKind c) = return name | isScopeCursorKind (C.getKind c) = do semanticParent <- C.getSemanticParent c scopeName <- cursorName c getSemanticScope semanticParent $ scopeName <::> name | otherwise = do semanticParent <- C.getSemanticParent c getSemanticScope semanticParent name
431
getSemanticScope c name | C.isInvalid (C.getKind c) = return name | isScopeCursorKind (C.getKind c) = do semanticParent <- C.getSemanticParent c scopeName <- cursorName c getSemanticScope semanticParent $ scopeName <::> name | otherwise = do semanticParent <- C.getSemanticParent c getSemanticScope semanticParent name
354
false
true
1
11
86
147
64
83
null
null
frantisekfarka/ghc-dsi
libraries/base/GHC/Foreign.hs
bsd-3-clause
newEncodedCString :: TextEncoding -- ^ Encoding of CString to create -> Bool -- ^ Null-terminate? -> String -- ^ String to encode -> IO CStringLen newEncodedCString (TextEncoding { mkTextEncoder = mk_encoder }) null_terminate s = bracket mk_encoder close $ \encoder -> withArrayLen s $ \sz p -> do from <- fmap (\fp -> bufferAdd sz (emptyBuffer fp sz ReadBuffer)) $ newForeignPtr_ p let go iteration to_p to_sz_bytes = do putDebugMsg ("newEncodedCString: " ++ show iteration) mb_res <- tryFillBufferAndCall encoder null_terminate from to_p to_sz_bytes return case mb_res of Nothing -> do let to_sz_bytes' = to_sz_bytes * 2 to_p' <- reallocBytes to_p to_sz_bytes' go (iteration + 1) to_p' to_sz_bytes' Just res -> return res -- If the input string is ASCII, this value will ensure we only allocate once let to_sz_bytes = cCharSize * (sz + 1) to_p <- mallocBytes to_sz_bytes go (0 :: Int) to_p to_sz_bytes
1,125
newEncodedCString :: TextEncoding -- ^ Encoding of CString to create -> Bool -- ^ Null-terminate? -> String -- ^ String to encode -> IO CStringLen newEncodedCString (TextEncoding { mkTextEncoder = mk_encoder }) null_terminate s = bracket mk_encoder close $ \encoder -> withArrayLen s $ \sz p -> do from <- fmap (\fp -> bufferAdd sz (emptyBuffer fp sz ReadBuffer)) $ newForeignPtr_ p let go iteration to_p to_sz_bytes = do putDebugMsg ("newEncodedCString: " ++ show iteration) mb_res <- tryFillBufferAndCall encoder null_terminate from to_p to_sz_bytes return case mb_res of Nothing -> do let to_sz_bytes' = to_sz_bytes * 2 to_p' <- reallocBytes to_p to_sz_bytes' go (iteration + 1) to_p' to_sz_bytes' Just res -> return res -- If the input string is ASCII, this value will ensure we only allocate once let to_sz_bytes = cCharSize * (sz + 1) to_p <- mallocBytes to_sz_bytes go (0 :: Int) to_p to_sz_bytes
1,125
newEncodedCString (TextEncoding { mkTextEncoder = mk_encoder }) null_terminate s = bracket mk_encoder close $ \encoder -> withArrayLen s $ \sz p -> do from <- fmap (\fp -> bufferAdd sz (emptyBuffer fp sz ReadBuffer)) $ newForeignPtr_ p let go iteration to_p to_sz_bytes = do putDebugMsg ("newEncodedCString: " ++ show iteration) mb_res <- tryFillBufferAndCall encoder null_terminate from to_p to_sz_bytes return case mb_res of Nothing -> do let to_sz_bytes' = to_sz_bytes * 2 to_p' <- reallocBytes to_p to_sz_bytes' go (iteration + 1) to_p' to_sz_bytes' Just res -> return res -- If the input string is ASCII, this value will ensure we only allocate once let to_sz_bytes = cCharSize * (sz + 1) to_p <- mallocBytes to_sz_bytes go (0 :: Int) to_p to_sz_bytes
907
false
true
5
24
366
286
134
152
null
null
kim/amazonka
amazonka-swf/gen/Network/AWS/SWF/Types.hs
mpl-2.0
-- | The duration of time after which the timer will fire. -- -- The duration is specified in seconds; an integer greater than or equal to 0. tseaStartToFireTimeout :: Lens' TimerStartedEventAttributes Text tseaStartToFireTimeout = lens _tseaStartToFireTimeout (\s a -> s { _tseaStartToFireTimeout = a })
308
tseaStartToFireTimeout :: Lens' TimerStartedEventAttributes Text tseaStartToFireTimeout = lens _tseaStartToFireTimeout (\s a -> s { _tseaStartToFireTimeout = a })
166
tseaStartToFireTimeout = lens _tseaStartToFireTimeout (\s a -> s { _tseaStartToFireTimeout = a })
101
true
true
0
9
49
42
24
18
null
null
fmapfmapfmap/amazonka
amazonka-opsworks/gen/Network/AWS/OpsWorks/CreateStack.hs
mpl-2.0
-- | The stack ID, which is an opaque string that you use to identify the -- stack when performing actions such as 'DescribeStacks'. crsStackId :: Lens' CreateStackResponse (Maybe Text) crsStackId = lens _crsStackId (\ s a -> s{_crsStackId = a})
245
crsStackId :: Lens' CreateStackResponse (Maybe Text) crsStackId = lens _crsStackId (\ s a -> s{_crsStackId = a})
112
crsStackId = lens _crsStackId (\ s a -> s{_crsStackId = a})
59
true
true
0
9
40
47
26
21
null
null
aopp-pred/fp-truncate
test/Truncate/Test/Binary.hs
apache-2.0
prop_makeBin64 :: Bitstring64 -> Bool prop_makeBin64 (Bitstring64 b) = case makeBin64 b of Right (Bin64 x) -> x == b Right _ -> False Left _ -> False ------------------------------------------------------------------------ -- Constructors with invalid inputs
388
prop_makeBin64 :: Bitstring64 -> Bool prop_makeBin64 (Bitstring64 b) = case makeBin64 b of Right (Bin64 x) -> x == b Right _ -> False Left _ -> False ------------------------------------------------------------------------ -- Constructors with invalid inputs
388
prop_makeBin64 (Bitstring64 b) = case makeBin64 b of Right (Bin64 x) -> x == b Right _ -> False Left _ -> False ------------------------------------------------------------------------ -- Constructors with invalid inputs
350
false
true
0
10
163
70
34
36
null
null
jwiegley/ghc-release
libraries/containers/Data/Sequence.hs
gpl-3.0
empty :: Seq a empty = Seq Empty
53
empty :: Seq a empty = Seq Empty
53
empty = Seq Empty
28
false
true
0
5
28
17
8
9
null
null
haroldcarr/learn-haskell-coq-ml-etc
haskell/playpen/conal-elliot/2019-03-conal-elliott-convolution/src/Lib.hs
unlicense
t7a :: [Test] t7a = U.t "Additive curry zero apply" (curry zero (1::Int) (2::Int) :: Int) zero
99
t7a :: [Test] t7a = U.t "Additive curry zero apply" (curry zero (1::Int) (2::Int) :: Int) zero
99
t7a = U.t "Additive curry zero apply" (curry zero (1::Int) (2::Int) :: Int) zero
85
false
true
0
7
21
57
28
29
null
null
schell/odin
odin-engine/src/Odin/Engine/New.hs
mit
renderWidget :: Widget -> [RenderTransform2] -> IO () renderWidget w = snd (widgetRenderer2 w)
94
renderWidget :: Widget -> [RenderTransform2] -> IO () renderWidget w = snd (widgetRenderer2 w)
94
renderWidget w = snd (widgetRenderer2 w)
40
false
true
0
9
13
43
20
23
null
null
sramekj/lunchvote
Handler/Cache.hs
mit
processJSON :: Restaurant -> IO (Menu) processJSON r = do jsonData <- getJSON $ show $ getRestaurantId r case jsonData of Nothing -> return Menu{id = getRestaurantId r, restaurant = getRestaurantTitle r, link = getRestaurantLink r, meals = [] } Just j -> return Menu{id = getRestaurantId r, restaurant = getRestaurantTitle r, link = getRestaurantLink r, meals = getMeals j }
490
processJSON :: Restaurant -> IO (Menu) processJSON r = do jsonData <- getJSON $ show $ getRestaurantId r case jsonData of Nothing -> return Menu{id = getRestaurantId r, restaurant = getRestaurantTitle r, link = getRestaurantLink r, meals = [] } Just j -> return Menu{id = getRestaurantId r, restaurant = getRestaurantTitle r, link = getRestaurantLink r, meals = getMeals j }
490
processJSON r = do jsonData <- getJSON $ show $ getRestaurantId r case jsonData of Nothing -> return Menu{id = getRestaurantId r, restaurant = getRestaurantTitle r, link = getRestaurantLink r, meals = [] } Just j -> return Menu{id = getRestaurantId r, restaurant = getRestaurantTitle r, link = getRestaurantLink r, meals = getMeals j }
451
false
true
0
14
176
150
75
75
null
null
f1u77y/xmonad-contrib
XMonad/Util/XUtils.hs
bsd-3-clause
-- | Fill a window with a rectangle and a border paintWindow :: Window -- ^ The window where to draw -> Dimension -- ^ Window width -> Dimension -- ^ Window height -> Dimension -- ^ Border width -> String -- ^ Window background color -> String -- ^ Border color -> X () paintWindow w wh ht bw c bc = paintWindow' w (Rectangle 0 0 wh ht) bw c bc Nothing Nothing
451
paintWindow :: Window -- ^ The window where to draw -> Dimension -- ^ Window width -> Dimension -- ^ Window height -> Dimension -- ^ Border width -> String -- ^ Window background color -> String -- ^ Border color -> X () paintWindow w wh ht bw c bc = paintWindow' w (Rectangle 0 0 wh ht) bw c bc Nothing Nothing
402
paintWindow w wh ht bw c bc = paintWindow' w (Rectangle 0 0 wh ht) bw c bc Nothing Nothing
94
true
true
0
12
165
87
46
41
null
null
guillep19/uulib
src/UU/Parsing/Derived.hs
bsd-3-clause
pFoldr1_gr :: (IsParser p s) => (v -> b -> b, b) -> p v -> p b pFoldr1_gr alg@(op,e) p = asList1 (getfirsts p) (op <$> p <*> pFoldr_gr alg p)
150
pFoldr1_gr :: (IsParser p s) => (v -> b -> b, b) -> p v -> p b pFoldr1_gr alg@(op,e) p = asList1 (getfirsts p) (op <$> p <*> pFoldr_gr alg p)
150
pFoldr1_gr alg@(op,e) p = asList1 (getfirsts p) (op <$> p <*> pFoldr_gr alg p)
87
false
true
1
9
40
98
48
50
null
null
filib/ruby-marshal
src/Data/Ruby/Marshal/Encoding.hs
mit
toEnc "Windows-1251" = Windows_1251
49
toEnc "Windows-1251" = Windows_1251
49
toEnc "Windows-1251" = Windows_1251
49
false
false
0
5
17
9
4
5
null
null
tangboyun/miranda
src/MiRanda/Diagram/Structure.hs
gpl-3.0
charC c ch = text [ch] # font monoFont # fc c <> rect w h # lcA transparent
76
charC c ch = text [ch] # font monoFont # fc c <> rect w h # lcA transparent
76
charC c ch = text [ch] # font monoFont # fc c <> rect w h # lcA transparent
76
false
false
5
6
19
29
13
16
null
null
gbwey/persistentold
persistent-template/Database/Persist/TH.hs
mit
pack' :: String -> Text pack' = pack
36
pack' :: String -> Text pack' = pack
36
pack' = pack
12
false
true
0
5
7
15
8
7
null
null
tfausak/strive
source/library/Strive/Actions/Activities.hs
mit
deleteActivity :: Client -> ActivityId -> IO (Result ()) deleteActivity client activityId = do request <- buildRequest methodDelete client resource query response <- performRequest client request return (if responseStatus response == noContent204 then Right () else Left (response, unpack (toStrict (responseBody response))) ) where resource = "api/v3/activities/" <> show activityId query = [] :: Query -- | <http://strava.github.io/api/v3/activities/#get-activities>
500
deleteActivity :: Client -> ActivityId -> IO (Result ()) deleteActivity client activityId = do request <- buildRequest methodDelete client resource query response <- performRequest client request return (if responseStatus response == noContent204 then Right () else Left (response, unpack (toStrict (responseBody response))) ) where resource = "api/v3/activities/" <> show activityId query = [] :: Query -- | <http://strava.github.io/api/v3/activities/#get-activities>
500
deleteActivity client activityId = do request <- buildRequest methodDelete client resource query response <- performRequest client request return (if responseStatus response == noContent204 then Right () else Left (response, unpack (toStrict (responseBody response))) ) where resource = "api/v3/activities/" <> show activityId query = [] :: Query -- | <http://strava.github.io/api/v3/activities/#get-activities>
443
false
true
3
16
89
155
70
85
null
null
oldmanmike/ghc
compiler/types/Type.hs
bsd-3-clause
splitFunTysN :: Int -> Type -> ([Type], Type) -- ^ Split off exactly the given number argument types, and panics if that is not possible splitFunTysN 0 ty = ([], ty)
165
splitFunTysN :: Int -> Type -> ([Type], Type) splitFunTysN 0 ty = ([], ty)
74
splitFunTysN 0 ty = ([], ty)
28
true
true
0
10
30
48
25
23
null
null
oldmanmike/ghc
compiler/typecheck/TcRnTypes.hs
bsd-3-clause
-- | Retrieve the # of arguments in the error thing, if known errorThingNumArgs_maybe :: ErrorThing -> Maybe Arity errorThingNumArgs_maybe (ErrorThing _ args _) = args
167
errorThingNumArgs_maybe :: ErrorThing -> Maybe Arity errorThingNumArgs_maybe (ErrorThing _ args _) = args
105
errorThingNumArgs_maybe (ErrorThing _ args _) = args
52
true
true
0
6
25
36
17
19
null
null
pgj/bead
src/Bead/Controller/UserStories.hs
bsd-3-clause
withUserAndPersist :: (Username -> Persist a) -> UserStory a withUserAndPersist f = do u <- username persistence (f u) -- | Lifting a persistence action, if some error happens -- during the action we create a unique hash ticket and we display -- the ticket to the user, and log the original message with the -- ticket itself
329
withUserAndPersist :: (Username -> Persist a) -> UserStory a withUserAndPersist f = do u <- username persistence (f u) -- | Lifting a persistence action, if some error happens -- during the action we create a unique hash ticket and we display -- the ticket to the user, and log the original message with the -- ticket itself
329
withUserAndPersist f = do u <- username persistence (f u) -- | Lifting a persistence action, if some error happens -- during the action we create a unique hash ticket and we display -- the ticket to the user, and log the original message with the -- ticket itself
268
false
true
0
9
62
54
27
27
null
null
Peaker/lamdu
src/Lamdu/Sugar/Convert/Composite.hs
gpl-3.0
convertOneItemOpenComposite :: Monad m => (V.RowExtend T.Tag V.Term V.Term # F (IRef m) -> ExprIRef.ValBody m) -> k # Term v InternalName (OnceT (T m)) (T m) -> k # Term v InternalName (OnceT (T m)) (T m) -> Input.Payload m a # V.Term -> ExtendVal m (Input.Payload m a # V.Term) -> ConvertM m (Composite v InternalName (OnceT (T m)) (T m) # k) convertOneItemOpenComposite cons valS restS exprPl extendV = do addItem <- convertAddItem cons (Set.singleton (extendV ^. extendTag)) (exprPl ^. Input.stored) item <- convertItem addItem cons exprPl mempty valS (extendV & extendRest %~ (^. Input.stored . ExprIRef.iref)) pure Composite { _cList = TaggedList { _tlAddFirst = addItem , _tlItems = Just (TaggedListBody item []) } , _cPunnedItems = [] , _cTail = OpenComposite restS }
950
convertOneItemOpenComposite :: Monad m => (V.RowExtend T.Tag V.Term V.Term # F (IRef m) -> ExprIRef.ValBody m) -> k # Term v InternalName (OnceT (T m)) (T m) -> k # Term v InternalName (OnceT (T m)) (T m) -> Input.Payload m a # V.Term -> ExtendVal m (Input.Payload m a # V.Term) -> ConvertM m (Composite v InternalName (OnceT (T m)) (T m) # k) convertOneItemOpenComposite cons valS restS exprPl extendV = do addItem <- convertAddItem cons (Set.singleton (extendV ^. extendTag)) (exprPl ^. Input.stored) item <- convertItem addItem cons exprPl mempty valS (extendV & extendRest %~ (^. Input.stored . ExprIRef.iref)) pure Composite { _cList = TaggedList { _tlAddFirst = addItem , _tlItems = Just (TaggedListBody item []) } , _cPunnedItems = [] , _cTail = OpenComposite restS }
950
convertOneItemOpenComposite cons valS restS exprPl extendV = do addItem <- convertAddItem cons (Set.singleton (extendV ^. extendTag)) (exprPl ^. Input.stored) item <- convertItem addItem cons exprPl mempty valS (extendV & extendRest %~ (^. Input.stored . ExprIRef.iref)) pure Composite { _cList = TaggedList { _tlAddFirst = addItem , _tlItems = Just (TaggedListBody item []) } , _cPunnedItems = [] , _cTail = OpenComposite restS }
578
false
true
0
19
303
364
181
183
null
null
matthiasgoergens/redgrep
src/DDup.hs
bsd-3-clause
-- mainOld = do -- print $ nf' $ x `uni` (flip uni nilX nilY) -- print $ nf' $ nilX `uni` x -- -- print $ nf' $ x `seq` nilX -- print $ nf' $ nilX `seq` x -- -- print $ nf' $ x `cut` nilX -- print $ nf' $ nilX `cut` x -- -- print $ nf' $ nilX `alt` x -- print $ nf' $ x `alt` nilX -- -- print $ nf' $ eps () () `seq` x -- print $ nf' $ x `seq` eps () () -- putStrLn "+++++" -- main' -- main'' = do -- putStrLn "------" -- print $ forget' . flattenForget $ x `uni` x -- print $ forget' . flattenForget $ a `uni` (b `uni` x) -- print $ forget' . flattenForget $ a `uni` (b `uni` a) -- print $ forget' . flattenForget $ a `uni` (a `uni` b) -- print $ forget' . flattenForget $ a `uni` (a `uni` fmap undefined a) -- print $ forget' . flattenForget $ a -- main' = do -- let re = a -- print $ forget' . d 'a' $ a -- print $ dd "a" a -- print $ dd "ab" a -- print $ dd "" (rep $ string "abc") -- putStrLn "------" -- print $ dd "ababab" (rep $ string "ab") -- let ab = string "ab" -- print $ dd "ababab" (ab `seq` ab `seq` ab) -- print $ count $ dd' "ababab" (ab `seq` ab `seq` ab) -- let cf (Both c (Both f r)) = (count c, result f, forget' r) -- print $ cf $ dd' (concat $ replicate 50 "abc") (rep $ string "abc") -- let flapping = cut (i `seq` string "ping") (not $ i `seq` string "flapping") `seq` i -- putStrLn "Flapping!" -- putStrLn "" -- print3 $ cf $ dd' "ee flapping eue" flapping -- print3 $ cf $ dd' "ee flapping eue ping oe" flapping -- print3 $ cf $ dd' "ee lapping eue pin" flapping -- -- Quadratic! Rep takes quadratic time! -- -- print3 $ cf $ dd' (concat $ replicate 10000 "a") (rep $ sym Nothing) -- -- print3 $ cf $ dd' (concat $ replicate 1250 "a") (rep $ sym Nothing) -- sain = do -- let i = 20 -- let rex = dd' (concat $ replicate i "a") $ -- bimap (const ()) (const ()) -- $ not nil_ `seq` not nil_ -- -- print $ count rex -- -- print $ nf' rex -- return () -- -- fain i = do -- -- quadratic again.. -- print $ ((count *** result) . unBoth *** forgetF) . unBoth $ -- -- print $ count $ -- dd' (concat $ replicate i "ab") $ -- bimap (const ()) id -- $ cut (not nil_ `seq` not nil_) -- (rep $ sym Nothing) -- `seq` (sym (Just "ab")) -- -- print3 $ cf $ dd' (concat $ replicate 2500 "a") (rep $ sym Nothing) inti :: r Int Int -> r Int Int inti = id
2,567
inti :: r Int Int -> r Int Int inti = id
40
inti = id
9
true
true
0
7
819
97
79
18
null
null
Fuuzetsu/pastepipe
src/Utils/PastePipe.hs
gpl-3.0
-- | Creates the request to post a chunk of content. buildRequest :: Config -> String -> Request String buildRequest conf str = formToRequest $ Form POST (saveUri $ uri conf) [ ("title", title conf) , ("author", userName conf) , ("paste", str) , ("language", language conf) , ("channel", channel conf) , mkPrivatePair conf , ("email", "") ]
572
buildRequest :: Config -> String -> Request String buildRequest conf str = formToRequest $ Form POST (saveUri $ uri conf) [ ("title", title conf) , ("author", userName conf) , ("paste", str) , ("language", language conf) , ("channel", channel conf) , mkPrivatePair conf , ("email", "") ]
519
buildRequest conf str = formToRequest $ Form POST (saveUri $ uri conf) [ ("title", title conf) , ("author", userName conf) , ("paste", str) , ("language", language conf) , ("channel", channel conf) , mkPrivatePair conf , ("email", "") ]
468
true
true
0
9
288
121
67
54
null
null
gentoo-haskell/hackport
Merge/Dependencies.hs
gpl-3.0
resolveDependencies :: Portage.Overlay -> RetroPackageDescription -> Cabal.CompilerInfo -> [Cabal.PackageName] -> Cabal.PackageName -> EDep resolveDependencies overlay pkg compiler_info ghc_package_names merged_cabal_pkg_name = edeps where -- hasBuildableExes p = any (buildable . buildInfo) . executables $ p treatAsLibrary :: Bool treatAsLibrary = isJust (Cabal.library (packageDescription pkg)) -- without slot business raw_haskell_deps :: Portage.Dependency raw_haskell_deps = PN.normalize_depend $ Portage.DependAllOf $ haskellDependencies overlay (buildDepends pkg) test_deps :: Portage.Dependency test_deps = Portage.mkUseDependency (True, Portage.Use "test") $ Portage.DependAllOf $ remove_raw_common $ testDependencies overlay (packageDescription pkg) ghc_package_names merged_cabal_pkg_name cabal_dep :: Portage.Dependency cabal_dep = cabalDependency overlay (packageDescription pkg) compiler_info ghc_dep :: Portage.Dependency ghc_dep = compilerInfoToDependency compiler_info extra_libs :: Portage.Dependency extra_libs = Portage.DependAllOf $ findCLibs (packageDescription pkg) pkg_config_libs :: [Portage.Dependency] pkg_config_libs = pkgConfigDependencies overlay (packageDescription pkg) pkg_config_tools :: Portage.Dependency pkg_config_tools = Portage.DependAllOf $ if L.null pkg_config_libs then [] else [any_c_p "virtual" "pkgconfig"] build_tools :: Portage.Dependency build_tools = Portage.DependAllOf $ pkg_config_tools : legacyBuildToolsDependencies (packageDescription pkg) ++ hackageBuildToolsDependencies overlay (packageDescription pkg) setup_deps :: Portage.Dependency setup_deps = PN.normalize_depend $ Portage.DependAllOf $ remove_raw_common $ setupDependencies overlay (packageDescription pkg) ghc_package_names merged_cabal_pkg_name edeps :: EDep edeps | treatAsLibrary = mempty { dep = Portage.DependAllOf [ cabal_dep , setup_deps , build_tools , test_deps ], dep_e = S.singleton "${RDEPEND}", rdep = Portage.DependAllOf [ Portage.set_build_slot ghc_dep , Portage.set_build_slot $ add_profile $ raw_haskell_deps , extra_libs , Portage.DependAllOf pkg_config_libs ] } | otherwise = mempty { dep = Portage.DependAllOf [ cabal_dep , setup_deps , build_tools , test_deps ], dep_e = S.singleton "${RDEPEND}", rdep = Portage.DependAllOf [ Portage.set_build_slot ghc_dep , Portage.set_build_slot $ raw_haskell_deps , extra_libs , Portage.DependAllOf pkg_config_libs ] } add_profile = Portage.addDepUseFlag (Portage.mkQUse (Portage.Use "profile")) -- remove depends present in common section remove_raw_common = filter (\d -> not (Portage.dep_as_broad_as d raw_haskell_deps)) . map PN.normalize_depend --------------------------------------------------------------- -- Custom-setup dependencies -- TODO: move partitioning part to Merge:mergeGenericPackageDescription ---------------------------------------------------------------
4,026
resolveDependencies :: Portage.Overlay -> RetroPackageDescription -> Cabal.CompilerInfo -> [Cabal.PackageName] -> Cabal.PackageName -> EDep resolveDependencies overlay pkg compiler_info ghc_package_names merged_cabal_pkg_name = edeps where -- hasBuildableExes p = any (buildable . buildInfo) . executables $ p treatAsLibrary :: Bool treatAsLibrary = isJust (Cabal.library (packageDescription pkg)) -- without slot business raw_haskell_deps :: Portage.Dependency raw_haskell_deps = PN.normalize_depend $ Portage.DependAllOf $ haskellDependencies overlay (buildDepends pkg) test_deps :: Portage.Dependency test_deps = Portage.mkUseDependency (True, Portage.Use "test") $ Portage.DependAllOf $ remove_raw_common $ testDependencies overlay (packageDescription pkg) ghc_package_names merged_cabal_pkg_name cabal_dep :: Portage.Dependency cabal_dep = cabalDependency overlay (packageDescription pkg) compiler_info ghc_dep :: Portage.Dependency ghc_dep = compilerInfoToDependency compiler_info extra_libs :: Portage.Dependency extra_libs = Portage.DependAllOf $ findCLibs (packageDescription pkg) pkg_config_libs :: [Portage.Dependency] pkg_config_libs = pkgConfigDependencies overlay (packageDescription pkg) pkg_config_tools :: Portage.Dependency pkg_config_tools = Portage.DependAllOf $ if L.null pkg_config_libs then [] else [any_c_p "virtual" "pkgconfig"] build_tools :: Portage.Dependency build_tools = Portage.DependAllOf $ pkg_config_tools : legacyBuildToolsDependencies (packageDescription pkg) ++ hackageBuildToolsDependencies overlay (packageDescription pkg) setup_deps :: Portage.Dependency setup_deps = PN.normalize_depend $ Portage.DependAllOf $ remove_raw_common $ setupDependencies overlay (packageDescription pkg) ghc_package_names merged_cabal_pkg_name edeps :: EDep edeps | treatAsLibrary = mempty { dep = Portage.DependAllOf [ cabal_dep , setup_deps , build_tools , test_deps ], dep_e = S.singleton "${RDEPEND}", rdep = Portage.DependAllOf [ Portage.set_build_slot ghc_dep , Portage.set_build_slot $ add_profile $ raw_haskell_deps , extra_libs , Portage.DependAllOf pkg_config_libs ] } | otherwise = mempty { dep = Portage.DependAllOf [ cabal_dep , setup_deps , build_tools , test_deps ], dep_e = S.singleton "${RDEPEND}", rdep = Portage.DependAllOf [ Portage.set_build_slot ghc_dep , Portage.set_build_slot $ raw_haskell_deps , extra_libs , Portage.DependAllOf pkg_config_libs ] } add_profile = Portage.addDepUseFlag (Portage.mkQUse (Portage.Use "profile")) -- remove depends present in common section remove_raw_common = filter (\d -> not (Portage.dep_as_broad_as d raw_haskell_deps)) . map PN.normalize_depend --------------------------------------------------------------- -- Custom-setup dependencies -- TODO: move partitioning part to Merge:mergeGenericPackageDescription ---------------------------------------------------------------
4,026
resolveDependencies overlay pkg compiler_info ghc_package_names merged_cabal_pkg_name = edeps where -- hasBuildableExes p = any (buildable . buildInfo) . executables $ p treatAsLibrary :: Bool treatAsLibrary = isJust (Cabal.library (packageDescription pkg)) -- without slot business raw_haskell_deps :: Portage.Dependency raw_haskell_deps = PN.normalize_depend $ Portage.DependAllOf $ haskellDependencies overlay (buildDepends pkg) test_deps :: Portage.Dependency test_deps = Portage.mkUseDependency (True, Portage.Use "test") $ Portage.DependAllOf $ remove_raw_common $ testDependencies overlay (packageDescription pkg) ghc_package_names merged_cabal_pkg_name cabal_dep :: Portage.Dependency cabal_dep = cabalDependency overlay (packageDescription pkg) compiler_info ghc_dep :: Portage.Dependency ghc_dep = compilerInfoToDependency compiler_info extra_libs :: Portage.Dependency extra_libs = Portage.DependAllOf $ findCLibs (packageDescription pkg) pkg_config_libs :: [Portage.Dependency] pkg_config_libs = pkgConfigDependencies overlay (packageDescription pkg) pkg_config_tools :: Portage.Dependency pkg_config_tools = Portage.DependAllOf $ if L.null pkg_config_libs then [] else [any_c_p "virtual" "pkgconfig"] build_tools :: Portage.Dependency build_tools = Portage.DependAllOf $ pkg_config_tools : legacyBuildToolsDependencies (packageDescription pkg) ++ hackageBuildToolsDependencies overlay (packageDescription pkg) setup_deps :: Portage.Dependency setup_deps = PN.normalize_depend $ Portage.DependAllOf $ remove_raw_common $ setupDependencies overlay (packageDescription pkg) ghc_package_names merged_cabal_pkg_name edeps :: EDep edeps | treatAsLibrary = mempty { dep = Portage.DependAllOf [ cabal_dep , setup_deps , build_tools , test_deps ], dep_e = S.singleton "${RDEPEND}", rdep = Portage.DependAllOf [ Portage.set_build_slot ghc_dep , Portage.set_build_slot $ add_profile $ raw_haskell_deps , extra_libs , Portage.DependAllOf pkg_config_libs ] } | otherwise = mempty { dep = Portage.DependAllOf [ cabal_dep , setup_deps , build_tools , test_deps ], dep_e = S.singleton "${RDEPEND}", rdep = Portage.DependAllOf [ Portage.set_build_slot ghc_dep , Portage.set_build_slot $ raw_haskell_deps , extra_libs , Portage.DependAllOf pkg_config_libs ] } add_profile = Portage.addDepUseFlag (Portage.mkQUse (Portage.Use "profile")) -- remove depends present in common section remove_raw_common = filter (\d -> not (Portage.dep_as_broad_as d raw_haskell_deps)) . map PN.normalize_depend --------------------------------------------------------------- -- Custom-setup dependencies -- TODO: move partitioning part to Merge:mergeGenericPackageDescription ---------------------------------------------------------------
3,846
false
true
5
11
1,456
737
357
380
null
null
tittoassini/typed
test/Test/Data/Values.hs
bsd-3-clause
-- vw = VW 0 0 0 0 0 viT = ("v ints",VI 444 123 (-8999) (-123823) (-34723823923))
81
viT = ("v ints",VI 444 123 (-8999) (-123823) (-34723823923))
60
viT = ("v ints",VI 444 123 (-8999) (-123823) (-34723823923))
60
true
false
1
8
17
42
22
20
null
null
pparkkin/eta
compiler/ETA/Prelude/TysWiredIn.hs
bsd-3-clause
pcNonRecDataTyCon :: Name -> Maybe CType -> [TyVar] -> [DataCon] -> TyCon -- Not an enumeration, not promotable pcNonRecDataTyCon = pcTyCon False NonRecursive False
164
pcNonRecDataTyCon :: Name -> Maybe CType -> [TyVar] -> [DataCon] -> TyCon pcNonRecDataTyCon = pcTyCon False NonRecursive False
126
pcNonRecDataTyCon = pcTyCon False NonRecursive False
52
true
true
0
9
23
44
23
21
null
null
tolysz/prepare-ghcjs
spec-lts8/aeson/Data/Aeson/Encoding/Internal.hs
bsd-3-clause
zonedTime :: ZonedTime -> Encoding' a zonedTime = Encoding . EB.quote . EB.zonedTime
84
zonedTime :: ZonedTime -> Encoding' a zonedTime = Encoding . EB.quote . EB.zonedTime
84
zonedTime = Encoding . EB.quote . EB.zonedTime
46
false
true
0
7
12
30
15
15
null
null
chriseidhof/persist
src/Data/Persist/Interface.hs
bsd-3-clause
-- | Creates a new value. To assure your database stays correct, never use this function directly. create_ :: (Regular a, DatabaseRepr (PF a), Persistent p) => a -> p (Ref a) create_ x = fmap Ref $ createImpl (tableName genX) (toDatabaseValue genX) where genX = from x -- | Add a relation between entities of type @a@ and @b@.
328
create_ :: (Regular a, DatabaseRepr (PF a), Persistent p) => a -> p (Ref a) create_ x = fmap Ref $ createImpl (tableName genX) (toDatabaseValue genX) where genX = from x -- | Add a relation between entities of type @a@ and @b@.
229
create_ x = fmap Ref $ createImpl (tableName genX) (toDatabaseValue genX) where genX = from x -- | Add a relation between entities of type @a@ and @b@.
153
true
true
0
9
61
91
45
46
null
null
leshchevds/ganeti
src/Ganeti/Constants.hs
bsd-2-clause
esScriptOpen :: String esScriptOpen = esActionOpen
50
esScriptOpen :: String esScriptOpen = esActionOpen
50
esScriptOpen = esActionOpen
27
false
true
0
4
5
11
6
5
null
null
fmapfmapfmap/amazonka
amazonka-codecommit/gen/Network/AWS/CodeCommit/CreateRepository.hs
mpl-2.0
-- | Creates a value of 'CreateRepository' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'crRepositoryDescription' -- -- * 'crRepositoryName' createRepository :: Text -- ^ 'crRepositoryName' -> CreateRepository createRepository pRepositoryName_ = CreateRepository' { _crRepositoryDescription = Nothing , _crRepositoryName = pRepositoryName_ }
453
createRepository :: Text -- ^ 'crRepositoryName' -> CreateRepository createRepository pRepositoryName_ = CreateRepository' { _crRepositoryDescription = Nothing , _crRepositoryName = pRepositoryName_ }
224
createRepository pRepositoryName_ = CreateRepository' { _crRepositoryDescription = Nothing , _crRepositoryName = pRepositoryName_ }
147
true
true
0
7
82
49
28
21
null
null
urbanslug/ghc
compiler/llvmGen/Llvm/Types.hs
bsd-3-clause
ppPlainName (LMLocalVar x LMLabel ) = text (show x)
53
ppPlainName (LMLocalVar x LMLabel ) = text (show x)
53
ppPlainName (LMLocalVar x LMLabel ) = text (show x)
53
false
false
0
7
10
26
12
14
null
null
abhin4v/ringo
ringo/src/Ringo/ArgParser.hs
mit
settingsParser :: Parser Settings settingsParser = let Settings {..} = defSettings in Settings <$> textOption (long "dim-prefix" <> short 'd' <> value (Text.unpack settingDimPrefix) <> showDefault <> help "Prefix for dimension tables") <*> textOption (long "fact-prefix" <> short 'f' <> value (Text.unpack settingFactPrefix) <> showDefault <> help "Prefix for fact tables") <*> minorOption "fact-infix" settingFactInfix "Infix for fact tables" <*> option auto (let timeunits = map show [Second ..] in long "timeunit" <> short 't' <> value settingTimeUnit <> showDefault <> completeWith timeunits <> help ("Time unit granularity for fact tables. Possible values: " ++ intercalate ", " timeunits)) <*> minorOption "avg-count-col-suffix" settingAvgCountColumnSuffix "Suffix for average count columns" <*> minorOption "avg-sum-col-suffix" settingAvgSumColumnSuffix "Suffix for average sum columns" <*> minorOption "dim-id-col-name" settingDimTableIdColumnName "Name of dimension table id columns" <*> minorOption "dim-id-col-type" settingDimTableIdColumnType "Type of dimension table id columns" <*> minorOption "fact-count-col-type" settingFactCountColumnType "Type of fact table count columns" <*> option auto (long "fact-count-distinct-error-rate" <> hidden <> value settingFactCountDistinctErrorRate <> showDefault <> help "Error rate for count distinct calulations") <*> minorOption "dependencies-json-file" settingDependenciesJSONFileName "Name of the output dependencies json file" <*> minorOption "facts-json-file" settingFactsJSONFileName "Name of the output facts json file" <*> minorOption "dimensions-json-file" settingDimensionsJSONFileName "Name of the output dimensions json file" <*> option auto (long "foreign-key-id-coalesce-val" <> hidden <> value settingForeignKeyIdCoalesceValue <> showDefault <> help "Value to coalease missing foriegn key ids to, in fact tables") <*> minorOption "tablename-suffix-template" settingTableNameSuffixTemplate "Suffix template for table names in SQL" where minorOption longDesc defValue helpTxt = textOption (long longDesc <> hidden <> value (Text.unpack defValue) <> showDefault <> help helpTxt)
3,252
settingsParser :: Parser Settings settingsParser = let Settings {..} = defSettings in Settings <$> textOption (long "dim-prefix" <> short 'd' <> value (Text.unpack settingDimPrefix) <> showDefault <> help "Prefix for dimension tables") <*> textOption (long "fact-prefix" <> short 'f' <> value (Text.unpack settingFactPrefix) <> showDefault <> help "Prefix for fact tables") <*> minorOption "fact-infix" settingFactInfix "Infix for fact tables" <*> option auto (let timeunits = map show [Second ..] in long "timeunit" <> short 't' <> value settingTimeUnit <> showDefault <> completeWith timeunits <> help ("Time unit granularity for fact tables. Possible values: " ++ intercalate ", " timeunits)) <*> minorOption "avg-count-col-suffix" settingAvgCountColumnSuffix "Suffix for average count columns" <*> minorOption "avg-sum-col-suffix" settingAvgSumColumnSuffix "Suffix for average sum columns" <*> minorOption "dim-id-col-name" settingDimTableIdColumnName "Name of dimension table id columns" <*> minorOption "dim-id-col-type" settingDimTableIdColumnType "Type of dimension table id columns" <*> minorOption "fact-count-col-type" settingFactCountColumnType "Type of fact table count columns" <*> option auto (long "fact-count-distinct-error-rate" <> hidden <> value settingFactCountDistinctErrorRate <> showDefault <> help "Error rate for count distinct calulations") <*> minorOption "dependencies-json-file" settingDependenciesJSONFileName "Name of the output dependencies json file" <*> minorOption "facts-json-file" settingFactsJSONFileName "Name of the output facts json file" <*> minorOption "dimensions-json-file" settingDimensionsJSONFileName "Name of the output dimensions json file" <*> option auto (long "foreign-key-id-coalesce-val" <> hidden <> value settingForeignKeyIdCoalesceValue <> showDefault <> help "Value to coalease missing foriegn key ids to, in fact tables") <*> minorOption "tablename-suffix-template" settingTableNameSuffixTemplate "Suffix template for table names in SQL" where minorOption longDesc defValue helpTxt = textOption (long longDesc <> hidden <> value (Text.unpack defValue) <> showDefault <> help helpTxt)
3,252
settingsParser = let Settings {..} = defSettings in Settings <$> textOption (long "dim-prefix" <> short 'd' <> value (Text.unpack settingDimPrefix) <> showDefault <> help "Prefix for dimension tables") <*> textOption (long "fact-prefix" <> short 'f' <> value (Text.unpack settingFactPrefix) <> showDefault <> help "Prefix for fact tables") <*> minorOption "fact-infix" settingFactInfix "Infix for fact tables" <*> option auto (let timeunits = map show [Second ..] in long "timeunit" <> short 't' <> value settingTimeUnit <> showDefault <> completeWith timeunits <> help ("Time unit granularity for fact tables. Possible values: " ++ intercalate ", " timeunits)) <*> minorOption "avg-count-col-suffix" settingAvgCountColumnSuffix "Suffix for average count columns" <*> minorOption "avg-sum-col-suffix" settingAvgSumColumnSuffix "Suffix for average sum columns" <*> minorOption "dim-id-col-name" settingDimTableIdColumnName "Name of dimension table id columns" <*> minorOption "dim-id-col-type" settingDimTableIdColumnType "Type of dimension table id columns" <*> minorOption "fact-count-col-type" settingFactCountColumnType "Type of fact table count columns" <*> option auto (long "fact-count-distinct-error-rate" <> hidden <> value settingFactCountDistinctErrorRate <> showDefault <> help "Error rate for count distinct calulations") <*> minorOption "dependencies-json-file" settingDependenciesJSONFileName "Name of the output dependencies json file" <*> minorOption "facts-json-file" settingFactsJSONFileName "Name of the output facts json file" <*> minorOption "dimensions-json-file" settingDimensionsJSONFileName "Name of the output dimensions json file" <*> option auto (long "foreign-key-id-coalesce-val" <> hidden <> value settingForeignKeyIdCoalesceValue <> showDefault <> help "Value to coalease missing foriegn key ids to, in fact tables") <*> minorOption "tablename-suffix-template" settingTableNameSuffixTemplate "Suffix template for table names in SQL" where minorOption longDesc defValue helpTxt = textOption (long longDesc <> hidden <> value (Text.unpack defValue) <> showDefault <> help helpTxt)
3,218
false
true
0
30
1,363
440
203
237
null
null
mstksg/cm-dip
src/Data/PArray.hs
mit
laplac :: (Ix i, Num i, Fractional e) => PArray (i, i) e -> e laplac p = (p ?~ (-1, 0)) + (p ?~ (1, 0)) + (p ?~ (0, -1)) + (p ?~ (0, 1)) + 4 * (p ?~ (0, 0))
156
laplac :: (Ix i, Num i, Fractional e) => PArray (i, i) e -> e laplac p = (p ?~ (-1, 0)) + (p ?~ (1, 0)) + (p ?~ (0, -1)) + (p ?~ (0, 1)) + 4 * (p ?~ (0, 0))
156
laplac p = (p ?~ (-1, 0)) + (p ?~ (1, 0)) + (p ?~ (0, -1)) + (p ?~ (0, 1)) + 4 * (p ?~ (0, 0))
94
false
true
0
13
43
145
79
66
null
null
kolmodin/cabal
cabal-install/Distribution/Client/ProjectPlanning.hs
bsd-3-clause
optionalStanzasWithDepsAvailable :: Set InstalledPackageId -> ElaboratedConfiguredPackage -> Set OptionalStanza optionalStanzasWithDepsAvailable availablePkgs pkg = Set.fromList [ stanza | stanza <- Set.toList (pkgStanzasAvailable pkg) , let deps :: [InstalledPackageId] deps = map installedPackageId $ CD.select (optionalStanzaDeps stanza) (pkgDependencies pkg) , all (`Set.member` availablePkgs) deps ] where optionalStanzaDeps TestStanzas (CD.ComponentTest _) = True optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True optionalStanzaDeps _ _ = False -- The second pass does three things: -- -- * A second go at deciding which optional stanzas to enable. -- * Prune the dependencies based on the final choice of optional stanzas. -- * Extend the targets within each package to build, now we know the reverse -- dependencies, ie we know which libs are needed as deps by other packages. -- -- Achieving sticky behaviour with enabling\/disabling optional stanzas is -- tricky. The first approximation was handled by the first pass above, but -- it's not quite enough. That pass will enable stanzas if all of the deps -- of the optional stanza are already installed /in the store/. That's important -- but it does not account for dependencies that get built inplace as part of -- the project. We cannot take those inplace build deps into account in the -- pruning pass however because we don't yet know which ones we're going to -- build. Once we do know, we can have another go and enable stanzas that have -- all their deps available. Now we can consider all packages in the pruned -- plan to be available, including ones we already decided to build from -- source. -- -- Deciding which targets to build depends on knowing which packages have -- reverse dependencies (ie are needed). This requires the result of first -- pass, which is another reason we have to split it into two passes. -- -- Note that just because we might enable testsuites or benchmarks (in the -- first or second pass) doesn't mean that we build all (or even any) of them. -- That depends on which targets we picked in the first pass. --
2,327
optionalStanzasWithDepsAvailable :: Set InstalledPackageId -> ElaboratedConfiguredPackage -> Set OptionalStanza optionalStanzasWithDepsAvailable availablePkgs pkg = Set.fromList [ stanza | stanza <- Set.toList (pkgStanzasAvailable pkg) , let deps :: [InstalledPackageId] deps = map installedPackageId $ CD.select (optionalStanzaDeps stanza) (pkgDependencies pkg) , all (`Set.member` availablePkgs) deps ] where optionalStanzaDeps TestStanzas (CD.ComponentTest _) = True optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True optionalStanzaDeps _ _ = False -- The second pass does three things: -- -- * A second go at deciding which optional stanzas to enable. -- * Prune the dependencies based on the final choice of optional stanzas. -- * Extend the targets within each package to build, now we know the reverse -- dependencies, ie we know which libs are needed as deps by other packages. -- -- Achieving sticky behaviour with enabling\/disabling optional stanzas is -- tricky. The first approximation was handled by the first pass above, but -- it's not quite enough. That pass will enable stanzas if all of the deps -- of the optional stanza are already installed /in the store/. That's important -- but it does not account for dependencies that get built inplace as part of -- the project. We cannot take those inplace build deps into account in the -- pruning pass however because we don't yet know which ones we're going to -- build. Once we do know, we can have another go and enable stanzas that have -- all their deps available. Now we can consider all packages in the pruned -- plan to be available, including ones we already decided to build from -- source. -- -- Deciding which targets to build depends on knowing which packages have -- reverse dependencies (ie are needed). This requires the result of first -- pass, which is another reason we have to split it into two passes. -- -- Note that just because we might enable testsuites or benchmarks (in the -- first or second pass) doesn't mean that we build all (or even any) of them. -- That depends on which targets we picked in the first pass. --
2,327
optionalStanzasWithDepsAvailable availablePkgs pkg = Set.fromList [ stanza | stanza <- Set.toList (pkgStanzasAvailable pkg) , let deps :: [InstalledPackageId] deps = map installedPackageId $ CD.select (optionalStanzaDeps stanza) (pkgDependencies pkg) , all (`Set.member` availablePkgs) deps ] where optionalStanzaDeps TestStanzas (CD.ComponentTest _) = True optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True optionalStanzaDeps _ _ = False -- The second pass does three things: -- -- * A second go at deciding which optional stanzas to enable. -- * Prune the dependencies based on the final choice of optional stanzas. -- * Extend the targets within each package to build, now we know the reverse -- dependencies, ie we know which libs are needed as deps by other packages. -- -- Achieving sticky behaviour with enabling\/disabling optional stanzas is -- tricky. The first approximation was handled by the first pass above, but -- it's not quite enough. That pass will enable stanzas if all of the deps -- of the optional stanza are already installed /in the store/. That's important -- but it does not account for dependencies that get built inplace as part of -- the project. We cannot take those inplace build deps into account in the -- pruning pass however because we don't yet know which ones we're going to -- build. Once we do know, we can have another go and enable stanzas that have -- all their deps available. Now we can consider all packages in the pruned -- plan to be available, including ones we already decided to build from -- source. -- -- Deciding which targets to build depends on knowing which packages have -- reverse dependencies (ie are needed). This requires the result of first -- pass, which is another reason we have to split it into two passes. -- -- Note that just because we might enable testsuites or benchmarks (in the -- first or second pass) doesn't mean that we build all (or even any) of them. -- That depends on which targets we picked in the first pass. --
2,149
false
true
2
14
552
194
110
84
null
null
Ferdinand-vW/Wlp-verification-engine
src/GCL.hs
gpl-3.0
ref :: String -> Expr ref s = Name s
36
ref :: String -> Expr ref s = Name s
36
ref s = Name s
14
false
true
0
5
9
21
10
11
null
null
oakfang/DemiLang
src/Demi/VM.hs
mit
subSolve GreaterEqualThan (DblVar x) (IntVar y) = return $ BoolVar $ x >= (fromIntegral y)
92
subSolve GreaterEqualThan (DblVar x) (IntVar y) = return $ BoolVar $ x >= (fromIntegral y)
92
subSolve GreaterEqualThan (DblVar x) (IntVar y) = return $ BoolVar $ x >= (fromIntegral y)
92
false
false
0
7
16
43
21
22
null
null
fpco/serial-bench
binary-0.4.3.1/tests/Benchmark.hs
bsd-3-clause
getWord16N4Big = loop 0 where loop s n | s `seq` n `seq` False = undefined loop s 0 = return s loop s n = do s0 <- getWord16be s1 <- getWord16be s2 <- getWord16be s3 <- getWord16be loop (s+s0+s1+s2+s3) (n-4)
275
getWord16N4Big = loop 0 where loop s n | s `seq` n `seq` False = undefined loop s 0 = return s loop s n = do s0 <- getWord16be s1 <- getWord16be s2 <- getWord16be s3 <- getWord16be loop (s+s0+s1+s2+s3) (n-4)
275
getWord16N4Big = loop 0 where loop s n | s `seq` n `seq` False = undefined loop s 0 = return s loop s n = do s0 <- getWord16be s1 <- getWord16be s2 <- getWord16be s3 <- getWord16be loop (s+s0+s1+s2+s3) (n-4)
275
false
false
2
14
109
124
60
64
null
null
expipiplus1/vulkan
utils/src/Vulkan/Utils/ShaderQQ/Interpolate.hs
bsd-3-clause
-- | Extract variables and literals from string to be interpolated -- -- >>> parse "" -- [] -- -- >>> parse "hello $world" -- [Right "hello ",Left "world"] -- -- >>> parse "$hello$world" -- [Left "hello",Left "world"] -- -- >>> parse "$" -- [Right "$"] -- -- >>> parse "hi" -- [Right "hi"] -- -- >>> parse "h$hi" -- [Right "h",Left "hi"] -- -- >>> parse "$$hi" -- [Right "$",Left "hi"] -- -- >>> parse "$1" -- [Right "$1"] -- -- >>> parse "$$$" -- [Right "$$$"] -- -- >>> parse "\\" -- [Right "\\"] -- -- >>> parse "\\$" -- [Right "$"] -- -- >>> parse "\\$hi" -- [Right "$hi"] -- -- >>> parse "\\\\$hi" -- [Right "\\$hi"] -- -- >>> parse "\\hi" -- [Right "\\hi"] -- -- >>> parse "$hi\\$foo" -- [Left "hi",Right "$foo"] -- -- >>> parse "hello, \\$foo" -- [Right "hello, $foo"] -- -- >>> parse "${fo'o}bar" -- [Left "fo'o",Right "bar"] -- -- >>> parse "\\" -- [Right "\\"] -- -- >>> parse "\\\\$" -- [Right "\\$"] -- -- >>> parse "$" -- [Right "$"] parse :: String -> [Either Var String] parse s = let -- A haskell var or con ident = (:) <$> satisfy (isLower <||> isUpper <||> (== '_')) <*> munch (isAlphaNum <||> (== '\'') <||> (== '_')) braces = between (char '{') (char '}') -- parse a var, a '$' followed by an ident var = char '$' *> ((Left <$> (ident +++ braces ident)) <++ pure (Right "$")) -- Everything up to a '$' or '\' normal = Right <$> munch1 ((/= '$') <&&> (/= '\\')) -- escape a $ escape = char '\\' *> (Right <$> (string "$" <++ pure "\\")) -- One normal or var -- - Check escaped '$' first -- - variables, starting with $ -- - normal string one = normal +++ var +++ escape parser = many one <* eof in case readP_to_S parser s of [(r, "")] -> foldr mergeRights [] r _ -> error "Failed to parse string"
1,815
parse :: String -> [Either Var String] parse s = let -- A haskell var or con ident = (:) <$> satisfy (isLower <||> isUpper <||> (== '_')) <*> munch (isAlphaNum <||> (== '\'') <||> (== '_')) braces = between (char '{') (char '}') -- parse a var, a '$' followed by an ident var = char '$' *> ((Left <$> (ident +++ braces ident)) <++ pure (Right "$")) -- Everything up to a '$' or '\' normal = Right <$> munch1 ((/= '$') <&&> (/= '\\')) -- escape a $ escape = char '\\' *> (Right <$> (string "$" <++ pure "\\")) -- One normal or var -- - Check escaped '$' first -- - variables, starting with $ -- - normal string one = normal +++ var +++ escape parser = many one <* eof in case readP_to_S parser s of [(r, "")] -> foldr mergeRights [] r _ -> error "Failed to parse string"
868
parse s = let -- A haskell var or con ident = (:) <$> satisfy (isLower <||> isUpper <||> (== '_')) <*> munch (isAlphaNum <||> (== '\'') <||> (== '_')) braces = between (char '{') (char '}') -- parse a var, a '$' followed by an ident var = char '$' *> ((Left <$> (ident +++ braces ident)) <++ pure (Right "$")) -- Everything up to a '$' or '\' normal = Right <$> munch1 ((/= '$') <&&> (/= '\\')) -- escape a $ escape = char '\\' *> (Right <$> (string "$" <++ pure "\\")) -- One normal or var -- - Check escaped '$' first -- - variables, starting with $ -- - normal string one = normal +++ var +++ escape parser = many one <* eof in case readP_to_S parser s of [(r, "")] -> foldr mergeRights [] r _ -> error "Failed to parse string"
829
true
true
0
16
435
354
217
137
null
null
brendanhay/gogol
gogol-script/gen/Network/Google/Script/Types/Product.hs
mpl-2.0
-- | The deployment configuration. udrDeploymentConfig :: Lens' UpdateDeploymentRequest (Maybe DeploymentConfig) udrDeploymentConfig = lens _udrDeploymentConfig (\ s a -> s{_udrDeploymentConfig = a})
207
udrDeploymentConfig :: Lens' UpdateDeploymentRequest (Maybe DeploymentConfig) udrDeploymentConfig = lens _udrDeploymentConfig (\ s a -> s{_udrDeploymentConfig = a})
172
udrDeploymentConfig = lens _udrDeploymentConfig (\ s a -> s{_udrDeploymentConfig = a})
94
true
true
1
9
29
52
25
27
null
null
dysinger/amazonka
amazonka-rds/gen/Network/AWS/RDS/CreateDBInstance.hs
mpl-2.0
-- | Specifies the accessibility options for the DB instance. A value of true -- specifies an Internet-facing instance with a publicly resolvable DNS name, -- which resolves to a public IP address. A value of false specifies an internal -- instance with a DNS name that resolves to a private IP address. -- -- Default: The default behavior varies depending on whether a VPC has been -- requested or not. The following list shows the default behavior in each case. -- -- Default VPC: true VPC: false If no DB subnet group has been specified -- as part of the request and the PubliclyAccessible value has not been set, the -- DB instance will be publicly accessible. If a specific DB subnet group has -- been specified as part of the request and the PubliclyAccessible value has -- not been set, the DB instance will be private. cdbiPubliclyAccessible :: Lens' CreateDBInstance (Maybe Bool) cdbiPubliclyAccessible = lens _cdbiPubliclyAccessible (\s a -> s { _cdbiPubliclyAccessible = a })
993
cdbiPubliclyAccessible :: Lens' CreateDBInstance (Maybe Bool) cdbiPubliclyAccessible = lens _cdbiPubliclyAccessible (\s a -> s { _cdbiPubliclyAccessible = a })
163
cdbiPubliclyAccessible = lens _cdbiPubliclyAccessible (\s a -> s { _cdbiPubliclyAccessible = a })
101
true
true
1
9
173
64
37
27
null
null
imh/plover
src/Language/Plover/Expressions.hs
mit
e0 = "x" := Vec "i" 2 (Vec "j" 2 ("i" + "j"))
45
e0 = "x" := Vec "i" 2 (Vec "j" 2 ("i" + "j"))
45
e0 = "x" := Vec "i" 2 (Vec "j" 2 ("i" + "j"))
45
false
false
0
10
12
34
17
17
null
null
fmthoma/ghc
compiler/coreSyn/CoreUtils.hs
bsd-3-clause
exprOkForSideEffects = expr_ok primOpOkForSideEffects
53
exprOkForSideEffects = expr_ok primOpOkForSideEffects
53
exprOkForSideEffects = expr_ok primOpOkForSideEffects
53
false
false
1
5
3
13
4
9
null
null
forked-upstream-packages-for-ghcjs/ghc
compiler/basicTypes/MkId.hs
bsd-3-clause
mkDictFunTy :: [TyVar] -> ThetaType -> Class -> [Type] -> (Int, Type)
mkDictFunTy tvs theta clas tys
  = (length silent_theta, dfun_ty)
  where
    dfun_ty = mkSigmaTy tvs (silent_theta ++ theta) (mkClassPred clas tys)
    silent_theta
      | null tvs, null theta = []
      | otherwise = filterOut discard $
                    substTheta (zipTopTvSubst (classTyVars clas) tys)
                               (classSCTheta clas)
        -- See Note [Silent Superclass Arguments]
    discard pred = any (`eqPred` pred) theta
        -- See the DFun Superclass Invariant in TcInstDcls

{-
************************************************************************
*                                                                      *
\subsection{Un-definable}
*                                                                      *
************************************************************************

These Ids can't be defined in Haskell. They could be defined in
unfoldings in the wired-in GHC.Prim interface file, but we'd have to
ensure that they were definitely, definitely inlined, because there is
no curried identifier for them. That's what mkCompulsoryUnfolding
does. If we had a way to get a compulsory unfolding from an interface
file, we could do that, but we don't right now.

unsafeCoerce# isn't so much a PrimOp as a phantom identifier, that
just gets expanded into a type coercion wherever it occurs. Hence we
add it as a built-in Id with an unfolding here.

The type variables we use here are "open" type variables: this means
they can unify with both unlifted and lifted types. Hence we provide
another gun with which to shoot yourself in the foot.
-}
1,689
mkDictFunTy :: [TyVar] -> ThetaType -> Class -> [Type] -> (Int, Type)
mkDictFunTy tvs theta clas tys
  = (length silent_theta, dfun_ty)
  where
    dfun_ty = mkSigmaTy tvs (silent_theta ++ theta) (mkClassPred clas tys)
    silent_theta
      | null tvs, null theta = []
      | otherwise = filterOut discard $
                    substTheta (zipTopTvSubst (classTyVars clas) tys)
                               (classSCTheta clas)
        -- See Note [Silent Superclass Arguments]
    discard pred = any (`eqPred` pred) theta
        -- See the DFun Superclass Invariant in TcInstDcls

{-
************************************************************************
*                                                                      *
\subsection{Un-definable}
*                                                                      *
************************************************************************

These Ids can't be defined in Haskell. They could be defined in
unfoldings in the wired-in GHC.Prim interface file, but we'd have to
ensure that they were definitely, definitely inlined, because there is
no curried identifier for them. That's what mkCompulsoryUnfolding
does. If we had a way to get a compulsory unfolding from an interface
file, we could do that, but we don't right now.

unsafeCoerce# isn't so much a PrimOp as a phantom identifier, that
just gets expanded into a type coercion wherever it occurs. Hence we
add it as a built-in Id with an unfolding here.

The type variables we use here are "open" type variables: this means
they can unify with both unlifted and lifted types. Hence we provide
another gun with which to shoot yourself in the foot.
-}
1,689
mkDictFunTy tvs theta clas tys
  = (length silent_theta, dfun_ty)
  where
    dfun_ty = mkSigmaTy tvs (silent_theta ++ theta) (mkClassPred clas tys)
    silent_theta
      | null tvs, null theta = []
      | otherwise = filterOut discard $
                    substTheta (zipTopTvSubst (classTyVars clas) tys)
                               (classSCTheta clas)
        -- See Note [Silent Superclass Arguments]
    discard pred = any (`eqPred` pred) theta
        -- See the DFun Superclass Invariant in TcInstDcls

{-
************************************************************************
*                                                                      *
\subsection{Un-definable}
*                                                                      *
************************************************************************

These Ids can't be defined in Haskell. They could be defined in
unfoldings in the wired-in GHC.Prim interface file, but we'd have to
ensure that they were definitely, definitely inlined, because there is
no curried identifier for them. That's what mkCompulsoryUnfolding
does. If we had a way to get a compulsory unfolding from an interface
file, we could do that, but we don't right now.

unsafeCoerce# isn't so much a PrimOp as a phantom identifier, that
just gets expanded into a type coercion wherever it occurs. Hence we
add it as a built-in Id with an unfolding here.

The type variables we use here are "open" type variables: this means
they can unify with both unlifted and lifted types. Hence we provide
another gun with which to shoot yourself in the foot.
-}
1,619
false
true
7
10
464
167
89
78
null
null
nevvi/Declarative-D7012E
lab1/lab1.hs
gpl-3.0
drop' 0 xs = xs
15
drop' 0 xs = xs
15
drop' 0 xs = xs
15
false
false
1
5
4
14
5
9
null
null
facebookincubator/duckling
Duckling/AmountOfMoney/KA/Rules.hs
bsd-3-clause
rulePrecision :: Rule
rulePrecision = Rule
  { name = "about|exactly <amount-of-money>"
  , pattern =
    [ regex "ზუსტად|იმენა|დაახლოებით|გძეტა"
    , Predicate isMoneyWithValue
    ]
  , prod = \tokens -> case tokens of
      (_:token:_) -> Just token
      _ -> Nothing
  }
276
rulePrecision :: Rule
rulePrecision = Rule
  { name = "about|exactly <amount-of-money>"
  , pattern =
    [ regex "ზუსტად|იმენა|დაახლოებით|გძეტა"
    , Predicate isMoneyWithValue
    ]
  , prod = \tokens -> case tokens of
      (_:token:_) -> Just token
      _ -> Nothing
  }
276
rulePrecision = Rule
  { name = "about|exactly <amount-of-money>"
  , pattern =
    [ regex "ზუსტად|იმენა|დაახლოებით|გძეტა"
    , Predicate isMoneyWithValue
    ]
  , prod = \tokens -> case tokens of
      (_:token:_) -> Just token
      _ -> Nothing
  }
254
false
true
0
14
68
88
45
43
null
null
urbanslug/ghc
compiler/types/TyCon.hs
bsd-3-clause
tyConPrimRep tc = ASSERT(not (isUnboxedTupleTyCon tc)) PtrRep
61
tyConPrimRep tc = ASSERT(not (isUnboxedTupleTyCon tc)) PtrRep
61
tyConPrimRep tc = ASSERT(not (isUnboxedTupleTyCon tc)) PtrRep
61
false
false
1
9
6
30
12
18
null
null
unisonweb/platform
parser-typechecker/src/Unison/Runtime/Foreign/Function.hs
mit
readForeignEnum :: Enum a => [Int] -> [Int] -> Stack 'UN -> Stack 'BX -> IO ([Int], [Int], a) readForeignEnum = readForeignAs toEnum
138
readForeignEnum :: Enum a => [Int] -> [Int] -> Stack 'UN -> Stack 'BX -> IO ([Int], [Int], a) readForeignEnum = readForeignAs toEnum
138
readForeignEnum = readForeignAs toEnum
38
false
true
0
13
28
76
38
38
null
null
narrative/stack
src/Stack/Setup.hs
bsd-3-clause
addIncludeLib :: ExtraDirs -> Config -> Config addIncludeLib (ExtraDirs _bins includes libs) config = config { configExtraIncludeDirs = Set.union (configExtraIncludeDirs config) (Set.fromList $ map T.pack includes) , configExtraLibDirs = Set.union (configExtraLibDirs config) (Set.fromList $ map T.pack libs) }
354
addIncludeLib :: ExtraDirs -> Config -> Config addIncludeLib (ExtraDirs _bins includes libs) config = config { configExtraIncludeDirs = Set.union (configExtraIncludeDirs config) (Set.fromList $ map T.pack includes) , configExtraLibDirs = Set.union (configExtraLibDirs config) (Set.fromList $ map T.pack libs) }
354
addIncludeLib (ExtraDirs _bins includes libs) config = config { configExtraIncludeDirs = Set.union (configExtraIncludeDirs config) (Set.fromList $ map T.pack includes) , configExtraLibDirs = Set.union (configExtraLibDirs config) (Set.fromList $ map T.pack libs) }
307
false
true
0
12
81
115
56
59
null
null
ekmett/wxHaskell
wxcore/src/haskell/Graphics/UI/WXCore/WxcDefs.hs
lgpl-2.1
wxSTC_CAML_IDENTIFIER :: Int wxSTC_CAML_IDENTIFIER = 1
54
wxSTC_CAML_IDENTIFIER :: Int wxSTC_CAML_IDENTIFIER = 1
54
wxSTC_CAML_IDENTIFIER = 1
25
false
true
0
6
5
18
7
11
null
null
tomberek/rulestesting
src/Control/Arrow/CCA/Rules.hs
bsd-3-clause
isArrLike (VarE (nameBase -> "snd")) = True
43
isArrLike (VarE (nameBase -> "snd")) = True
43
isArrLike (VarE (nameBase -> "snd")) = True
43
false
false
0
9
6
22
11
11
null
null
razvan9310/barrelfish
tools/hamlet/Parser.hs
mit
-- parse the body of a capability definition
capabilityDef name = do
  -- check for "can_retype_multiple"
  multi <- (do reserved "can_retype_multiple"
               missingSep ("can_retype_multiple in " ++ name)
               return True)
           <|> (return False)
  -- read sequence of field, address, size, and equality definitions
  annotatedFields <- many $ capFieldOrExpr name
  (fields, addresses, sizes, eqExprs) <- return $ unzipDefs annotatedFields
  -- lengths to check
  let numAddrs = length addresses
      numSizes = length sizes
  -- check that there are either 0 or 1 of both address and size definitions
  if numAddrs > 1
    then unexpected ("multiple address definitions for cap " ++ name)
    else return ()
  if numSizes > 1
    then unexpected ("multiple size definitions for cap " ++ name)
    else return ()
  if numAddrs < 1 && numSizes > 0
    then unexpected ("have size definition but no address definition for cap " ++ name)
    else return ()
  -- merge address and size expressions if present
  let rangeExpr = if null addresses
                    then Nothing
                    else Just $ if null sizes
                                  then (head addresses, ZeroSize)
                                  else (head addresses, head sizes)
  return (fields, rangeExpr, eqExprs, multi)
  where
    -- un-maybe lists from capfields parsing
    unzipDefs annotatedFields = (fs, as, ss, es)
      where
        fs = catMaybes afs
        as = catMaybes aas
        ss = catMaybes ass
        es = catMaybes ess
        (afs, aas, ass, ess) = unzip4 annotatedFields
1,686
capabilityDef name = do
  -- check for "can_retype_multiple"
  multi <- (do reserved "can_retype_multiple"
               missingSep ("can_retype_multiple in " ++ name)
               return True)
           <|> (return False)
  -- read sequence of field, address, size, and equality definitions
  annotatedFields <- many $ capFieldOrExpr name
  (fields, addresses, sizes, eqExprs) <- return $ unzipDefs annotatedFields
  -- lengths to check
  let numAddrs = length addresses
      numSizes = length sizes
  -- check that there are either 0 or 1 of both address and size definitions
  if numAddrs > 1
    then unexpected ("multiple address definitions for cap " ++ name)
    else return ()
  if numSizes > 1
    then unexpected ("multiple size definitions for cap " ++ name)
    else return ()
  if numAddrs < 1 && numSizes > 0
    then unexpected ("have size definition but no address definition for cap " ++ name)
    else return ()
  -- merge address and size expressions if present
  let rangeExpr = if null addresses
                    then Nothing
                    else Just $ if null sizes
                                  then (head addresses, ZeroSize)
                                  else (head addresses, head sizes)
  return (fields, rangeExpr, eqExprs, multi)
  where
    -- un-maybe lists from capfields parsing
    unzipDefs annotatedFields = (fs, as, ss, es)
      where
        fs = catMaybes afs
        as = catMaybes aas
        ss = catMaybes ass
        es = catMaybes ess
        (afs, aas, ass, ess) = unzip4 annotatedFields
1,641
capabilityDef name = do
  -- check for "can_retype_multiple"
  multi <- (do reserved "can_retype_multiple"
               missingSep ("can_retype_multiple in " ++ name)
               return True)
           <|> (return False)
  -- read sequence of field, address, size, and equality definitions
  annotatedFields <- many $ capFieldOrExpr name
  (fields, addresses, sizes, eqExprs) <- return $ unzipDefs annotatedFields
  -- lengths to check
  let numAddrs = length addresses
      numSizes = length sizes
  -- check that there are either 0 or 1 of both address and size definitions
  if numAddrs > 1
    then unexpected ("multiple address definitions for cap " ++ name)
    else return ()
  if numSizes > 1
    then unexpected ("multiple size definitions for cap " ++ name)
    else return ()
  if numAddrs < 1 && numSizes > 0
    then unexpected ("have size definition but no address definition for cap " ++ name)
    else return ()
  -- merge address and size expressions if present
  let rangeExpr = if null addresses
                    then Nothing
                    else Just $ if null sizes
                                  then (head addresses, ZeroSize)
                                  else (head addresses, head sizes)
  return (fields, rangeExpr, eqExprs, multi)
  where
    -- un-maybe lists from capfields parsing
    unzipDefs annotatedFields = (fs, as, ss, es)
      where
        fs = catMaybes afs
        as = catMaybes aas
        ss = catMaybes ass
        es = catMaybes ess
        (afs, aas, ass, ess) = unzip4 annotatedFields
1,641
true
false
0
14
566
371
190
181
null
null
haskoin/secp256k1-haskell
test/Crypto/Secp256k1/InternalSpec.hs
unlicense
cloneContextTest :: Assertion
cloneContextTest = do
    (x1, x2) <- liftIO $ do
        x1 <- contextCreate signVerify
        ret <- withEntropy $ contextRandomize x1
        unless (isSuccess ret) $ error "failed to randomize context"
        x2 <- contextClone x1
        return (x1, x2)
    assertBool "original context not null" $ x1 /= nullPtr
    assertBool "cloned context not null" $ x2 /= nullPtr
    assertBool "context ptrs different" $ x1 /= x2
457
cloneContextTest :: Assertion
cloneContextTest = do
    (x1, x2) <- liftIO $ do
        x1 <- contextCreate signVerify
        ret <- withEntropy $ contextRandomize x1
        unless (isSuccess ret) $ error "failed to randomize context"
        x2 <- contextClone x1
        return (x1, x2)
    assertBool "original context not null" $ x1 /= nullPtr
    assertBool "cloned context not null" $ x2 /= nullPtr
    assertBool "context ptrs different" $ x1 /= x2
457
cloneContextTest = do
    (x1, x2) <- liftIO $ do
        x1 <- contextCreate signVerify
        ret <- withEntropy $ contextRandomize x1
        unless (isSuccess ret) $ error "failed to randomize context"
        x2 <- contextClone x1
        return (x1, x2)
    assertBool "original context not null" $ x1 /= nullPtr
    assertBool "cloned context not null" $ x2 /= nullPtr
    assertBool "context ptrs different" $ x1 /= x2
427
false
true
0
14
119
135
61
74
null
null
bravit/Idris-dev
src/Idris/Core/TT.hs
bsd-3-clause
fcIn (FC {}) NoFC = False
25
fcIn (FC {}) NoFC = False
25
fcIn (FC {}) NoFC = False
25
false
false
0
7
5
18
9
9
null
null
TransformingMusicology/adb-test-framework
src/AudioDB/Test.hs
gpl-3.0
mkDistance EuclideanNormed = Just [euclideanNormedFlag]
65
mkDistance EuclideanNormed = Just [euclideanNormedFlag]
65
mkDistance EuclideanNormed = Just [euclideanNormedFlag]
65
false
false
0
6
14
15
7
8
null
null
NightRa/Idris-dev
src/IRTS/CodegenC.hs
bsd-3-clause
doOp v LWriteStr [_,s] = v ++ "MKINT((i_int)(idris_writeStr(stdout" ++ ",GETSTR(" ++ creg s ++ "))))"
149
doOp v LWriteStr [_,s] = v ++ "MKINT((i_int)(idris_writeStr(stdout" ++ ",GETSTR(" ++ creg s ++ "))))"
149
doOp v LWriteStr [_,s] = v ++ "MKINT((i_int)(idris_writeStr(stdout" ++ ",GETSTR(" ++ creg s ++ "))))"
149
false
false
0
8
62
38
19
19
null
null
harendra-kumar/asyncly
src/Streamly/Internal/Data/Sink.hs
bsd-3-clause
-- | Demultiplex to multiple consumers without collecting the results. Useful
-- to run different effectful computations depending on the value of the stream
-- elements, for example handling network packets of different types using
-- different handlers.
--
-- @
--
--                             |-------Sink m a
-- -----stream m a-----Map-----|
--                             |-------Sink m a
--                             |
--                                       ...
-- @
--
-- @
-- > let pr x = Sink.drainM (putStrLn . ((x ++ " ") ++) . show)
-- > let table = Data.Map.fromList [(1, pr \"One"), (2, pr \"Two")]
--   in Sink.sink (Sink.demux id table) (S.enumerateFromTo 1 100)
-- One 1
-- Two 2
-- @
{-
demux :: (Monad m, Ord k) => (a -> k) -> Map k (Sink m a) -> Sink m a
demux f kv = Sink step
    where
    step a =
        -- XXX should we raise an exception in Nothing case?
        -- Ideally we should enforce that it is a total map over k so that look
        -- up never fails
        case Map.lookup (f a) kv of
            Nothing -> return ()
            Just (Sink g) -> g a
-}
demux :: (Monad m, Ord k) => Map k (Sink m a) -> Sink m (a, k)
demux kv = Sink step
    where
    step (a, k) =
        -- XXX should we raise an exception in Nothing case?
        -- Ideally we should enforce that it is a total map over k so that look
        -- up never fails
        case Map.lookup k kv of
            Nothing -> return ()
            Just (Sink g) -> g a

-- | Split elements in the input stream into two parts using a monadic unzip
-- function, direct each part to a different sink.
--
-- @
--
--                           |-------Sink m b
-- -----Stream m a----(b,c)--|
--                           |-------Sink m c
-- @
-- @
-- > let pr x = Sink.drainM (putStrLn . ((x ++ " ") ++) . show)
--   in Sink.sink (Sink.unzip return (pr \"L") (pr \"R")) (S.yield (1,2))
-- L 1
-- R 2
-- @
1,911
demux :: (Monad m, Ord k) => Map k (Sink m a) -> Sink m (a, k)
demux kv = Sink step
    where
    step (a, k) =
        -- XXX should we raise an exception in Nothing case?
        -- Ideally we should enforce that it is a total map over k so that look
        -- up never fails
        case Map.lookup k kv of
            Nothing -> return ()
            Just (Sink g) -> g a

-- | Split elements in the input stream into two parts using a monadic unzip
-- function, direct each part to a different sink.
--
-- @
--
--                           |-------Sink m b
-- -----Stream m a----(b,c)--|
--                           |-------Sink m c
-- @
-- @
-- > let pr x = Sink.drainM (putStrLn . ((x ++ " ") ++) . show)
--   in Sink.sink (Sink.unzip return (pr \"L") (pr \"R")) (S.yield (1,2))
-- L 1
-- R 2
-- @
809
demux kv = Sink step
    where
    step (a, k) =
        -- XXX should we raise an exception in Nothing case?
        -- Ideally we should enforce that it is a total map over k so that look
        -- up never fails
        case Map.lookup k kv of
            Nothing -> return ()
            Just (Sink g) -> g a

-- | Split elements in the input stream into two parts using a monadic unzip
-- function, direct each part to a different sink.
--
-- @
--
--                           |-------Sink m b
-- -----Stream m a----(b,c)--|
--                           |-------Sink m c
-- @
-- @
-- > let pr x = Sink.drainM (putStrLn . ((x ++ " ") ++) . show)
--   in Sink.sink (Sink.unzip return (pr \"L") (pr \"R")) (S.yield (1,2))
-- L 1
-- R 2
-- @
746
true
true
0
10
649
160
98
62
null
null
vikraman/ghc
compiler/main/HscTypes.hs
bsd-3-clause
pkgQual :: DynFlags -> PrintUnqualified pkgQual dflags = alwaysQualify { queryQualifyPackage = mkQualPackage dflags }
129
pkgQual :: DynFlags -> PrintUnqualified pkgQual dflags = alwaysQualify { queryQualifyPackage = mkQualPackage dflags }
129
pkgQual dflags = alwaysQualify { queryQualifyPackage = mkQualPackage dflags }
89
false
true
0
7
26
29
15
14
null
null
AlexanderPankiv/ghc
compiler/codeGen/StgCmmPrim.hs
bsd-3-clause
emitPrimOp _ res ReadByteArrayOp_Word64 args = doIndexByteArrayOp Nothing b64 res args
104
emitPrimOp _ res ReadByteArrayOp_Word64 args = doIndexByteArrayOp Nothing b64 res args
104
emitPrimOp _ res ReadByteArrayOp_Word64 args = doIndexByteArrayOp Nothing b64 res args
104
false
false
0
5
28
24
11
13
null
null
kim/amazonka
amazonka-cloudhsm/gen/Network/AWS/CloudHSM/DescribeLunaClient.hs
mpl-2.0
-- | The ARN of the client.
dlcrClientArn :: Lens' DescribeLunaClientResponse (Maybe Text)
dlcrClientArn = lens _dlcrClientArn (\s a -> s { _dlcrClientArn = a })
161
dlcrClientArn :: Lens' DescribeLunaClientResponse (Maybe Text) dlcrClientArn = lens _dlcrClientArn (\s a -> s { _dlcrClientArn = a })
133
dlcrClientArn = lens _dlcrClientArn (\s a -> s { _dlcrClientArn = a })
70
true
true
0
9
25
46
25
21
null
null
lesguillemets/hspov_proto
examples/Example.hs
bsd-3-clause
:: Vect o = V 0 0 0
19
:: Vect o = V 0 0 0
19
:: Vect o = V 0 0 0
19
false
false
0
6
7
21
9
12
null
null
nushio3/distributed-process-p2p
src/Control/Distributed/Backend/P2P.hs
bsd-3-clause
onQuery :: MVar (S.Set DPT.ProcessId) -> QueryMessage -> Process ()
onQuery peers (QueryMessage reply) = do
    ps <- liftIO $ readMVar peers
    sendChan reply $ QueryResult . map processNodeId . S.toList $ ps
210
onQuery :: MVar (S.Set DPT.ProcessId) -> QueryMessage -> Process ()
onQuery peers (QueryMessage reply) = do
    ps <- liftIO $ readMVar peers
    sendChan reply $ QueryResult . map processNodeId . S.toList $ ps
210
onQuery peers (QueryMessage reply) = do
    ps <- liftIO $ readMVar peers
    sendChan reply $ QueryResult . map processNodeId . S.toList $ ps
142
false
true
3
11
40
92
41
51
null
null
rsasse/tamarin-prover
lib/theory/src/Theory/Tools/Wellformedness.hs
gpl-3.0
freshNamesReportDiff :: OpenDiffTheory -> WfErrorReport
freshNamesReportDiff thy = do
    ru <- diffThyProtoRules thy
    case filter ((LSortFresh ==) . sortOfName) $ universeBi ru of
      []    -> []
      names -> return $ (,) "fresh names" $ fsep $
          text ( "rule " ++ quote (showRuleCaseName ru) ++ ": " ++
                 "fresh names are not allowed in rule:" )
        : punctuate comma (map (nest 2 . text . show) names)

-- | Report on capitalization of public names.
486
freshNamesReportDiff :: OpenDiffTheory -> WfErrorReport
freshNamesReportDiff thy = do
    ru <- diffThyProtoRules thy
    case filter ((LSortFresh ==) . sortOfName) $ universeBi ru of
      []    -> []
      names -> return $ (,) "fresh names" $ fsep $
          text ( "rule " ++ quote (showRuleCaseName ru) ++ ": " ++
                 "fresh names are not allowed in rule:" )
        : punctuate comma (map (nest 2 . text . show) names)

-- | Report on capitalization of public names.
486
freshNamesReportDiff thy = do
    ru <- diffThyProtoRules thy
    case filter ((LSortFresh ==) . sortOfName) $ universeBi ru of
      []    -> []
      names -> return $ (,) "fresh names" $ fsep $
          text ( "rule " ++ quote (showRuleCaseName ru) ++ ": " ++
                 "fresh names are not allowed in rule:" )
        : punctuate comma (map (nest 2 . text . show) names)

-- | Report on capitalization of public names.
430
false
true
0
19
133
149
73
76
null
null
printedheart/atomspace
examples/haskell/example_data_types.hs
agpl-3.0
{- Type checking error.
findAnimals2 = BindLink
    (GroundedSchemaNode "some-fun") -- This is not a Variable.
    (ListLink |> ConceptNode "Arg1" (stv 1 1)
              \> ConceptNode "Arg2" (stv 1 1) )
    (ConceptNode "res" (stv 1 1))
-}

main :: IO ()
main = runOnNewAtomSpace $ do
    p <- get $ PredicateNode "Pred" noTv
    case p of
      Just (PredicateNode _ _) -> liftIO $ print "Predicate found."
      _                        -> liftIO $ print "No Predicate found."
    liftIO $ printAtom li
    insert li
    res <- get li
    () <- case res of
      Just (ListLink (x:_)) -> case x of
          Gen (ConceptNode c _)    -> liftIO $ print "First is Concept"
          Gen (PredicateNode p _ ) -> liftIO $ print "First is Predicate"
          _                        -> liftIO $ print "First is other type"
    insert e
    remove e
    return ()
995
main :: IO ()
main = runOnNewAtomSpace $ do
    p <- get $ PredicateNode "Pred" noTv
    case p of
      Just (PredicateNode _ _) -> liftIO $ print "Predicate found."
      _                        -> liftIO $ print "No Predicate found."
    liftIO $ printAtom li
    insert li
    res <- get li
    () <- case res of
      Just (ListLink (x:_)) -> case x of
          Gen (ConceptNode c _)    -> liftIO $ print "First is Concept"
          Gen (PredicateNode p _ ) -> liftIO $ print "First is Predicate"
          _                        -> liftIO $ print "First is other type"
    insert e
    remove e
    return ()
696
main = runOnNewAtomSpace $ do
    p <- get $ PredicateNode "Pred" noTv
    case p of
      Just (PredicateNode _ _) -> liftIO $ print "Predicate found."
      _                        -> liftIO $ print "No Predicate found."
    liftIO $ printAtom li
    insert li
    res <- get li
    () <- case res of
      Just (ListLink (x:_)) -> case x of
          Gen (ConceptNode c _)    -> liftIO $ print "First is Concept"
          Gen (PredicateNode p _ ) -> liftIO $ print "First is Predicate"
          _                        -> liftIO $ print "First is other type"
    insert e
    remove e
    return ()
682
true
true
0
17
424
227
102
125
null
null
abdulrahimnizamani/OccamStar
interpreter.hs
gpl-2.0
matchExpExp lx w d t (HsInfixApp p1 (HsQConOp op1) p2) (HsInfixApp e1 (HsQConOp op2) e2)
    | op1 == op2 = do
        (t1,m1) <- matchExpExp lx w d t p1 e1
        (t2,m2) <- matchExpExp lx w d t1 p2 e2
        return (t2,m1 && m2)
223
matchExpExp lx w d t (HsInfixApp p1 (HsQConOp op1) p2) (HsInfixApp e1 (HsQConOp op2) e2)
    | op1 == op2 = do
        (t1,m1) <- matchExpExp lx w d t p1 e1
        (t2,m2) <- matchExpExp lx w d t1 p2 e2
        return (t2,m1 && m2)
223
matchExpExp lx w d t (HsInfixApp p1 (HsQConOp op1) p2) (HsInfixApp e1 (HsQConOp op2) e2)
    | op1 == op2 = do
        (t1,m1) <- matchExpExp lx w d t p1 e1
        (t2,m2) <- matchExpExp lx w d t1 p2 e2
        return (t2,m1 && m2)
223
false
false
0
10
61
129
63
66
null
null
sgraf812/feed-gipeda
src/FeedGipeda/THGenerated.hs
bsd-3-clause
benchmarkProcess :: (String, Repo, SHA, Rational) -> Process String benchmarkProcess (benchmarkScript, repo, sha, timeout) = liftIO (Slave.benchmark benchmarkScript repo sha (fromRational timeout))
199
benchmarkProcess :: (String, Repo, SHA, Rational) -> Process String benchmarkProcess (benchmarkScript, repo, sha, timeout) = liftIO (Slave.benchmark benchmarkScript repo sha (fromRational timeout))
199
benchmarkProcess (benchmarkScript, repo, sha, timeout) = liftIO (Slave.benchmark benchmarkScript repo sha (fromRational timeout))
131
false
true
0
9
23
68
37
31
null
null
tekul/cryptonite
Crypto/PubKey/ECC/Types.hs
bsd-3-clause
-- | Irreducible polynomial representing the characteristic of a CurveBinary.
ecc_fx :: CurveBinary -> Integer
ecc_fx (CurveBinary fx _) = fx
141
ecc_fx :: CurveBinary -> Integer ecc_fx (CurveBinary fx _) = fx
63
ecc_fx (CurveBinary fx _) = fx
30
true
true
0
7
20
27
14
13
null
null
gridaphobe/ghc
compiler/deSugar/DsUtils.hs
bsd-3-clause
wrapBinds ((new,old):prs) e = wrapBind new old (wrapBinds prs e)
64
wrapBinds ((new,old):prs) e = wrapBind new old (wrapBinds prs e)
64
wrapBinds ((new,old):prs) e = wrapBind new old (wrapBinds prs e)
64
false
false
1
7
9
45
20
25
null
null
SamirTalwar/Smoke
src/lib/Test/Smoke/Filters.hs
mit
applyFilters :: FixtureType a => Maybe Shell -> Filter -> a -> Filtering a
applyFilters fallbackShell (Filter command) value = do
  executable <- withExceptT FilterPathError $
    convertCommandToExecutable fallbackShell command
  (exitCode, processStdOut, processStdErr) <-
    withExceptT (CouldNotExecuteFilter executable) $ ExceptT $ tryIOError $
      runExecutable executable mempty (StdIn (serializeFixture value)) Nothing
  case exitCode of
    ExitSuccess -> return $ deserializeFixture processStdOut
    ExitFailure code ->
      throwE $ FilterExecutionFailed executable (Status code)
        (StdOut processStdOut) (StdErr processStdErr)
713
applyFilters :: FixtureType a => Maybe Shell -> Filter -> a -> Filtering a
applyFilters fallbackShell (Filter command) value = do
  executable <- withExceptT FilterPathError $
    convertCommandToExecutable fallbackShell command
  (exitCode, processStdOut, processStdErr) <-
    withExceptT (CouldNotExecuteFilter executable) $ ExceptT $ tryIOError $
      runExecutable executable mempty (StdIn (serializeFixture value)) Nothing
  case exitCode of
    ExitSuccess -> return $ deserializeFixture processStdOut
    ExitFailure code ->
      throwE $ FilterExecutionFailed executable (Status code)
        (StdOut processStdOut) (StdErr processStdErr)
713
applyFilters fallbackShell (Filter command) value = do
  executable <- withExceptT FilterPathError $
    convertCommandToExecutable fallbackShell command
  (exitCode, processStdOut, processStdErr) <-
    withExceptT (CouldNotExecuteFilter executable) $ ExceptT $ tryIOError $
      runExecutable executable mempty (StdIn (serializeFixture value)) Nothing
  case exitCode of
    ExitSuccess -> return $ deserializeFixture processStdOut
    ExitFailure code ->
      throwE $ FilterExecutionFailed executable (Status code)
        (StdOut processStdOut) (StdErr processStdErr)
638
false
true
0
13
175
186
88
98
null
null
edwardwas/drawingObjects
example/Main.hs
mit
main = playBanana (InWindow "floatMe" (800,600) (10,10)) white 33 $ \eTick eEv ->
  liftMoment $ linkBuild $ do
    eOne <- construct $ modifyPicture (translate 100 (-100)) $ counterObj 0 eEv
              :: BuildMonad (I.IntMap Int) (Event Int)
    eTwo <- construct $ modifyPicture (translate 100 100) $ counterObj 1 eTick
              :: BuildMonad (I.IntMap Int) (Event Int)
    construct $ sumObject 2 [eOne, eTwo]
433
main = playBanana (InWindow "floatMe" (800,600) (10,10)) white 33 $ \eTick eEv ->
  liftMoment $ linkBuild $ do
    eOne <- construct $ modifyPicture (translate 100 (-100)) $ counterObj 0 eEv
              :: BuildMonad (I.IntMap Int) (Event Int)
    eTwo <- construct $ modifyPicture (translate 100 100) $ counterObj 1 eTick
              :: BuildMonad (I.IntMap Int) (Event Int)
    construct $ sumObject 2 [eOne, eTwo]
433
main = playBanana (InWindow "floatMe" (800,600) (10,10)) white 33 $ \eTick eEv ->
  liftMoment $ linkBuild $ do
    eOne <- construct $ modifyPicture (translate 100 (-100)) $ counterObj 0 eEv
              :: BuildMonad (I.IntMap Int) (Event Int)
    eTwo <- construct $ modifyPicture (translate 100 100) $ counterObj 1 eTick
              :: BuildMonad (I.IntMap Int) (Event Int)
    construct $ sumObject 2 [eOne, eTwo]
433
false
false
0
17
113
183
91
92
null
null
dmbarbour/Sirea
src/Sirea/PCX.hs
bsd-3-clause
-- | Find a resource in a partition based on both name and type.
--
-- Notionally, the resource already exists, we aren't creating it.
-- In practice, the resource is created on the first lookup, and all
-- subsequent lookups (with the same string and type) will return
-- the same resource. To protect notional existence, resources are
-- not to have observable side-effects until we interact with them.
--
findByNameInPCX :: (NamedResource p r) => String -> PCX p -> IO r
findByNameInPCX = findByNameInPCX'
508
findByNameInPCX :: (NamedResource p r) => String -> PCX p -> IO r findByNameInPCX = findByNameInPCX'
100
findByNameInPCX = findByNameInPCX'
34
true
true
0
8
86
44
26
18
null
null
jacekszymanski/wxHaskell
wxcore/src/haskell/Graphics/UI/WXCore/WxcDefs.hs
lgpl-2.1
wxSTC_LEX_PHPSCRIPT :: Int wxSTC_LEX_PHPSCRIPT = 69
51
wxSTC_LEX_PHPSCRIPT :: Int wxSTC_LEX_PHPSCRIPT = 69
51
wxSTC_LEX_PHPSCRIPT = 69
24
false
true
0
4
5
11
6
5
null
null
olsner/ghc
compiler/prelude/PrelNames.hs
bsd-3-clause
appAIdKey = mkPreludeMiscIdUnique 183
43
appAIdKey = mkPreludeMiscIdUnique 183
43
appAIdKey = mkPreludeMiscIdUnique 183
43
false
false
0
5
9
9
4
5
null
null
nomeata/gipeda
src/Development/Shake/Fancy.hs
mit
finish :: FancyEnv -> IO () finish env = finishConsoleRegion (currentRegion env) $ "✓ " ++ currentTarget env ++ " done"
123
finish :: FancyEnv -> IO () finish env = finishConsoleRegion (currentRegion env) $ "✓ " ++ currentTarget env ++ " done"
123
finish env = finishConsoleRegion (currentRegion env) $ "✓ " ++ currentTarget env ++ " done"
95
false
true
2
8
24
53
23
30
null
null
samvher/translatethenews
app/TTN/View/Article.hs
mit
mkGTranslateURL :: Language -> Language -> Text -> Text mkGTranslateURL source target article_url = "https://translate.google.com/translate?sl=" <> langCode source <> "&tl=" <> langCode target <> "&ie=UTF-8&u=" <> article_url
235
mkGTranslateURL :: Language -> Language -> Text -> Text mkGTranslateURL source target article_url = "https://translate.google.com/translate?sl=" <> langCode source <> "&tl=" <> langCode target <> "&ie=UTF-8&u=" <> article_url
235
mkGTranslateURL source target article_url = "https://translate.google.com/translate?sl=" <> langCode source <> "&tl=" <> langCode target <> "&ie=UTF-8&u=" <> article_url
179
false
true
0
10
36
56
27
29
null
null
abhean/phffp-examples
src/Lib.hs
bsd-3-clause
myAbs :: (Ord a, Num a) => a -> a myAbs x | x < 0 = -x | otherwise = x
76
myAbs :: (Ord a, Num a) => a -> a myAbs x | x < 0 = -x | otherwise = x
76
myAbs x | x < 0 = -x | otherwise = x
42
false
true
1
8
27
53
26
27
null
null
uduki/hsQt
extra-pkgs/Glome/Qt/Glome/Cone.hs
bsd-2-clause
cone_z :: Flt -> Flt -> Flt -> Flt -> SolidItem cone_z r h1 h2 height = SolidItem (Cone r h1 h2 height)
103
cone_z :: Flt -> Flt -> Flt -> Flt -> SolidItem cone_z r h1 h2 height = SolidItem (Cone r h1 h2 height)
103
cone_z r h1 h2 height = SolidItem (Cone r h1 h2 height)
55
false
true
0
8
22
51
25
26
null
null
robeverest/accelerate
Data/Array/Accelerate/Smart.hs
bsd-3-clause
mkBAnd :: (Elt t, IsIntegral t) => Exp t -> Exp t -> Exp t mkBAnd x y = Exp $ PrimBAnd integralType `PrimApp` tup2 (x, y)
121
mkBAnd :: (Elt t, IsIntegral t) => Exp t -> Exp t -> Exp t mkBAnd x y = Exp $ PrimBAnd integralType `PrimApp` tup2 (x, y)
121
mkBAnd x y = Exp $ PrimBAnd integralType `PrimApp` tup2 (x, y)
62
false
true
0
8
26
70
35
35
null
null
capital-match/bake
src/Development/Bake/Core/Type.hs
bsd-3-clause
-- | Given a 'Stringy' for @test@, and a function that when run on a code base
-- returns the list of tests that need running, and a function to populate
-- a 'TestInfo', modify the 'Oven' with a test type.
ovenTest :: IO [test] -> (test -> TestInfo test)
         -> Oven state patch () -> Oven state patch test
ovenTest prepare info o = o{ovenPrepare= \_ _ -> prepare, ovenTestInfo=info}
393
ovenTest :: IO [test] -> (test -> TestInfo test)
         -> Oven state patch () -> Oven state patch test
ovenTest prepare info o = o{ovenPrepare= \_ _ -> prepare, ovenTestInfo=info}
182
ovenTest prepare info o = o{ovenPrepare= \_ _ -> prepare, ovenTestInfo=info}
76
true
true
0
9
83
86
46
40
null
null
cchalmers/dense
src/Data/Dense/Generic.hs
bsd-3-clause
-- | Generate a bundle of indexes for the given 'Layout'.
bundleIndexes :: (Monad m, Shape f) => Layout f -> MBundle m v (f Int)
bundleIndexes l = MBundle.fromStream (streamIndexes l) (Exact (shapeSize l))
205
bundleIndexes :: (Monad m, Shape f) => Layout f -> MBundle m v (f Int) bundleIndexes l = MBundle.fromStream (streamIndexes l) (Exact (shapeSize l))
147
bundleIndexes l = MBundle.fromStream (streamIndexes l) (Exact (shapeSize l))
76
true
true
0
10
34
79
38
41
null
null
thoughtpolice/binary-serialise-cbor
Data/Binary/Serialise/CBOR/Decoding.hs
bsd-3-clause
decodeInt :: Decoder Int decodeInt = Decoder (\k -> ConsumeInt (\n# -> k (I# n#)))
82
decodeInt :: Decoder Int decodeInt = Decoder (\k -> ConsumeInt (\n# -> k (I# n#)))
82
decodeInt = Decoder (\k -> ConsumeInt (\n# -> k (I# n#)))
57
false
true
1
13
14
48
23
25
null
null
spechub/Hets
OWL2/ShipSyntax.hs
gpl-2.0
primRole :: CharParser st Role
primRole = do
    o <- notOrInv << skip
    fmap (UnOp o) primRole
  <|> braced (nomPair NominalR)
  <|> parent role
  <|> fmap RName nominal
172
primRole :: CharParser st Role
primRole = do
    o <- notOrInv << skip
    fmap (UnOp o) primRole
  <|> braced (nomPair NominalR)
  <|> parent role
  <|> fmap RName nominal
172
primRole = do
    o <- notOrInv << skip
    fmap (UnOp o) primRole
  <|> braced (nomPair NominalR)
  <|> parent role
  <|> fmap RName nominal
141
false
true
9
9
41
79
38
41
null
null
mrakgr/futhark
src/futhark-test.hs
bsd-3-clause
runTests :: TestConfig -> [FilePath] -> IO ()
runTests config paths = do
  files <- concat <$> mapM testPrograms paths
  let mode = configTestMode config
  testmvar <- newEmptyMVar
  resmvar <- newEmptyMVar
  concurrency <- getNumCapabilities
  replicateM_ concurrency $ forkIO $ runTest testmvar resmvar
  all_tests <- mapM (makeTestCase config mode) files
  let (excluded, included) = partition (excludedTest config) all_tests
  _ <- forkIO $ mapM_ (putMVar testmvar) included
  isTTY <- (&& mode /= OnTravis) <$> hIsTerminalDevice stdout
  let report = if isTTY then reportInteractive else reportText
      clear = if isTTY then clearLine else putStr "\n"
      getResults remaining failed passed =
        case S.toList remaining of
          []      -> clear >> return (failed, passed)
          first:_ -> do
            report (testCaseProgram first) failed passed $ S.size remaining
            (test, res) <- takeMVar resmvar
            let next = getResults $ test `S.delete` remaining
            case res of
              Success -> next failed (passed+1)
              Failure s -> do
                clear
                T.putStrLn (T.pack (testCaseProgram test) <> ":\n" <> s)
                next (failed+1) passed
  (failed, passed) <- getResults (S.fromList included) 0 0
  let excluded_str = if null excluded
                     then ""
                     else " (" ++ show (length excluded) ++ " excluded)"
  putStrLn $ show failed ++ " failed, " ++ show passed ++ " passed" ++
             excluded_str ++ "."
  exitWith $ case failed of
               0 -> ExitSuccess
               _ -> ExitFailure 1
1,630
runTests :: TestConfig -> [FilePath] -> IO ()
runTests config paths = do
  files <- concat <$> mapM testPrograms paths
  let mode = configTestMode config
  testmvar <- newEmptyMVar
  resmvar <- newEmptyMVar
  concurrency <- getNumCapabilities
  replicateM_ concurrency $ forkIO $ runTest testmvar resmvar
  all_tests <- mapM (makeTestCase config mode) files
  let (excluded, included) = partition (excludedTest config) all_tests
  _ <- forkIO $ mapM_ (putMVar testmvar) included
  isTTY <- (&& mode /= OnTravis) <$> hIsTerminalDevice stdout
  let report = if isTTY then reportInteractive else reportText
      clear = if isTTY then clearLine else putStr "\n"
      getResults remaining failed passed =
        case S.toList remaining of
          []      -> clear >> return (failed, passed)
          first:_ -> do
            report (testCaseProgram first) failed passed $ S.size remaining
            (test, res) <- takeMVar resmvar
            let next = getResults $ test `S.delete` remaining
            case res of
              Success -> next failed (passed+1)
              Failure s -> do
                clear
                T.putStrLn (T.pack (testCaseProgram test) <> ":\n" <> s)
                next (failed+1) passed
  (failed, passed) <- getResults (S.fromList included) 0 0
  let excluded_str = if null excluded
                     then ""
                     else " (" ++ show (length excluded) ++ " excluded)"
  putStrLn $ show failed ++ " failed, " ++ show passed ++ " passed" ++
             excluded_str ++ "."
  exitWith $ case failed of
               0 -> ExitSuccess
               _ -> ExitFailure 1
1,630
runTests config paths = do
  files <- concat <$> mapM testPrograms paths
  let mode = configTestMode config
  testmvar <- newEmptyMVar
  resmvar <- newEmptyMVar
  concurrency <- getNumCapabilities
  replicateM_ concurrency $ forkIO $ runTest testmvar resmvar
  all_tests <- mapM (makeTestCase config mode) files
  let (excluded, included) = partition (excludedTest config) all_tests
  _ <- forkIO $ mapM_ (putMVar testmvar) included
  isTTY <- (&& mode /= OnTravis) <$> hIsTerminalDevice stdout
  let report = if isTTY then reportInteractive else reportText
      clear = if isTTY then clearLine else putStr "\n"
      getResults remaining failed passed =
        case S.toList remaining of
          []      -> clear >> return (failed, passed)
          first:_ -> do
            report (testCaseProgram first) failed passed $ S.size remaining
            (test, res) <- takeMVar resmvar
            let next = getResults $ test `S.delete` remaining
            case res of
              Success -> next failed (passed+1)
              Failure s -> do
                clear
                T.putStrLn (T.pack (testCaseProgram test) <> ":\n" <> s)
                next (failed+1) passed
  (failed, passed) <- getResults (S.fromList included) 0 0
  let excluded_str = if null excluded
                     then ""
                     else " (" ++ show (length excluded) ++ " excluded)"
  putStrLn $ show failed ++ " failed, " ++ show passed ++ " passed" ++
             excluded_str ++ "."
  exitWith $ case failed of
               0 -> ExitSuccess
               _ -> ExitFailure 1
1,584
false
true
0
27
496
549
260
289
null
null
nevrenato/Hets_Fork
GUI/HTkProverGUI.hs
gpl-2.0
setSelectedProver :: ListBox String -> ProofState -> IO ()
setSelectedProver lb st = do
  let ind = case selectedProver st of
        Just sp -> findIndex (== sp) $ Map.keys (proversMap st)
        Nothing -> Nothing
  maybe (return ()) (\ i -> selection i lb >> return ()) ind

-- *** Callbacks

{- | Updates the display of the status of the selected goals. -}
416
setSelectedProver :: ListBox String -> ProofState -> IO ()
setSelectedProver lb st = do
  let ind = case selectedProver st of
        Just sp -> findIndex (== sp) $ Map.keys (proversMap st)
        Nothing -> Nothing
  maybe (return ()) (\ i -> selection i lb >> return ()) ind

-- *** Callbacks

{- | Updates the display of the status of the selected goals. -}
416
setSelectedProver lb st = do
  let ind = case selectedProver st of
        Just sp -> findIndex (== sp) $ Map.keys (proversMap st)
        Nothing -> Nothing
  maybe (return ()) (\ i -> selection i lb >> return ()) ind

-- *** Callbacks

{- | Updates the display of the status of the selected goals. -}
321
false
true
0
17
139
132
62
70
null
null
snoyberg/ghc
compiler/prelude/PrelNames.hs
bsd-3-clause
unComp1_RDR = varQual_RDR gHC_GENERICS (fsLit "unComp1")
56
unComp1_RDR = varQual_RDR gHC_GENERICS (fsLit "unComp1")
56
unComp1_RDR = varQual_RDR gHC_GENERICS (fsLit "unComp1")
56
false
false
0
7
5
17
8
9
null
null
brendanhay/gogol
gogol-containerbuilder/gen/Network/Google/Resource/Cloudbuild/Projects/Builds/Cancel.hs
mpl-2.0
-- | V1 error format.
pbcXgafv :: Lens' ProjectsBuildsCancel (Maybe Xgafv)
pbcXgafv = lens _pbcXgafv (\ s a -> s{_pbcXgafv = a})
128
pbcXgafv :: Lens' ProjectsBuildsCancel (Maybe Xgafv) pbcXgafv = lens _pbcXgafv (\ s a -> s{_pbcXgafv = a})
106
pbcXgafv = lens _pbcXgafv (\ s a -> s{_pbcXgafv = a})
53
true
true
0
9
21
46
25
21
null
null
nevrenato/HetsAlloy
Maude/PreComorphism.hs
gpl-2.0
maudeSymbol2caslSort' :: MSym.Symbol -> IdMap -> CAS.SORT maudeSymbol2caslSort' (MSym.Sort q) _ = token2id $ mkSimpleId $ "kind_" ++ show q
143
maudeSymbol2caslSort' :: MSym.Symbol -> IdMap -> CAS.SORT maudeSymbol2caslSort' (MSym.Sort q) _ = token2id $ mkSimpleId $ "kind_" ++ show q
143
maudeSymbol2caslSort' (MSym.Sort q) _ = token2id $ mkSimpleId $ "kind_" ++ show q
85
false
true
0
8
23
51
25
26
null
null
jsnajder/confusion-matrix
src/Statistics/ConfusionMatrix.hs
bsd-3-clause
-- | Builds a multiclass confusion matrix. Indices @1@, @2@, etc. are used as
-- category labels.
confMatrix :: [[Int]] -> ConfMatrix Int
confMatrix xs
  | isMatrix xs && n == m && n > 1 = CM [1..n] xs
  | otherwise = error "confMatrix: not an NxN matrix (N>=2)"
  where (n,m) = matrixDim xs

-- | Builds a binary confusion matrix from a list @[[TP,FN],[FP,TN]]@, where
-- @TP@, @FN@, @FP@, and @TN@ are the true positives, the false positives, the
-- false negatives, and the true negatives, respectively.
506
confMatrix :: [[Int]] -> ConfMatrix Int
confMatrix xs
  | isMatrix xs && n == m && n > 1 = CM [1..n] xs
  | otherwise = error "confMatrix: not an NxN matrix (N>=2)"
  where (n,m) = matrixDim xs

-- | Builds a binary confusion matrix from a list @[[TP,FN],[FP,TN]]@, where
-- @TP@, @FN@, @FP@, and @TN@ are the true positives, the false positives, the
-- false negatives, and the true negatives, respectively.
408
confMatrix xs
  | isMatrix xs && n == m && n > 1 = CM [1..n] xs
  | otherwise = error "confMatrix: not an NxN matrix (N>=2)"
  where (n,m) = matrixDim xs

-- | Builds a binary confusion matrix from a list @[[TP,FN],[FP,TN]]@, where
-- @TP@, @FN@, @FP@, and @TN@ are the true positives, the false positives, the
-- false negatives, and the true negatives, respectively.
368
true
true
1
12
95
94
49
45
null
null
marcellussiegburg/autotool
yesod/Settings/StaticFiles.hs
gpl-2.0
combineSettings :: CombineSettings combineSettings = def
56
combineSettings :: CombineSettings combineSettings = def
56
combineSettings = def
21
false
true
0
4
5
11
6
5
null
null
kim/amazonka
amazonka-autoscaling/gen/Network/AWS/AutoScaling/EnableMetricsCollection.hs
mpl-2.0
-- | 'EnableMetricsCollection' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'emcAutoScalingGroupName' @::@ 'Text' -- -- * 'emcGranularity' @::@ 'Text' -- -- * 'emcMetrics' @::@ ['Text'] -- enableMetricsCollection :: Text -- ^ 'emcAutoScalingGroupName' -> Text -- ^ 'emcGranularity' -> EnableMetricsCollection enableMetricsCollection p1 p2 = EnableMetricsCollection { _emcAutoScalingGroupName = p1 , _emcGranularity = p2 , _emcMetrics = mempty }
568
enableMetricsCollection :: Text -- ^ 'emcAutoScalingGroupName'
                        -> Text -- ^ 'emcGranularity'
                        -> EnableMetricsCollection
enableMetricsCollection p1 p2 = EnableMetricsCollection
    { _emcAutoScalingGroupName = p1
    , _emcGranularity          = p2
    , _emcMetrics              = mempty
    }
341
enableMetricsCollection p1 p2 = EnableMetricsCollection { _emcAutoScalingGroupName = p1 , _emcGranularity = p2 , _emcMetrics = mempty }
173
true
true
0
8
148
65
39
26
null
null
sol/pandoc
src/Tests/Helpers.hs
gpl-2.0
test :: (ToString a, ToString b, ToString c)
     => (a -> b)  -- ^ function to test
     -> String    -- ^ name of test case
     -> (a, c)    -- ^ (input, expected value)
     -> Test
test fn name (input, expected) =
  testCase name $ assertBool msg (actual' == expected')
     where msg = nl ++ dashes "input" ++ nl ++ input' ++ nl ++
                 dashes "expected" ++ nl ++ expected'' ++
                 dashes "got" ++ nl ++ actual'' ++ dashes ""
           nl = "\n"
           input' = toString input
           actual' = toString $ fn input
           expected' = toString expected
           diff = getDiff (lines expected') (lines actual')
           expected'' = unlines $ map vividize $ filter (\(d,_) -> d /= S) diff
           actual'' = unlines $ map vividize $ filter (\(d,_) -> d /= F) diff
           dashes "" = replicate 72 '-'
           dashes x = replicate (72 - length x - 5) '-' ++ " " ++ x ++ " ---"
951
test :: (ToString a, ToString b, ToString c)
     => (a -> b)  -- ^ function to test
     -> String    -- ^ name of test case
     -> (a, c)    -- ^ (input, expected value)
     -> Test
test fn name (input, expected) =
  testCase name $ assertBool msg (actual' == expected')
     where msg = nl ++ dashes "input" ++ nl ++ input' ++ nl ++
                 dashes "expected" ++ nl ++ expected'' ++
                 dashes "got" ++ nl ++ actual'' ++ dashes ""
           nl = "\n"
           input' = toString input
           actual' = toString $ fn input
           expected' = toString expected
           diff = getDiff (lines expected') (lines actual')
           expected'' = unlines $ map vividize $ filter (\(d,_) -> d /= S) diff
           actual'' = unlines $ map vividize $ filter (\(d,_) -> d /= F) diff
           dashes "" = replicate 72 '-'
           dashes x = replicate (72 - length x - 5) '-' ++ " " ++ x ++ " ---"
951
test fn name (input, expected) =
  testCase name $ assertBool msg (actual' == expected')
     where msg = nl ++ dashes "input" ++ nl ++ input' ++ nl ++
                 dashes "expected" ++ nl ++ expected'' ++
                 dashes "got" ++ nl ++ actual'' ++ dashes ""
           nl = "\n"
           input' = toString input
           actual' = toString $ fn input
           expected' = toString expected
           diff = getDiff (lines expected') (lines actual')
           expected'' = unlines $ map vividize $ filter (\(d,_) -> d /= S) diff
           actual'' = unlines $ map vividize $ filter (\(d,_) -> d /= F) diff
           dashes "" = replicate 72 '-'
           dashes x = replicate (72 - length x - 5) '-' ++ " " ++ x ++ " ---"
765
false
true
0
18
343
340
175
165
null
null
creswick/pgftransform
app/haskell/Config.hs
apache-2.0
detailsHeader :: [String] detailsHeader = [ "For example:" , "" , "TODO: add examples" ]
136
detailsHeader :: [String] detailsHeader = [ "For example:" , "" , "TODO: add examples" ]
136
detailsHeader = [ "For example:" , "" , "TODO: add examples" ]
110
false
true
0
5
62
23
14
9
null
null
phischu/fragnix
tests/packages/scotty/System.FilePath.Windows.hs
bsd-3-clause
hasLeadingPathSeparator :: FilePath -> Bool hasLeadingPathSeparator "" = False
78
hasLeadingPathSeparator :: FilePath -> Bool hasLeadingPathSeparator "" = False
78
hasLeadingPathSeparator "" = False
34
false
true
0
5
8
18
9
9
null
null
pbl64k/HackerRank-Contests
2014-10-10-FP/Infer/i.nlp.hs
bsd-2-clause
q n m t = (t, n, m)
19
q n m t = (t, n, m)
19
q n m t = (t, n, m)
19
false
false
0
5
7
23
12
11
null
null
Kromgart/tesML
src/Main.hs
agpl-3.0
dataFolderPath :: FilePath dataFolderPath = "Data/"
51
dataFolderPath :: FilePath dataFolderPath = "Data/"
51
dataFolderPath = "Data/"
24
false
true
0
4
5
11
6
5
null
null
piccolo-lang/piccolo
src/Backend/CBackend.hs
gpl-3.0
emitInstr (Case lbl) = do
  decrIndent
  emitLn $ "case " ++ show lbl ++ ":"
  incrIndent
97
emitInstr (Case lbl) = do
  decrIndent
  emitLn $ "case " ++ show lbl ++ ":"
  incrIndent
97
emitInstr (Case lbl) = do
  decrIndent
  emitLn $ "case " ++ show lbl ++ ":"
  incrIndent
97
false
false
0
9
29
39
17
22
null
null
aretoky/egison2
hs-src/Language/Egison/Types.hs
mit
runIOThrows :: IOThrowsError String -> IO (Maybe String)
runIOThrows action = do
    runState <- runErrorT action
    case runState of
        Left err -> return $ Just (show err)
        Right _ -> return $ Nothing

--
-- Expression
--
236
runIOThrows :: IOThrowsError String -> IO (Maybe String)
runIOThrows action = do
    runState <- runErrorT action
    case runState of
        Left err -> return $ Just (show err)
        Right _ -> return $ Nothing

--
-- Expression
--
236
runIOThrows action = do
    runState <- runErrorT action
    case runState of
        Left err -> return $ Just (show err)
        Right _ -> return $ Nothing

--
-- Expression
--
179
false
true
0
14
61
91
41
50
null
null