code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Monitoring.Services.ServiceLevelObjectives.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Update the given ServiceLevelObjective.
--
-- /See:/ <https://cloud.google.com/monitoring/api/ Cloud Monitoring API Reference> for @monitoring.services.serviceLevelObjectives.patch@.
module Network.Google.Resource.Monitoring.Services.ServiceLevelObjectives.Patch
(
-- * REST Resource
ServicesServiceLevelObjectivesPatchResource
-- * Creating a Request
, servicesServiceLevelObjectivesPatch
, ServicesServiceLevelObjectivesPatch
-- * Request Lenses
, sslopXgafv
, sslopUploadProtocol
, sslopUpdateMask
, sslopAccessToken
, sslopUploadType
, sslopPayload
, sslopName
, sslopCallback
) where
import Network.Google.Monitoring.Types
import Network.Google.Prelude
-- | A resource alias for @monitoring.services.serviceLevelObjectives.patch@ method which the
-- 'ServicesServiceLevelObjectivesPatch' request conforms to.
-- Servant-style route: PATCH v3/{name}, carrying a 'ServiceLevelObjective'
-- request body and returning the updated 'ServiceLevelObjective'.  The
-- @updateMask@ query parameter selects which fields are overwritten; the
-- rest are the standard Google API query parameters.
type ServicesServiceLevelObjectivesPatchResource =
     "v3" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "updateMask" GFieldMask :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "callback" Text :>
                     QueryParam "alt" AltJSON :>
                       ReqBody '[JSON] ServiceLevelObjective :>
                         Patch '[JSON] ServiceLevelObjective
-- | Update the given ServiceLevelObjective.
--
-- /See:/ 'servicesServiceLevelObjectivesPatch' smart constructor.
data ServicesServiceLevelObjectivesPatch =
  ServicesServiceLevelObjectivesPatch'
    { _sslopXgafv :: !(Maybe Xgafv) -- ^ V1 error format.
    , _sslopUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media.
    , _sslopUpdateMask :: !(Maybe GFieldMask) -- ^ Fields to overwrite in the update.
    , _sslopAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _sslopUploadType :: !(Maybe Text) -- ^ Legacy upload protocol for media.
    , _sslopPayload :: !ServiceLevelObjective -- ^ Request body.
    , _sslopName :: !Text -- ^ Resource name (path 'Capture').
    , _sslopCallback :: !(Maybe Text) -- ^ JSONP callback.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ServicesServiceLevelObjectivesPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sslopXgafv'
--
-- * 'sslopUploadProtocol'
--
-- * 'sslopUpdateMask'
--
-- * 'sslopAccessToken'
--
-- * 'sslopUploadType'
--
-- * 'sslopPayload'
--
-- * 'sslopName'
--
-- * 'sslopCallback'
-- All optional fields start out as 'Nothing'; only the payload and the
-- resource name are mandatory.
servicesServiceLevelObjectivesPatch
    :: ServiceLevelObjective -- ^ 'sslopPayload'
    -> Text -- ^ 'sslopName'
    -> ServicesServiceLevelObjectivesPatch
servicesServiceLevelObjectivesPatch pSslopPayload_ pSslopName_ =
  ServicesServiceLevelObjectivesPatch'
    { _sslopXgafv = Nothing
    , _sslopUploadProtocol = Nothing
    , _sslopUpdateMask = Nothing
    , _sslopAccessToken = Nothing
    , _sslopUploadType = Nothing
    , _sslopPayload = pSslopPayload_
    , _sslopName = pSslopName_
    , _sslopCallback = Nothing
    }
-- The following are van Laarhoven lenses over the request record's
-- fields; each corresponds to one query parameter, path element, or the
-- body of the route above.
-- | V1 error format.
sslopXgafv :: Lens' ServicesServiceLevelObjectivesPatch (Maybe Xgafv)
sslopXgafv
  = lens _sslopXgafv (\ s a -> s{_sslopXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
sslopUploadProtocol :: Lens' ServicesServiceLevelObjectivesPatch (Maybe Text)
sslopUploadProtocol
  = lens _sslopUploadProtocol
      (\ s a -> s{_sslopUploadProtocol = a})
-- | A set of field paths defining which fields to use for the update.
sslopUpdateMask :: Lens' ServicesServiceLevelObjectivesPatch (Maybe GFieldMask)
sslopUpdateMask
  = lens _sslopUpdateMask
      (\ s a -> s{_sslopUpdateMask = a})
-- | OAuth access token.
sslopAccessToken :: Lens' ServicesServiceLevelObjectivesPatch (Maybe Text)
sslopAccessToken
  = lens _sslopAccessToken
      (\ s a -> s{_sslopAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
sslopUploadType :: Lens' ServicesServiceLevelObjectivesPatch (Maybe Text)
sslopUploadType
  = lens _sslopUploadType
      (\ s a -> s{_sslopUploadType = a})
-- | Multipart request metadata.
sslopPayload :: Lens' ServicesServiceLevelObjectivesPatch ServiceLevelObjective
sslopPayload
  = lens _sslopPayload (\ s a -> s{_sslopPayload = a})
-- | Resource name for this ServiceLevelObjective. The format is:
-- projects\/[PROJECT_ID_OR_NUMBER]\/services\/[SERVICE_ID]\/serviceLevelObjectives\/[SLO_NAME]
sslopName :: Lens' ServicesServiceLevelObjectivesPatch Text
sslopName
  = lens _sslopName (\ s a -> s{_sslopName = a})
-- | JSONP
sslopCallback :: Lens' ServicesServiceLevelObjectivesPatch (Maybe Text)
sslopCallback
  = lens _sslopCallback
      (\ s a -> s{_sslopCallback = a})
instance GoogleRequest
           ServicesServiceLevelObjectivesPatch
         where
        -- The response body type.
        type Rs ServicesServiceLevelObjectivesPatch =
             ServiceLevelObjective
        -- OAuth scopes this request may be authorised under.
        type Scopes ServicesServiceLevelObjectivesPatch =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/monitoring"]
        -- Maps the record fields onto the route; the argument order must
        -- match the Capture/QueryParam/ReqBody order in
        -- 'ServicesServiceLevelObjectivesPatchResource'.
        requestClient
          ServicesServiceLevelObjectivesPatch'{..}
          = go _sslopName _sslopXgafv _sslopUploadProtocol
              _sslopUpdateMask
              _sslopAccessToken
              _sslopUploadType
              _sslopCallback
              (Just AltJSON)
              _sslopPayload
              monitoringService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ServicesServiceLevelObjectivesPatchResource)
                      mempty
| brendanhay/gogol | gogol-monitoring/gen/Network/Google/Resource/Monitoring/Services/ServiceLevelObjectives/Patch.hs | mpl-2.0 | 6,278 | 0 | 17 | 1,353 | 859 | 500 | 359 | 131 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.ImportExport.ListJobs
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- This operation returns the jobs associated with the requester. AWS
-- Import\/Export lists the jobs in reverse chronological order based on
-- the date of creation. For example if Job Test1 was created 2009Dec30 and
-- Test2 was created 2010Feb05, the ListJobs operation would return Test2
-- followed by Test1.
--
-- /See:/ <http://docs.aws.amazon.com/AWSImportExport/latest/DG/WebListJobs.html AWS API Reference> for ListJobs.
--
-- This operation returns paginated results.
module Network.AWS.ImportExport.ListJobs
(
-- * Creating a Request
listJobs
, ListJobs
-- * Request Lenses
, ljAPIVersion
, ljMarker
, ljMaxJobs
-- * Destructuring the Response
, listJobsResponse
, ListJobsResponse
-- * Response Lenses
, ljrsJobs
, ljrsIsTruncated
, ljrsResponseStatus
) where
import Network.AWS.ImportExport.Types
import Network.AWS.ImportExport.Types.Product
import Network.AWS.Pager
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Input structure for the ListJobs operation.
--
-- /See:/ 'listJobs' smart constructor.
data ListJobs = ListJobs'
    { _ljAPIVersion :: !(Maybe Text) -- ^ Serialised as the @APIVersion@ query parameter.
    , _ljMarker :: !(Maybe Text) -- ^ Job ID after which listing resumes (pagination).
    , _ljMaxJobs :: !(Maybe Int) -- ^ Serialised as @MaxJobs@; presumably the page-size limit — confirm against AWS docs.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListJobs' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ljAPIVersion'
--
-- * 'ljMarker'
--
-- * 'ljMaxJobs'
-- All fields are optional, so the minimal request is entirely empty.
listJobs
    :: ListJobs
listJobs =
    ListJobs'
    { _ljAPIVersion = Nothing
    , _ljMarker = Nothing
    , _ljMaxJobs = Nothing
    }
-- | API version string, sent as the @APIVersion@ query parameter.
ljAPIVersion :: Lens' ListJobs (Maybe Text)
ljAPIVersion = lens _ljAPIVersion (\ s a -> s{_ljAPIVersion = a});
-- | Pagination marker: the job ID to resume listing after.  The
-- 'AWSPager' instance sets this to the last job ID of the previous page.
ljMarker :: Lens' ListJobs (Maybe Text)
ljMarker = lens _ljMarker (\ s a -> s{_ljMarker = a});
-- | Sent as the @MaxJobs@ query parameter; presumably the maximum number
-- of jobs returned per page — confirm against the AWS API reference.
ljMaxJobs :: Lens' ListJobs (Maybe Int)
ljMaxJobs = lens _ljMaxJobs (\ s a -> s{_ljMaxJobs = a});
instance AWSPager ListJobs where
        page rq rs
          -- Stop when the service says the listing is not truncated.
          | stop (rs ^. ljrsIsTruncated) = Nothing
          -- Stop when the page carries no last job ID to resume from.
          | isNothing (rs ^? ljrsJobs . _last . jobJobId) =
            Nothing
          -- Otherwise re-issue the request, resuming after the last job
          -- ID of this page.
          | otherwise =
            Just $ rq &
              ljMarker .~ rs ^? ljrsJobs . _last . jobJobId
instance AWSRequest ListJobs where
        type Rs ListJobs = ListJobsResponse
        -- ListJobs is sent as a query-string POST against the
        -- Import\/Export service.
        request = postQuery importExport
        -- The XML answer is wrapped in a @ListJobsResult@ element;
        -- @Jobs@ holds a list of @member@ elements and defaults to empty
        -- when absent.  The third field records the HTTP status code.
        response
          = receiveXMLWrapper "ListJobsResult"
              (\ s h x ->
                 ListJobsResponse' <$>
                   (x .@? "Jobs" .!@ mempty >>=
                      may (parseXMLList "member"))
                     <*> (x .@? "IsTruncated")
                     <*> (pure (fromEnum s)))
instance ToHeaders ListJobs where
        -- No request-specific headers are needed.
        toHeaders = const mempty
instance ToPath ListJobs where
        -- All requests go to the service root.
        toPath = const "/"
instance ToQuery ListJobs where
        -- Fixed operation\/action\/version parameters plus the optional
        -- record fields.
        toQuery ListJobs'{..}
          = mconcat
              ["Operation=ListJobs",
               "Action" =: ("ListJobs" :: ByteString),
               "Version" =: ("2010-06-01" :: ByteString),
               "APIVersion" =: _ljAPIVersion, "Marker" =: _ljMarker,
               "MaxJobs" =: _ljMaxJobs]
-- | Output structure for the ListJobs operation.
--
-- /See:/ 'listJobsResponse' smart constructor.
data ListJobsResponse = ListJobsResponse'
    { _ljrsJobs :: !(Maybe [Job]) -- ^ Jobs parsed from the @Jobs@/@member@ XML elements.
    , _ljrsIsTruncated :: !(Maybe Bool) -- ^ Whether more pages remain (drives 'AWSPager').
    , _ljrsResponseStatus :: !Int -- ^ HTTP response status code.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListJobsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ljrsJobs'
--
-- * 'ljrsIsTruncated'
--
-- * 'ljrsResponseStatus'
listJobsResponse
    :: Int -- ^ 'ljrsResponseStatus'
    -> ListJobsResponse
listJobsResponse pResponseStatus_ =
    ListJobsResponse'
    { _ljrsJobs = Nothing
    , _ljrsIsTruncated = Nothing
    , _ljrsResponseStatus = pResponseStatus_
    }
-- | The jobs returned in this page.  The @_Default . _Coerce@ suffix
-- presents the optional field as a plain list, defaulting to empty.
ljrsJobs :: Lens' ListJobsResponse [Job]
ljrsJobs = lens _ljrsJobs (\ s a -> s{_ljrsJobs = a}) . _Default . _Coerce;
-- | Whether the listing was cut short and further pages exist; used by
-- the 'AWSPager' instance to decide whether to continue.
ljrsIsTruncated :: Lens' ListJobsResponse (Maybe Bool)
ljrsIsTruncated = lens _ljrsIsTruncated (\ s a -> s{_ljrsIsTruncated = a});
-- | The response status code.
ljrsResponseStatus :: Lens' ListJobsResponse Int
ljrsResponseStatus = lens _ljrsResponseStatus (\ s a -> s{_ljrsResponseStatus = a});
| olorin/amazonka | amazonka-importexport/gen/Network/AWS/ImportExport/ListJobs.hs | mpl-2.0 | 5,270 | 0 | 16 | 1,269 | 926 | 541 | 385 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.ServiceAccountkeys.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Generates new credentials for the service account associated with this
-- enterprise. The calling service account must have been retrieved by
-- calling Enterprises.GetServiceAccount and must have been set as the
-- enterprise service account by calling Enterprises.SetAccount. Only the
-- type of the key should be populated in the resource to be inserted.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.serviceaccountkeys.insert@.
module Network.Google.Resource.AndroidEnterprise.ServiceAccountkeys.Insert
(
-- * REST Resource
ServiceAccountkeysInsertResource
-- * Creating a Request
, serviceAccountkeysInsert
, ServiceAccountkeysInsert
-- * Request Lenses
, saiXgafv
, saiUploadProtocol
, saiEnterpriseId
, saiAccessToken
, saiUploadType
, saiPayload
, saiCallback
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.serviceaccountkeys.insert@ method which the
-- 'ServiceAccountkeysInsert' request conforms to.
-- Servant-style route: POST
-- androidenterprise/v1/enterprises/{enterpriseId}/serviceAccountKeys,
-- sending and receiving a 'ServiceAccountKey', plus the standard Google
-- API query parameters.
type ServiceAccountkeysInsertResource =
     "androidenterprise" :>
       "v1" :>
         "enterprises" :>
           Capture "enterpriseId" Text :>
             "serviceAccountKeys" :>
               QueryParam "$.xgafv" Xgafv :>
                 QueryParam "upload_protocol" Text :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] ServiceAccountKey :>
                             Post '[JSON] ServiceAccountKey
-- | Generates new credentials for the service account associated with this
-- enterprise. The calling service account must have been retrieved by
-- calling Enterprises.GetServiceAccount and must have been set as the
-- enterprise service account by calling Enterprises.SetAccount. Only the
-- type of the key should be populated in the resource to be inserted.
--
-- /See:/ 'serviceAccountkeysInsert' smart constructor.
data ServiceAccountkeysInsert =
  ServiceAccountkeysInsert'
    { _saiXgafv :: !(Maybe Xgafv) -- ^ V1 error format.
    , _saiUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media.
    , _saiEnterpriseId :: !Text -- ^ Enterprise ID (path 'Capture').
    , _saiAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _saiUploadType :: !(Maybe Text) -- ^ Legacy upload protocol for media.
    , _saiPayload :: !ServiceAccountKey -- ^ Request body.
    , _saiCallback :: !(Maybe Text) -- ^ JSONP callback.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ServiceAccountkeysInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'saiXgafv'
--
-- * 'saiUploadProtocol'
--
-- * 'saiEnterpriseId'
--
-- * 'saiAccessToken'
--
-- * 'saiUploadType'
--
-- * 'saiPayload'
--
-- * 'saiCallback'
-- Only the enterprise ID and the payload are mandatory; all query
-- parameters default to 'Nothing'.
serviceAccountkeysInsert
    :: Text -- ^ 'saiEnterpriseId'
    -> ServiceAccountKey -- ^ 'saiPayload'
    -> ServiceAccountkeysInsert
serviceAccountkeysInsert pSaiEnterpriseId_ pSaiPayload_ =
  ServiceAccountkeysInsert'
    { _saiXgafv = Nothing
    , _saiUploadProtocol = Nothing
    , _saiEnterpriseId = pSaiEnterpriseId_
    , _saiAccessToken = Nothing
    , _saiUploadType = Nothing
    , _saiPayload = pSaiPayload_
    , _saiCallback = Nothing
    }
-- Van Laarhoven lenses over the request record's fields; each maps to
-- one element of 'ServiceAccountkeysInsertResource'.
-- | V1 error format.
saiXgafv :: Lens' ServiceAccountkeysInsert (Maybe Xgafv)
saiXgafv = lens _saiXgafv (\ s a -> s{_saiXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
saiUploadProtocol :: Lens' ServiceAccountkeysInsert (Maybe Text)
saiUploadProtocol
  = lens _saiUploadProtocol
      (\ s a -> s{_saiUploadProtocol = a})
-- | The ID of the enterprise.
saiEnterpriseId :: Lens' ServiceAccountkeysInsert Text
saiEnterpriseId
  = lens _saiEnterpriseId
      (\ s a -> s{_saiEnterpriseId = a})
-- | OAuth access token.
saiAccessToken :: Lens' ServiceAccountkeysInsert (Maybe Text)
saiAccessToken
  = lens _saiAccessToken
      (\ s a -> s{_saiAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
saiUploadType :: Lens' ServiceAccountkeysInsert (Maybe Text)
saiUploadType
  = lens _saiUploadType
      (\ s a -> s{_saiUploadType = a})
-- | Multipart request metadata.
saiPayload :: Lens' ServiceAccountkeysInsert ServiceAccountKey
saiPayload
  = lens _saiPayload (\ s a -> s{_saiPayload = a})
-- | JSONP
saiCallback :: Lens' ServiceAccountkeysInsert (Maybe Text)
saiCallback
  = lens _saiCallback (\ s a -> s{_saiCallback = a})
instance GoogleRequest ServiceAccountkeysInsert where
        -- The response body type.
        type Rs ServiceAccountkeysInsert = ServiceAccountKey
        -- OAuth scope this request must be authorised under.
        type Scopes ServiceAccountkeysInsert =
             '["https://www.googleapis.com/auth/androidenterprise"]
        -- Argument order must match the Capture/QueryParam/ReqBody order
        -- in 'ServiceAccountkeysInsertResource'.
        requestClient ServiceAccountkeysInsert'{..}
          = go _saiEnterpriseId _saiXgafv _saiUploadProtocol
              _saiAccessToken
              _saiUploadType
              _saiCallback
              (Just AltJSON)
              _saiPayload
              androidEnterpriseService
          where go
                  = buildClient
                      (Proxy :: Proxy ServiceAccountkeysInsertResource)
                      mempty
| brendanhay/gogol | gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/ServiceAccountkeys/Insert.hs | mpl-2.0 | 6,018 | 0 | 19 | 1,338 | 794 | 465 | 329 | 117 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Directory.ResolvedAppAccessSettings.GetSettings
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves resolved app access settings of the logged in user.
--
-- /See:/ <https://developers.google.com/admin-sdk/directory/ Admin Directory API Reference> for @directory.resolvedAppAccessSettings.GetSettings@.
module Network.Google.Resource.Directory.ResolvedAppAccessSettings.GetSettings
(
-- * REST Resource
ResolvedAppAccessSettingsGetSettingsResource
-- * Creating a Request
, resolvedAppAccessSettingsGetSettings
, ResolvedAppAccessSettingsGetSettings
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @directory.resolvedAppAccessSettings.GetSettings@ method which the
-- 'ResolvedAppAccessSettingsGetSettings' request conforms to.
-- Servant-style route: GET admin/directory/v1/resolvedappaccesssettings
-- returning 'AppAccessCollections'.  The request carries no parameters
-- beyond the standard @alt@ one.
type ResolvedAppAccessSettingsGetSettingsResource =
     "admin" :>
       "directory" :>
         "v1" :>
           "resolvedappaccesssettings" :>
             QueryParam "alt" AltJSON :>
               Get '[JSON] AppAccessCollections
-- | Retrieves resolved app access settings of the logged in user.
--
-- /See:/ 'resolvedAppAccessSettingsGetSettings' smart constructor.
-- A nullary request record: this endpoint takes no parameters.
data ResolvedAppAccessSettingsGetSettings =
  ResolvedAppAccessSettingsGetSettings'
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ResolvedAppAccessSettingsGetSettings' with the minimum fields required to make a request.
--
resolvedAppAccessSettingsGetSettings
    :: ResolvedAppAccessSettingsGetSettings
resolvedAppAccessSettingsGetSettings = ResolvedAppAccessSettingsGetSettings'
instance GoogleRequest
           ResolvedAppAccessSettingsGetSettings
         where
        type Rs ResolvedAppAccessSettingsGetSettings =
             AppAccessCollections
        -- No OAuth scopes are declared for this request.
        type Scopes ResolvedAppAccessSettingsGetSettings =
             '[]
        -- The only argument supplied to the route is @alt=json@.
        requestClient ResolvedAppAccessSettingsGetSettings'{}
          = go (Just AltJSON) directoryService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ResolvedAppAccessSettingsGetSettingsResource)
                      mempty
| brendanhay/gogol | gogol-admin-directory/gen/Network/Google/Resource/Directory/ResolvedAppAccessSettings/GetSettings.hs | mpl-2.0 | 2,879 | 0 | 12 | 603 | 221 | 137 | 84 | 45 | 1 |
module BrickStacking.A333650Spec (main, spec) where
import Test.Hspec
import BrickStacking.A333650 (a333650)
main :: IO ()
main = hspec spec

-- | Checks 'a333650' against the first six rows of the triangle
-- (21 terms, read left to right).
spec :: Spec
spec =
  describe "A333650" $
    it "correctly computes the six rows elements" $
      let expected = [1,1,2,1,4,4,1,7,11,8,1,12,24,28,16,1,20,52,70,68,32]
      in map a333650 [1..21] `shouldBe` expected
| peterokagey/haskellOEIS | test/BrickStacking/A333650Spec.hs | apache-2.0 | 365 | 0 | 8 | 57 | 157 | 94 | 63 | 10 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Count running jobs on Jenkins instance
--
-- Uses an awful hack, that is inspecting the job ball color. Jenkins sets
-- it to "blue_anime", meaning "animated blue ball" if job is running
module Main (main) where
import Control.Lens -- lens
import Data.Aeson.Lens -- lens-aeson
import Data.ByteString.Lazy (ByteString) -- bytestring
import Env -- envparse
import Jenkins.Rest (Jenkins, (-?-), (-=-))
import qualified Jenkins.Rest as Jenkins -- libjenkins
import Text.Printf (printf) -- base
main :: IO ()
main = do
  -- Read Jenkins connection settings from the environment, then count
  -- jobs whose ball colour says they are running.
  conf <- envConf
  jobs <- Jenkins.run conf getJobs
  printf "Running jobs count: %d\n" (lengthOf (_Right.running) jobs)
-- | Build the Jenkins connection description from the JENKINS_URL,
-- JENKINS_USERNAME and JENKINS_API_TOKEN environment variables.
envConf :: IO Jenkins.Master
envConf = Env.parse (desc "Get running jobs count") $
  Jenkins.Master <$> var str "JENKINS_URL" (help "Jenkins URL")
                 <*> var str "JENKINS_USERNAME" (help "Jenkins username")
                 <*> var str "JENKINS_API_TOKEN" (help "Jenkins API token")
-- | Fetch the job list as JSON, restricted to each job's @color@ field.
getJobs :: Jenkins ByteString
getJobs = Jenkins.get Jenkins.json ("" -?- "tree" -=- "jobs[color]")
-- | Fold over the JSON response matching jobs whose colour is
-- \"blue_anime\" — Jenkins' marker for an animated (running) blue ball.
running :: Fold ByteString ()
running = key "jobs".values.key "color"._String.only "blue_anime"
| supki/libjenkins | example/running-jobs-count.hs | bsd-2-clause | 1,333 | 0 | 11 | 367 | 288 | 159 | 129 | 23 | 1 |
module Codec.Base64 (encode, decode) where
import Data.Array.Unboxed
import Data.Bits
import Data.Char
import Data.Word
-- |
-- Base64 encoding.
--
-- >>> encode "foo bar"
-- "Zm9vIGJhcg=="
encode :: String -> String
encode input = map (base64array !) (encode' bytes)
  where
    -- Each Char is narrowed to its low byte; callers are expected to
    -- supply octet data.
    bytes = map (fromIntegral . ord) input

-- Group input bytes in threes and emit four 6-bit digits per group,
-- padding the final short group with the 'pad' sentinel.
encode' :: [Word8] -> [Word8]
encode' []         = []
encode' [a]        = [e1 a, e2 a 0, pad, pad]
encode' [a,b]      = [e1 a, e2 a b, e3 b 0, pad]
encode' (a:b:c:xs) = [e1 a, e2 a b, e3 b c, e4 c] ++ encode' xs
-- Encode helpers: split three input bytes into four 6-bit digits.
e1,e4 :: Word8 -> Word8
e2,e3 :: Word8 -> Word8 -> Word8
-- Top six bits of the first byte.
e1 a = a `shiftR` 2
-- Low two bits of the first byte, high four of the second.
e2 a b = ((a .&. 0x03) `shiftL` 4) .|. (b `shiftR` 4)
-- Low four bits of the second byte, high two of the third.
e3 b c = ((b .&. 0x0f) `shiftL` 2) .|. (c `shiftR` 6)
-- Low six bits of the third byte.
e4 c = c .&. 0x3f
-- The base64 alphabet, in digit order; the final '=' (index 64) is the
-- padding character.
base64 :: String
base64 = ['A'..'Z']++['a'..'z']++['0'..'9']++"+/="
-- Lookup table from a 6-bit digit value (or 'pad') to its output
-- character.
base64array :: UArray Word8 Char
base64array = array (0,pad) $ zip [0..pad] base64
-- Sentinel digit value standing for the '=' padding character.
pad :: Word8
pad = 64
-- |
-- Base64 decoding.
--
-- >>> decode "Zm9vIGJhcg=="
-- "foo bar"
decode :: String -> String
decode input = map (chr . fromIntegral) (decode' digits)
  where
    -- Translate each base64 character to its 6-bit digit value first.
    digits = map ((base128array !) . fromIntegral . ord) input
-- Decode groups of four 6-bit digits into (up to) three bytes.  A digit
-- equal to 'pad' marks the end of the data: one '=' means the final
-- group carries two bytes, two '=' means it carries one.  Valid base64
-- input always has a length that is a multiple of four; anything else
-- is malformed and reported with a descriptive error.
decode' :: [Word8] -> [Word8]
decode' [] = []
decode' (a:b:c:d:xs)
  | c == pad = [d1 a b]
  | d == pad = [d1 a b, d2 b c]
  | otherwise = d1 a b : d2 b c : d3 c d : decode' xs
decode' _ = error "Codec.Base64.decode: input length is not a multiple of 4"
-- Decode helpers: merge adjacent 6-bit digits back into full bytes.
d1,d2,d3 :: Word8 -> Word8 -> Word8
-- First byte: all of digit one plus the high bits of digit two.
d1 a b = (a `shiftL` 2) .|. (b `shiftR` 4)
-- Second byte: low nibble of digit two plus the high bits of digit three.
d2 b c = ((b .&. 0x0f) `shiftL` 4) .|. (c `shiftR` 2)
-- Third byte: low two bits of digit three plus all of digit four.
d3 c d = ((c .&. 0x03) `shiftL` 6) .|. d
-- Lookup table from an ASCII code to the 6-bit value of the
-- corresponding base64 digit.  'ng' marks characters outside the
-- alphabet, and 64 (at index 61, '=') marks padding.
-- NOTE(review): the bounds are (0,255) but 'base128' supplies only 128
-- associations, so indices 128..255 are undefined array elements;
-- decoding non-ASCII input would fail at lookup time — confirm intended.
base128array :: UArray Word8 Word8
base128array = array (0,255) $ zip [0..255] base128
-- "No good": value used for characters that are not base64 digits.
-- NOTE(review): invalid characters therefore silently decode as digit 0
-- rather than raising an error.
ng :: Word8
ng = 0
-- Rows cover ASCII 0x00-0x7f: '+' -> 62, '/' -> 63, '0'..'9' -> 52..61,
-- '=' -> 64, 'A'..'Z' -> 0..25, 'a'..'z' -> 26..51.
base128 :: [Word8]
base128 =
 [ ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng
 , ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng
 , ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, ng, 62, ng, ng, ng, 63
 , 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, ng, ng, ng, 64, ng, ng
 , ng, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14
 , 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, ng, ng, ng, ng, ng
 , ng, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40
 , 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, ng, ng, ng, ng, ng
 ]
| tonosaman/haskell-code-snippets | src/Codec/Base64.hs | bsd-2-clause | 2,173 | 0 | 10 | 605 | 1,200 | 682 | 518 | 53 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Most of this code depends on OverloadedStrings.
--
-- This is a utility package for Scotty web framework which provides signed cookie functionality
module Web.Scotty.SignedCookies ( setSignedCookie
, getSignedCookie
, deleteCookie ) where
import Control.Monad.IO.Class
import Data.Binary.Builder (Builder, toLazyByteString)
import Data.Digest.Pure.SHA
import Data.Monoid ((<>))
import Data.Text (Text, pack)
import Data.Text.Encoding as E
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LTE
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Char8 as S
import Blaze.ByteString.Builder (toLazyByteString)
import Web.Cookie
import Web.Scotty
import Web.Scotty.Internal.Types (ActionError (Next))
import Data.Attoparsec.Text
import Control.Applicative
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Web.Scotty.SignedCookies.SignedCookiesInternal
-- | Set a cookie whose value carries a signature: the stored value
-- becomes @value|hash@, where the hash is computed by 'generateHash'
-- over the cookie's name and value using the given secret.  All other
-- cookie attributes are passed through unchanged.
setSignedCookie :: Text -- ^ secret
                -> SetCookie -- ^ cookie
                -> ActionM ()
setSignedCookie secret cookie = do
  -- The signed material is name <> value, so moving a value between
  -- differently-named cookies invalidates the signature.
  let cv = LBS.fromStrict $ setCookieName cookie <> setCookieValue cookie
      bs = (LTE.encodeUtf8 . LT.fromStrict) secret
      hash = S.pack $ generateHash bs cv
      -- Rebuild the cookie with the hash appended after a '|' separator.
      newCookie = def { setCookieName = setCookieName cookie
                      , setCookieValue = setCookieValue cookie <> "|" <> hash
                      , setCookiePath = setCookiePath cookie
                      , setCookieExpires = setCookieExpires cookie
                      , setCookieMaxAge = setCookieMaxAge cookie
                      , setCookieDomain = setCookieDomain cookie
                      , setCookieHttpOnly = setCookieHttpOnly cookie
                      , setCookieSecure = setCookieSecure cookie
                      , setCookieSameSite = setCookieSameSite cookie }
  addHeader "Set-Cookie" $ (LTE.decodeUtf8 . toLazyByteString . renderSetCookie) newCookie
-- | Render a 'SetCookie' into the lazy 'LT.Text' form used as the value
-- of a @Set-Cookie@ header.
setCookieToText :: SetCookie -> LT.Text
setCookieToText = LTE.decodeUtf8 . toLazyByteString . renderSetCookie
-- | Get a cookie value if it exists; return 'Nothing' if the key doesn't
-- exist or the hash value doesn't match
-- > getSignedCookie "secret" "userid"
getSignedCookie :: Text -- ^ secret key to verify hashed values
                -> Text -- ^ key to retrieve
                -> ActionM (Maybe Text)
getSignedCookie secret key = do
  -- get headers as Maybe Text
  h <- header "Cookie"
  -- parse Text of maybe with attoparsec
  let maybeCookies = fmap (parseOnly getCookies . LT.toStrict) h
  case maybeCookies of
    Just a -> case a of
      Right cookies -> if null filteredCookies
                          then return Nothing
                          else return response
        where -- Cookies whose name matches the requested key.
              filteredCookies = filter (\(c, _) -> E.decodeUtf8 (setCookieName c) == key) cookies
              -- ...of those, only the ones whose signature verifies
              -- against the secret.
              filteredAndVerified = filter (validateCookie secret) filteredCookies
              -- 'head' is safe: only reached when the list is non-empty.
              response = if null filteredAndVerified
                            then Nothing
                            else Just $ (E.decodeUtf8 . setCookieValue) $ fst (head filteredAndVerified)
      -- Cookie header present but unparseable.
      _ -> return Nothing
    -- No Cookie header was sent at all.
    _ -> return Nothing
deleteCookie :: Text -- ^ key to remove
             -> ActionM ()
deleteCookie key = addHeader "Set-Cookie" (setCookieToText expired)
  where
    -- An expiry far in the past (epoch + 1s) makes the browser drop the
    -- cookie immediately.
    expired = def { setCookieName = E.encodeUtf8 key
                  , setCookieValue = ""
                  , setCookieExpires = Just (posixSecondsToUTCTime 1) }
| kgwinnup/signed-cookies | Web/Scotty/SignedCookies.hs | bsd-2-clause | 3,682 | 0 | 21 | 1,131 | 730 | 407 | 323 | 65 | 5 |
-- | Support for running propellor, as built outside a container,
-- inside the container, without needing to install anything into the
-- container.
--
-- Note: This is currently Debian specific, due to glibcLibs.
module Propellor.Shim (setup, cleanEnv, file) where
import Propellor
import Utility.LinuxMkLibs
import Utility.SafeCommand
import Utility.FileMode
import Utility.FileSystemEncoding
import Data.List
import System.Posix.Files
-- | Sets up a shimmed version of the program, in a directory, and
-- returns its path.
--
-- Propellor may be running from an existing shim, in which case it's
-- simply reused.
setup :: FilePath -> Maybe FilePath -> FilePath -> IO FilePath
setup propellorbin propellorbinpath dest = checkAlreadyShimmed propellorbin $ do
	createDirectoryIfMissing True dest
	-- Shared libraries the binary links against, plus glibc's own
	-- support libraries.
	libs <- parseLdd <$> readProcess "ldd" [propellorbin]
	glibclibs <- glibcLibs
	let libs' = nub $ libs ++ glibclibs
	-- Copy each library under dest, recording the directories they land
	-- in for the dynamic linker's --library-path.
	libdirs <- map (dest ++) . nub . catMaybes
		<$> mapM (installLib installFile dest) libs'
	let linker = (dest ++) $
		fromMaybe (error "cannot find ld-linux linker") $
			headMaybe $ filter ("ld-linux" `isInfixOf`) libs'
	-- Symlink named after the program, so the process name shown by ps
	-- is recognizable rather than "ld-linux".
	-- NOTE(review): the link target/path arguments and the relative
	-- path here are subtle — confirm the link is created where intended.
	let linkersym = takeDirectory linker </> takeFileName propellorbin
	createSymbolicLink linkersym (takeFileName linker)
	-- glibc loads its gconv character-set modules at runtime; point
	-- GCONV_PATH at the copied ones.
	let gconvdir = (dest ++) $ takeDirectory $
		fromMaybe (error "cannot find gconv directory") $
			headMaybe $ filter ("/gconv/" `isInfixOf`) glibclibs
	let linkerparams = ["--library-path", intercalate ":" libdirs ]
	let shim = file propellorbin dest
	-- The shim is a shell script that execs the copied dynamic linker
	-- against the (possibly remapped) propellor binary.
	writeFile shim $ unlines
		[ shebang
		, "GCONV_PATH=" ++ shellEscape gconvdir
		, "export GCONV_PATH"
		, "exec " ++ unwords (map shellEscape $ linkersym : linkerparams) ++
			" " ++ shellEscape (fromMaybe propellorbin propellorbinpath) ++ " \"$@\""
		]
	modifyFileMode shim (addModes executeModes)
	return shim
-- First line of every shim script written by 'setup'.
shebang :: String
shebang = "#!/bin/sh"
-- | If the file is already a shim script (its first line matches the
-- shebang written by 'setup'), return its path; otherwise run the
-- fallback action to build a fresh shim.
checkAlreadyShimmed :: FilePath -> IO FilePath -> IO FilePath
checkAlreadyShimmed f nope = withFile f ReadMode $ \h -> do
	fileEncoding h
	s <- hGetLine h
	if s == shebang
		then return f
		else nope
-- Called when the shimmed propellor is running, so that commands it runs
-- don't see it.
-- Undoes the GCONV_PATH export made by the shim script in 'setup'.
cleanEnv :: IO ()
cleanEnv = void $ unsetEnv "GCONV_PATH"
-- | The path at which the shim for a given binary lives, inside the
-- destination directory: the binary's base name under that directory.
file :: FilePath -> FilePath -> FilePath
file bin dir = dir </> takeFileName bin
-- | Install a file into the shim tree, reproducing its absolute path
-- underneath the top directory.
installFile :: FilePath -> FilePath -> IO ()
installFile top f = do
	createDirectoryIfMissing True destdir
	nukeFile dest
	-- A hard link is cheap; fall back to a full copy when linking
	-- fails (eg, source and dest on different filesystems).
	createLink f dest `catchIO` (const copy)
  where
	copy = void $ boolSystem "cp" [Param "-a", Param f, Param dest]
	destdir = inTop top $ takeDirectory f
	dest = inTop top f
| avengerpenguin/propellor | src/Propellor/Shim.hs | bsd-2-clause | 2,623 | 21 | 19 | 456 | 733 | 368 | 365 | 55 | 2 |
{-# LANGUAGE OverloadedStrings #-}
-- Rendering a bread recipe.
module Rendering where
import qualified Data.Ratio as R
import Formatting
import BreadData
type RecipeFieldLengths = (Int, Int, Int)
-- Render ingredient record into a line: three columns (name, amount,
-- unit) padded to the widths given by the field-length triple, separated
-- by single spaces.
render :: Int -> RecipeFieldLengths -> IngredientRecord -> String
render sigFigs (x, y, z) record =
  formatToString
    ((right x ' ' %. string) % " " %
     (left y ' ' %. string) % " " %
     (right z ' ' %. string))
    (ingredientName record)
    (renderAmount sigFigs $ amount record)
    (unit record)
-- Render section into a sequence of lines: a dashed rule, the section
-- name, another rule, then one rendered line per ingredient.
renderSection :: Int -> RecipeFieldLengths -> Section -> [String]
renderSection sigFigs lengths (Section {sectionName = name, ingredients = xs}) =
  [dashes lengths, name, dashes lengths] ++
  map (render sigFigs lengths) xs
-- Render recipe into a sequence of lines.  Field widths are computed
-- once over the whole recipe so all sections line up, with a closing
-- rule at the end.
renderRecipe :: Int -> Recipe -> [String]
renderRecipe sigFigs sections =
  concatMap (renderSection sigFigs fieldLengths) sections ++
  [dashes fieldLengths]
  where fieldLengths = recipeFieldLengths sigFigs sections
-- Render numerical quantities to strings sensibly: one wants to render as few
-- significant digits as possible while keeping n significant digits of
-- accuracy. Rendering as few significant digits as possible is provided by the
-- Formatting library.
--
-- For three significant digits (renderAmount 3 x):
-- 0.0 => "0"
-- 23.4601 => "23.5"
-- 1500 => "1500"
-- 0.04659 => "0.0466"
renderAmount :: Int -> Float -> String
-- Zero is special-cased so 'roundSigFig' never sees it (its leading-digit
-- computation involves logBase, which is undefined at 0).
renderAmount _ 0.0 = "0"
renderAmount sigFigs x = formatToString shortest $ roundSigFig sigFigs x
-- | Round to the requested number of significant digits, returning an
-- exact 'Rational'.  If the number of significant digits is greater than
-- or equal to the place of the most significant digit, move the decimal
-- right by k places, round to an integer, then move it back left;
-- otherwise move it left, round, and move it back right.
--
-- Zero is returned unchanged: @logBase 10 0@ is negative infinity, so
-- the leading-digit computation below would be undefined for it.
roundSigFig :: Int -> Float -> Rational
roundSigFig _ 0 = 0
roundSigFig sigFigs x
  | k >= 0    = (round $ x * (10 ^ k)) R.% (10 ^ k)
  -- 'otherwise' (rather than 'k < 0') makes the guards visibly
  -- exhaustive to the compiler.
  | otherwise = (round $ x / (10.0 ^ (-k))) * (10 ^ (-k)) R.% 1
  where
    k = sigFigs - placeOfLeadingDigit
    -- E.g. 23.46 -> 2, 0.0466 -> -1.
    placeOfLeadingDigit = (floor $ logBase 10 $ abs x) + 1
-- Column widths needed to render one ingredient at the given precision:
-- (name width, rendered-amount width, unit width).
ingredientFieldLengths :: Int -> IngredientRecord -> RecipeFieldLengths
ingredientFieldLengths sigFigs IngredientRecord
  {ingredientName = i, amount = a, unit = u} =
    (length i, length $ renderAmount sigFigs a, length u)
-- Component-wise maximum of a list of width triples: the widths needed
-- to fit every entry.  (Partial on the empty list, like 'maximum'.)
maxFieldLengths :: [RecipeFieldLengths] -> RecipeFieldLengths
maxFieldLengths triples =
  (maximum names, maximum amounts, maximum units)
  where
    (names, amounts, units) = unzip3 triples
-- Field lengths implied by a section are defined as follows. First, size each
-- field so it's big enough to fit that field for all of the ingredients in the
-- section. But if the section name is too long to render that way, expand the
-- second (ingredient name) field so all of it fits.
sectionFieldLengths :: Int -> Section -> RecipeFieldLengths
sectionFieldLengths sigFigs Section {sectionName = n, ingredients = is} =
  -- The "+ 2" accounts for the two single-space separators between the
  -- three columns.
  if length n <= a + b + c + 2
    then (a, b, c)
    else (a, length n - a - c - 2, c)
  where
    (a, b, c) = maxFieldLengths $ map (ingredientFieldLengths sigFigs) is
-- Widths that fit every section of the recipe.
recipeFieldLengths :: Int -> Recipe -> RecipeFieldLengths
recipeFieldLengths sigFigs r = maxFieldLengths $
  map (sectionFieldLengths sigFigs) r
-- | Horizontal rule used to demarcate information in the rendered
-- recipe: spans the three column widths plus the two separating spaces.
dashes :: RecipeFieldLengths -> String
dashes (x, y, z) = replicate (x + y + z + 2) '-'
| ramanshah/bread | src/Rendering.hs | bsd-3-clause | 3,631 | 0 | 13 | 747 | 878 | 479 | 399 | 51 | 2 |
--------------------------------------------------------------------------------
-- | Read templates in Hakyll's native format
module Hakyll.Web.Template.Read
( readTemplate
) where
--------------------------------------------------------------------------------
import Data.List (isPrefixOf)
--------------------------------------------------------------------------------
import Hakyll.Web.Template.Internal
--------------------------------------------------------------------------------
-- | Construct a @Template@ from a string.
--
-- The syntax recognised is: @$$@ for an escaped dollar sign, @$key$@ for
-- a substitution key, and anything else as a literal chunk.  A lone @$@
-- that does not introduce a valid key is kept as a literal @"$"@ chunk.
readTemplate :: String -> Template
readTemplate = Template . go
  where
    go [] = []
    go str
        | "$$" `isPrefixOf` str = Escaped : go (drop 2 str)
        | "$" `isPrefixOf` str =
            case parseKey (drop 1 str) of
                Just (key, rest) -> Key key : go rest
                Nothing -> Chunk "$" : go (drop 1 str)
        | otherwise =
            let (chunk, rest) = break ('$' ==) str
            in Chunk chunk : go rest

    -- A key is a non-empty run of valid characters closed by '$';
    -- yields the key and the remainder after the closing '$'.
    parseKey str =
        case span validKeyChar str of
            (key, rest)
                | not (null key) && "$" `isPrefixOf` rest ->
                    Just (key, drop 1 rest)
                | otherwise -> Nothing

    validKeyChar c = c `notElem` ['$', '\n', '\r']
| bergmark/hakyll | src/Hakyll/Web/Template/Read.hs | bsd-3-clause | 1,490 | 0 | 14 | 389 | 325 | 173 | 152 | 23 | 4 |
module Math.Probable.Distribution
( -- * Common distributions
beta
, cauchy
, cauchyStd
, chiSquared
, fisher
, gamma
, improperGamma
, geometric
, geometric0
, student
, uniform
, normal
, standard
, normalFromSample
, exponential
, exponentialFromSample
-- * Finite distributions
, module Math.Probable.Distribution.Finite
-- * Utility functions
, continuous
, discrete
) where
import Control.Monad.Primitive
import Math.Probable.Distribution.Finite
import Math.Probable.Random
import Statistics.Distribution (ContGen, DiscreteGen, genContVar, genDiscreteVar, fromSample)
import Statistics.Distribution.Beta
import Statistics.Distribution.CauchyLorentz
import qualified Statistics.Distribution.ChiSquared as Chi
import qualified Statistics.Distribution.Exponential as E
import Statistics.Distribution.FDistribution
import Statistics.Distribution.Gamma
import qualified Statistics.Distribution.Geometric as G
import qualified Statistics.Distribution.Normal as N
import Statistics.Distribution.StudentT
import Statistics.Distribution.Uniform
import Statistics.Types (Sample)
-- | Sample from a continuous distribution from the 'statistics' package.
--
-- > λ> import qualified Statistics.Distribution.Normal as Normal
-- > λ> mwc $ continuous (Normal.normalDistr 0 1)
-- > -0.7266583064693862
--
-- This is equivalent to using 'normal' from this module.
continuous :: (ContGen d, PrimMonad m)
           => d -- ^ the continuous distribution to sample from
           -> RandT m Double
continuous dist = RandT (genContVar dist)
{-# INLINE continuous #-}
-- | Sample from a discrete distribution from the 'statistics' package.
--
-- > λ> import qualified Statistics.Distribution.Geometric as Geo
-- > λ> mwc $ discrete (Geo.geometric 0.6)
-- > 2
--
-- This is equivalent to using 'geometric' from this module.
discrete :: (DiscreteGen d, PrimMonad m)
         => d -- ^ the discrete distribution to sample from
         -> RandT m Int
discrete dist = RandT (genDiscreteVar dist)
-- | Beta distribution (from @Statistics.Distribution.Beta@)
--
-- > λ> mwc $ listOf 10 (beta 81 219)
-- > [ 0.23238372272745833,0.252972980515086,0.22708315774257903
-- > , 0.25807200295967214,0.29794072226119983,0.24534701159196015
-- > , 0.24766870269839578,0.2994199351220346,0.2728157476212405,0.2593318159573564
-- > ]
beta :: PrimMonad m
     => Double -- ^ shape parameter alpha
     -> Double -- ^ shape parameter beta
     -> RandT m Double
beta a b = continuous (betaDistr a b)
-- | Cauchy distribution (from @Statistics.Distribution.Cauchy@)
--
-- > λ> mwc $ listOf 10 (cauchy 0 0.1)
-- > [ -0.3932758718373347,0.490467375093784,4.2620417667423555e-2
-- > , 3.370509874905657e-2,-8.186484692937862e-2,9.371858212168262e-2
-- > , -1.1095818809115384e-2,3.0353983716155386e-2,0.22759697862410477
-- > , -0.1881828277028582 ]
cauchy :: PrimMonad m
       => Double -- ^ central point
       -> Double -- ^ scale parameter
       -> RandT m Double
cauchy center scale = continuous (cauchyDistribution center scale)
-- | Cauchy distribution centered at 0 with scale 1
-- (from @Statistics.Distribution.Cauchy@)
--
-- > λ> mwc $ listOf 10 cauchyStd
-- > [ 9.409701589649838,-7.361963972107541,0.168746305673769
-- > , 5.091825420838711,-0.326080163135388,-1.2989850787629456
-- > , -2.685658063444485,0.22671438734899435,-1.602349559644217e-2
-- > , -0.6476292643908057 ]
cauchyStd :: PrimMonad m
          => RandT m Double
cauchyStd = continuous (cauchyDistribution 0 1)
-- | Chi-squared distribution (from @Statistics.Distribution.ChiSquared@)
--
-- > λ> mwc $ listOf 10 (chiSquared 4)
-- > [ 8.068852054279787,1.861584389294606,6.3049415103095265
-- > , 1.0512164068833838,1.6243237867165086,5.284901049954076
-- > , 0.4773242487947021,1.1753876666374887,5.21554771873363
-- > , 3.477574639460651 ]
chiSquared :: PrimMonad m
           => Int -- ^ number of degrees of freedom
           -> RandT m Double
chiSquared degrees = continuous (Chi.chiSquared degrees)
-- | Fisher's F-Distribution (from @Statistics.Distribution.FDistribution@)
--
-- > λ> mwc $ listOf 10 (fisher 4 3)
-- > [ 3.437898578540642,0.844120450719367,1.9907851466347173
-- > , 2.0089975147012784,1.3729208790549117,0.9380430357924707
-- > , 2.642389323945247,1.0918121624055352,0.45650856735477335
-- > , 2.5134537326659196 ]
fisher :: PrimMonad m
       => Int -- ^ numerator degrees of freedom
       -> Int -- ^ denominator degrees of freedom
       -> RandT m Double
fisher dfNum dfDen = continuous (fDistribution dfNum dfDen)
-- | Gamma distribution (from @Statistics.Distribution.Gamma@)
--
-- > λ> mwc $ listOf 10 (gamma 3 0.1)
-- > [ 5.683745415884202e-2,0.20726188766138176,0.3150672538487696
-- > , 0.4250825346490057,0.5586516230326105,0.46897413151474315
-- > , 0.18374916962208182,9.93000480494153e-2,0.6057279704154832
-- > , 0.11070190282993911 ]
gamma :: PrimMonad m
      => Double -- ^ shape parameter k
      -> Double -- ^ scale parameter theta
      -> RandT m Double
gamma shape scale = continuous (gammaDistr shape scale)
-- | Gamma distribution, without checking whether the parameters are valid
-- (from @Statistics.Distribution.Gamma@)
--
-- > λ> mwc $ listOf 10 (improperGamma 3 0.1)
-- > [ 0.30431838005485,0.4044380297376584,2.8950141419406657e-2
-- > , 0.468271612850147,0.18587792578128381,0.22735854572527045
-- > , 0.5168050216325927,5.896911236207261e-2,0.24654560998405564
-- > , 0.10557650513145429 ]
improperGamma :: PrimMonad m
              => Double -- ^ shape parameter k
              -> Double -- ^ scale parameter theta
              -> RandT m Double
improperGamma shape scale = continuous (improperGammaDistr shape scale)
-- | Geometric distribution.
--
-- Distribution of the number of trials needed to get one success.
-- See @Statistics.Distribution.Geometric@
--
-- > λ> mwc $ listOf 10 (geometric 0.8)
-- > [2,1,1,1,1,1,1,2,1,5]
geometric :: PrimMonad m
          => Double -- ^ success rate
          -> RandT m Int
geometric rate = discrete (G.geometric rate)
-- | Geometric distribution.
--
-- Distribution of the number of failures before getting one success.
-- See @Statistics.Distribution.Geometric@
--
-- > λ> mwc $ listOf 10 (geometric0 0.8)
-- > [0,0,0,0,0,1,1,0,0,0]
geometric0 :: PrimMonad m
           => Double -- ^ success rate
           -> RandT m Int
geometric0 rate = discrete (G.geometric0 rate)
-- | Student-T distribution (from @Statistics.Distribution.StudentT@)
--
-- > λ> mwc $ listOf 10 (student 0.2)
-- > [ -14.221373473810829,-29.395749168822267,19.448665112984997
-- > , -30.00446058929083,-0.5033202547957609,2.321975597874013
-- > , 0.7884787761643617,-0.1895113832448149,-131.12901170537924
-- > , 1.371956948317759 ]
student :: PrimMonad m
        => Double -- ^ number of degrees of freedom
        -> RandT m Double
student degrees = continuous (studentT degrees)
-- | Uniform distribution between @a@ and @b@
-- (from @Statistics.Distribution.Uniform@)
--
-- > λ> mwc $ listOf 10 (uniform 0.1 0.2)
-- > [ 0.1711914559256124,0.1275212181343327,0.15347702635758945
-- > , 0.1743662387063698,0.12047749686635312,0.10719840237585587
-- > , 0.10543681342025846,0.13482973080648325,0.19779298960413577
-- > , 0.1681037592576508 ]
uniform :: PrimMonad m
        => Double -- ^ lower bound
        -> Double -- ^ upper bound
        -> RandT m Double
uniform lo hi = continuous (uniformDistr lo hi)
-- | Normal distribution (from @Statistics.Distribution.Normal@)
--
-- > λ> mwc $ listOf 10 (normal 4 1)
-- > [ 3.6815394812555144,3.5958531529526727,3.775960990625964
-- > , 4.413109650155896,4.825826384709198,4.805629590118984
-- > , 5.259267547365003,4.45410634165052,4.886537243027636
-- > , 3.0409409067356954 ]
normal :: PrimMonad m
       => Double -- ^ mean
       -> Double -- ^ standard deviation
       -> RandT m Double
normal mu sigma = continuous (N.normalDistr mu sigma)
-- | The standard normal distribution (mean = 0, stddev = 1) (from @Statistics.Distribution.Normal@)
--
-- > λ> mwc $ listOf 10 standard
-- > [ 0.2252627935262769,1.1831885443897947,-0.6577353418647461
-- > , 2.1574536855051853,-0.16983072710637676,0.9667954287638821
-- > , -1.8758605246293683,-0.8578048838241616,1.9516838769731923
-- > , 0.43752574431460434 ]
standard :: PrimMonad m
         => RandT m Double
standard = continuous N.standard
-- | Create a normal distribution using parameters estimated from the sample
-- (from @Statistics.Distribution.Normal@). Returns 'Nothing' when the
-- sample does not determine valid parameters.
--
-- > λ> mwc . listOf 10 $
-- >      normalFromSample $
-- >        V.fromList [1,1,1,3,3,3,4
-- >                   ,4,4,4,4,4,4,4
-- >                   ,4,4,4,4,4,5,5
-- >                   ,5,7,7,7]
-- > [ 7.1837511677441395,2.388433817342809,5.252282321156134
-- > , 4.988163140851522,0.40102386713467864,4.4840751065620665
-- > , 2.1471370686776874,2.6591948802201046,3.843667372514598
-- > , 1.7650436484843248 ]
normalFromSample :: PrimMonad m
                 => Sample -- ^ sample
                 -> Maybe (RandT m Double)
normalFromSample sample =
    case fromSample sample :: Maybe N.NormalDistribution of
        Nothing -> Nothing
        Just dist -> Just (continuous dist)
-- | Exponential distribution (from @Statistics.Distribution.Exponential@)
--
-- > λ> mwc $ listOf 10 (exponential 0.2)
-- > [ 5.713524665694821,1.7774315204594584,2.434017573227628
-- > , 5.463202731505528,0.5403008025455847,14.346316301765576
-- > , 7.380393612391503,24.800854500680032,0.8731076703020924
-- > , 6.1661076502236645 ]
exponential :: PrimMonad m
            => Double -- ^ lambda (scale) parameter
            -> RandT m Double
exponential lambda = continuous (E.exponential lambda)
-- | Exponential distribution with the rate estimated from a sample
-- (from @Statistics.Distribution.Exponential@). Returns 'Nothing' when
-- the sample does not determine a valid parameter.
--
-- > λ> mwc $ listOf 10 (exponentialFromSample $ V.fromList [1,1,1,0])
-- > [ 0.4237050903604833,1.934301502525168,0.7435728843566659
-- > , 1.8720263209574293,0.605750265970631,0.24103955067365979
-- > , 0.6294952762436511,1.660404952631443,0.6448230847113577
-- > , 0.8891555734786789 ]
exponentialFromSample :: PrimMonad m
                      => Sample -- ^ sample
                      -> Maybe (RandT m Double)
exponentialFromSample sample =
    case fromSample sample :: Maybe E.ExponentialDistribution of
        Nothing -> Nothing
        Just dist -> Just (continuous dist)
| alpmestan/probable | src/Math/Probable/Distribution.hs | bsd-3-clause | 9,954 | 0 | 9 | 1,799 | 1,063 | 635 | 428 | 114 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}
module Paths_Test (
version,
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import Foreign
import Foreign.C
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
-- Run an IO action, handling IO exceptions with the supplied handler.
-- The CPP below selects the exception type matching the compiler's
-- @base@ version: @IOException@ on base >= 4, the legacy @Exception@
-- type otherwise.
#if defined(VERSION_base)
#if MIN_VERSION_base(4,0,0)
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#else
catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a
#endif
#else
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#endif
catchIO = Exception.catch
-- Package version, as declared in the cabal file.
version :: Version
version = Version [0,1] []

-- Installation prefix and the bin directory relative to it, both baked
-- in by Cabal at configure time.
prefix, bindirrel :: FilePath
prefix = "C:\\Users\\Pichau\\AppData\\Roaming\\cabal"
bindirrel = "bin"
-- Installation directory getters.  Each resolves relative to the running
-- executable's location via 'getPrefixDirRel', falling back to the
-- configure-time prefix when that lookup fails.
getBinDir :: IO FilePath
getBinDir = getPrefixDirRel bindirrel
getLibDir :: IO FilePath
getLibDir = getPrefixDirRel "x86_64-windows-ghc-8.0.2\\Test-0.1"
getDynLibDir :: IO FilePath
getDynLibDir = getPrefixDirRel "x86_64-windows-ghc-8.0.2"
-- The data directory may be overridden at runtime through the
-- Test_datadir environment variable.
getDataDir :: IO FilePath
getDataDir = catchIO (getEnv "Test_datadir") (\_ -> getPrefixDirRel "x86_64-windows-ghc-8.0.2\\Test-0.1")
getLibexecDir :: IO FilePath
getLibexecDir = getPrefixDirRel "Test-0.1"
getSysconfDir :: IO FilePath
getSysconfDir = getPrefixDirRel "etc"
-- | Resolve a data file name against the package's data directory.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = fmap (`joinFileName` name) getDataDir
-- Resolve @dirRel@ relative to the directory containing the running
-- executable, obtained from the Win32 GetModuleFileNameW API.  The buffer
-- is doubled and the call retried while the path does not fit; on outright
-- failure (ret == 0) the configure-time prefix is used instead.
getPrefixDirRel :: FilePath -> IO FilePath
getPrefixDirRel dirRel = try_size 2048 -- plenty, PATH_MAX is 512 under Win32.
  where
    try_size size = allocaArray (fromIntegral size) $ \buf -> do
        ret <- c_GetModuleFileName nullPtr buf size
        case ret of
          0 -> return (prefix `joinFileName` dirRel)
          _ | ret < size -> do
              exePath <- peekCWString buf
              let (bindir,_) = splitFileName exePath
              return ((bindir `minusFileName` bindirrel) `joinFileName` dirRel)
            | otherwise -> try_size (size * 2)

-- Win32 API: fills the buffer with the full path of the current module.
foreign import ccall unsafe "windows.h GetModuleFileNameW"
  c_GetModuleFileName :: Ptr () -> CWString -> Int32 -> IO Int32
-- | Strip a relative suffix (e.g. @"bin"@) off the end of a directory
-- path, removing one path component per component of the suffix.
minusFileName :: FilePath -> String -> FilePath
minusFileName dir suffix = case suffix of
    "" -> dir
    "." -> dir
    _ -> minusFileName (fst (splitFileName dir)) (fst (splitFileName suffix))
-- | Join a directory and a file name, inserting a path separator unless
-- the directory already ends in one; @""@ and @"."@ act as "no directory".
joinFileName :: String -> String -> FilePath
joinFileName dir fname
    | dir == "" || dir == "." = fname
    | fname == "" = dir
    | isPathSeparator (last dir) = dir ++ fname
    | otherwise = dir ++ pathSeparator : fname
-- Split a Windows path into (directory, file name).  Implemented on the
-- reversed string: an optional drive prefix (e.g. "C:") is peeled off
-- first, then the file name is everything after the last path separator.
splitFileName :: FilePath -> (String, String)
splitFileName p = (reverse (path2++drive), reverse fname)
  where
    (path,drive) = case p of
       (c:':':p') -> (reverse p',[':',c])
       _ -> (reverse p ,"")
    (fname,path1) = break isPathSeparator path
    path2 = case path1 of
      [] -> "."
      [_] -> path1 -- don't remove the trailing slash if
                   -- there is only one character
      (c:path') | isPathSeparator c -> path'
      _ -> path1
-- | The canonical Windows path separator.
pathSeparator :: Char
pathSeparator = '\\'

-- | True for either separator character Windows accepts.
isPathSeparator :: Char -> Bool
isPathSeparator c = c `elem` ['/', '\\']
| Hidowga/Projetos | dist/build/autogen/Paths_Test.hs | bsd-3-clause | 3,435 | 0 | 21 | 792 | 911 | 482 | 429 | 77 | 5 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Network.Syncthing.Types.Config
( Config(..)
, FolderConfig(..)
, DeviceConfig(..)
, VersioningConfig(..)
, OptionsConfig(..)
, GuiConfig(..)
, AddressType(..)
) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (MonadPlus (mzero))
import Data.Aeson
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import Data.Text (Text, cons, uncons)
import Network.Syncthing.Types.Common
import Network.Syncthing.Internal.Utils
-------------------------------------------------------------------------------
-- CONFIG RECORD -----
-------------------------------------------------------------------------------
-- | The current configuration data structure.
-- Field names mirror the JSON keys of the Syncthing REST API's
-- @\/rest\/system\/config@ document.
data Config = Config {
      getConfigVersion :: Int
    , getFolderConfigs :: [FolderConfig]
    , getDeviceConfigs :: [DeviceConfig]
    , getGuiConfig :: GuiConfig
    , getOptionsConfig :: OptionsConfig
    } deriving (Eq, Show)
-- Decode a Config from the API's JSON representation.
instance FromJSON Config where
    parseJSON (Object v) =
        Config <$> (v .: "version")
               <*> (v .: "folders")
               <*> (v .: "devices")
               <*> (v .: "gui")
               <*> (v .: "options")
    parseJSON _ = mzero
-- Encode a Config back to the same JSON keys accepted by FromJSON.
instance ToJSON Config where
    toJSON Config{..} =
        object [ "version" .= getConfigVersion
               , "folders" .= getFolderConfigs
               , "devices" .= getDeviceConfigs
               , "gui" .= getGuiConfig
               , "options" .= getOptionsConfig
               ]
-------------------------------------------------------------------------------
-- ADDRESS TYPE -----
-------------------------------------------------------------------------------
-- | An address can be dynamic (discovered at runtime) or static.
data AddressType =
      Dynamic
    | Address Addr
    deriving (Eq, Show)
-- | Interpret Syncthing's textual address form: the literal @"dynamic"@,
-- or a parsed static address.
decodeAddressType :: Text -> AddressType
decodeAddressType t
    | t == "dynamic" = Dynamic
    | otherwise = Address (parseAddr t)
-- | Render an 'AddressType' back to the textual form used by the API.
encodeAddressType :: AddressType -> Text
encodeAddressType at = case at of
    Dynamic -> "dynamic"
    Address addr -> encodeAddr addr
-------------------------------------------------------------------------------
-- FOLDER CONFIG -----
-------------------------------------------------------------------------------
-- | The folder specific configuration.
-- Field names correspond one-to-one to the JSON keys in the instances
-- below; keep them in sync when editing.
data FolderConfig = FolderConfig {
      getId :: FolderName
    , getPath :: Path
    , getFolderDevices :: [Device]
    , getReadOnly :: Bool
    , getRescanIntervalS :: Int
    , getIgnorePerms :: Bool
    , getAutoNormalize :: Bool
    , getVersioning :: VersioningConfig
    , getCopiers :: Int
    , getPullers :: Int
    , getHashers :: Int
    , getOrder :: Text
    , getFolderInvalid :: Text
    } deriving (Eq, Show)
-- Devices arrive as objects ({"deviceID": ...}); unwrap them to plain
-- Device values via getFolderDevice.
instance FromJSON FolderConfig where
    parseJSON (Object v) =
        FolderConfig <$> (v .: "id")
                     <*> (v .: "path")
                     <*> (map getFolderDevice <$> (v .: "devices"))
                     <*> (v .: "readOnly")
                     <*> (v .: "rescanIntervalS")
                     <*> (v .: "ignorePerms")
                     <*> (v .: "autoNormalize")
                     <*> (v .: "versioning")
                     <*> (v .: "copiers")
                     <*> (v .: "pullers")
                     <*> (v .: "hashers")
                     <*> (v .: "order")
                     <*> (v .: "invalid")
    parseJSON _ = mzero
-- Re-wrap each Device in FolderDeviceConfig so it serializes as an object.
instance ToJSON FolderConfig where
    toJSON FolderConfig{..} =
        object [ "id" .= getId
               , "path" .= getPath
               , "devices" .= map FolderDeviceConfig getFolderDevices
               , "readOnly" .= getReadOnly
               , "rescanIntervalS" .= getRescanIntervalS
               , "ignorePerms" .= getIgnorePerms
               , "autoNormalize" .= getAutoNormalize
               , "versioning" .= getVersioning
               , "copiers" .= getCopiers
               , "pullers" .= getPullers
               , "hashers" .= getHashers
               , "order" .= getOrder
               , "invalid" .= getFolderInvalid
               ]
-------------------------------------------------------------------------------
-- VERSIONING CONFIG -----
-------------------------------------------------------------------------------
-- | Information about versioning: the strategy name plus its
-- string-to-string parameter map.
data VersioningConfig = VersioningConfig {
      getType :: Text
    , getParams :: M.Map Text Text
    } deriving (Eq, Show)
instance FromJSON VersioningConfig where
    parseJSON (Object v) =
        VersioningConfig <$> (v .: "type")
                         <*> (v .: "params")
    parseJSON _ = mzero
instance ToJSON VersioningConfig where
    toJSON VersioningConfig{..} =
        object [ "type" .= getType
               , "params" .= getParams
               ]
-------------------------------------------------------------------------------
-- DEVICE CONFIG -----
-------------------------------------------------------------------------------
-- | Device specific configuration information.
data DeviceConfig = DeviceConfig {
      getDevice :: Device
    , getDeviceName :: Text
    , getAddresses :: [AddressType]
    , getCompression :: Text
    , getCertName :: Text
    , getIntroducer :: Bool
    } deriving (Eq, Show)
-- Addresses are transported as plain strings; convert to/from
-- AddressType with decodeAddressType/encodeAddressType.
instance FromJSON DeviceConfig where
    parseJSON (Object v) =
        DeviceConfig <$> (v .: "deviceID")
                     <*> (v .: "name")
                     <*> (map decodeAddressType <$> (v .: "addresses"))
                     <*> (v .: "compression")
                     <*> (v .: "certName")
                     <*> (v .: "introducer")
    parseJSON _ = mzero
instance ToJSON DeviceConfig where
    toJSON DeviceConfig{..} =
        object [ "deviceID" .= getDevice
               , "name" .= getDeviceName
               , "addresses" .= map encodeAddressType getAddresses
               , "compression" .= getCompression
               , "certName" .= getCertName
               , "introducer" .= getIntroducer
               ]
-------------------------------------------------------------------------------
-- FOLDER-DEVICE CONFIG -----
-------------------------------------------------------------------------------
-- Wrapper giving a Device the {"deviceID": ...} object shape used inside
-- a folder's "devices" array.  Internal to this module.
data FolderDeviceConfig = FolderDeviceConfig {
      getFolderDevice :: Device
    } deriving (Eq, Show)
instance FromJSON FolderDeviceConfig where
    parseJSON (Object v) = FolderDeviceConfig <$> (v .: "deviceID")
    parseJSON _ = mzero
instance ToJSON FolderDeviceConfig where
    toJSON (FolderDeviceConfig device) =
        object [ "deviceID" .= device ]
-------------------------------------------------------------------------------
-- GUI CONFIG -----
-------------------------------------------------------------------------------
-- | Gui settings.
data GuiConfig = GuiConfig {
      getEnabled :: Bool
    , getApiKey :: Maybe Text
    , getGuiAddress :: Addr
    , getUser :: Text
    , getPassword :: Text
    , getUseTLS :: Bool
    } deriving (Eq, Show)
-- An empty API key in the JSON maps to Nothing (see decodeApiKey), and
-- the address string is parsed into an Addr.
instance FromJSON GuiConfig where
    parseJSON (Object v) =
        GuiConfig <$> (v .: "enabled")
                  <*> (decodeApiKey <$> (v .: "apiKey"))
                  <*> (parseAddr <$> (v .: "address"))
                  <*> (v .: "user")
                  <*> (v .: "password")
                  <*> (v .: "useTLS")
    parseJSON _ = mzero
instance ToJSON GuiConfig where
    toJSON GuiConfig{..} =
        object [ "enabled" .= getEnabled
               , "apiKey" .= encodeApiKey getApiKey
               , "address" .= encodeAddr getGuiAddress
               , "user" .= getUser
               , "password" .= getPassword
               , "useTLS" .= getUseTLS
               ]
-- | Map the empty API-key string to 'Nothing'; any non-empty key is kept.
decodeApiKey :: Text -> Maybe Text
decodeApiKey key = case uncons key of
    Nothing -> Nothing
    Just (c, cs) -> Just (cons c cs)
-- | Inverse of 'decodeApiKey': a missing key becomes the empty string.
encodeApiKey :: Maybe Text -> Text
encodeApiKey mkey = fromMaybe "" mkey
-------------------------------------------------------------------------------
-- OPTIONS CONFIG -----
-------------------------------------------------------------------------------
-- | Various config settings.
-- Field names correspond one-to-one to the JSON keys in the instances
-- below; keep the three lists in the same order when editing.
data OptionsConfig = OptionsConfig {
      getListenAddress :: [Addr]
    , getGlobalAnnounceServers :: [Text]
    , getGlobalAnnounceEnabled :: Bool
    , getLocalAnnounceEnabled :: Bool
    , getLocalAnnouncePort :: Int
    , getLocalAnnounceMCAddr :: Text
    , getMaxSendKbps :: Int
    , getMaxRecvKbps :: Int
    , getReconnectionIntervalS :: Int
    , getStartBrowser :: Bool
    , getUpnpEnabled :: Bool
    , getUpnpLeaseMinutes :: Int
    , getUpnpRenewalMinutes :: Int
    , getUpnpTimeoutSeconds :: Int
    , getUrAccepted :: Int
    , getUrUniqueID :: Text
    , getRestartOnWakeup :: Bool
    , getAutoUpgradeIntervalH :: Int
    , getKeepTemporariesH :: Int
    , getCacheIgnoredFiles :: Bool
    , getProgressUpdateIntervalS :: Int
    , getSymlinksEnabled :: Bool
    , getLimitBandwidthInLan :: Bool
    } deriving (Eq, Show)
-- Listen addresses are transported as strings and parsed into Addr.
instance FromJSON OptionsConfig where
    parseJSON (Object v) =
        OptionsConfig <$> (map parseAddr <$> (v .: "listenAddress"))
                      <*> (v .: "globalAnnounceServers")
                      <*> (v .: "globalAnnounceEnabled")
                      <*> (v .: "localAnnounceEnabled")
                      <*> (v .: "localAnnouncePort")
                      <*> (v .: "localAnnounceMCAddr")
                      <*> (v .: "maxSendKbps")
                      <*> (v .: "maxRecvKbps")
                      <*> (v .: "reconnectionIntervalS")
                      <*> (v .: "startBrowser")
                      <*> (v .: "upnpEnabled")
                      <*> (v .: "upnpLeaseMinutes")
                      <*> (v .: "upnpRenewalMinutes")
                      <*> (v .: "upnpTimeoutSeconds")
                      <*> (v .: "urAccepted")
                      <*> (v .: "urUniqueId")
                      <*> (v .: "restartOnWakeup")
                      <*> (v .: "autoUpgradeIntervalH")
                      <*> (v .: "keepTemporariesH")
                      <*> (v .: "cacheIgnoredFiles")
                      <*> (v .: "progressUpdateIntervalS")
                      <*> (v .: "symlinksEnabled")
                      <*> (v .: "limitBandwidthInLan")
    parseJSON _ = mzero
instance ToJSON OptionsConfig where
    toJSON OptionsConfig{..} =
        object [ "listenAddress" .= map encodeAddr getListenAddress
               , "globalAnnounceServers" .= getGlobalAnnounceServers
               , "globalAnnounceEnabled" .= getGlobalAnnounceEnabled
               , "localAnnounceEnabled" .= getLocalAnnounceEnabled
               , "localAnnouncePort" .= getLocalAnnouncePort
               , "localAnnounceMCAddr" .= getLocalAnnounceMCAddr
               , "maxSendKbps" .= getMaxSendKbps
               , "maxRecvKbps" .= getMaxRecvKbps
               , "reconnectionIntervalS" .= getReconnectionIntervalS
               , "startBrowser" .= getStartBrowser
               , "upnpEnabled" .= getUpnpEnabled
               , "upnpLeaseMinutes" .= getUpnpLeaseMinutes
               , "upnpRenewalMinutes" .= getUpnpRenewalMinutes
               , "upnpTimeoutSeconds" .= getUpnpTimeoutSeconds
               , "urAccepted" .= getUrAccepted
               , "urUniqueId" .= getUrUniqueID
               , "restartOnWakeup" .= getRestartOnWakeup
               , "autoUpgradeIntervalH" .= getAutoUpgradeIntervalH
               , "keepTemporariesH" .= getKeepTemporariesH
               , "cacheIgnoredFiles" .= getCacheIgnoredFiles
               , "progressUpdateIntervalS" .= getProgressUpdateIntervalS
               , "symlinksEnabled" .= getSymlinksEnabled
               , "limitBandwidthInLan" .= getLimitBandwidthInLan
               ]
| jetho/syncthing-hs | Network/Syncthing/Types/Config.hs | bsd-3-clause | 12,516 | 0 | 32 | 4,309 | 2,277 | 1,301 | 976 | 248 | 1 |
{-
- qqlele/src/QQLeLe/CFG.hs
- copyright (c) 2014 Frano Perleta
-}
-- extensions {{{
{-# LANGUAGE
FlexibleContexts, FlexibleInstances, FunctionalDependencies,
GeneralizedNewtypeDeriving, KindSignatures, MultiParamTypeClasses,
OverlappingInstances, RankNTypes, ScopedTypeVariables,
UndecidableInstances
#-}
-- }}}
-- exports {{{
module QQLeLe.CFG
-- basic blocks:
( BBlock(..)
, BB()
-- graphs:
, CFGT()
, runCFGT
, MonadCFG(..)
-- graph operations:
, rootBB
, newBB
, readBB
, predBB
, succBB
-- traversal:
, reachableBBs
, traverseBBs
, traverseBBs_
-- attributes:
, Tag(..)
, BBAttrT()
, runBBAttrT
, MonadBBAttr(..)
, getBBAttr
, setBBAttr
-- pure attributes:
, PureBBAttrT()
, runPureBBAttrT
, MonadPureBBAttr(..)
) where
-- }}}
-- imports {{{
import Control.Applicative
import Control.Monad.Reader
import Control.Monad.State
import qualified Data.IntMap as IM
import Data.IntMap (IntMap)
import qualified Data.IntSet as IS
import Data.IntSet (IntSet)
-- }}}
-- basic blocks {{{
-- | Types that can serve as CFG basic blocks: they expose their
-- successor blocks and any other blocks they reference.
class BBlock bb where
    bblockSucc :: bb g -> [BB g]
    bblockRefs :: bb g -> [BB g]
-- }}}
-- graphs {{{
-- Internal graph state: the next fresh block id, the block store, and
-- the predecessor/successor edge maps keyed by block id.
data CFG bb g = CFG
    { cfgNext :: {-# UNPACK #-} !Int
    , cfgBBlocks :: IntMap (bb g)
    , cfgPred :: IntMap IntSet
    , cfgSucc :: IntMap IntSet
    }
-- | An opaque reference to a basic block in graph @g@.
newtype BB g = BB { unBB :: Int }
  deriving (Eq, Ord)
-- An empty graph: no blocks, no edges, ids start at 0.
cfgEmpty :: CFG bb g
cfgEmpty = CFG 0 IM.empty IM.empty IM.empty
-- | Re-derive the edge maps after block @k@ has been (re)written to hold
-- @x@: record the block's new successor set, and patch the predecessor
-- map (drop @k@ as a predecessor of removed successors, add it for new
-- ones).
cfgUpdate :: (BBlock bb) => BB g -> bb g -> CFG bb g -> CFG bb g
cfgUpdate (BB k) x (CFG n bbs pss sss) = CFG n bbs pss' sss'
  where
    ss = IM.findWithDefault IS.empty k sss
    ss' = IS.fromList . map unBB $ bblockSucc x
    -- Fix: the successor map was previously passed through unchanged, so
    -- 'succBB' (and hence 'reachableBBs') never observed any edges.
    sss' = IM.insert k ss' sss
    stale = IM.fromSet (const $ IS.singleton k) $ ss `IS.difference` ss'
    fresh = IM.fromSet (const $ IS.singleton k) $ ss' `IS.difference` ss
    pss' = IM.unionWith IS.union fresh $ IM.unionWith IS.difference pss stale
-- }}}
-- the monad transformer {{{
-- | The CFG monad transformer: a state transformer over the graph.  The
-- phantom @g@ ties block references to the graph they came from.
newtype CFGT bb g m a = CFGT { unCFGT :: StateT (CFG bb g) m a }
  deriving (Functor, Applicative, Monad, MonadIO, MonadTrans)
-- | Run a CFG computation on an initially empty graph.  The rank-2 type
-- prevents block references from escaping the computation.
runCFGT :: (Monad m) => (forall g. CFGT bb g m a) -> m a
runCFGT (CFGT body) = evalStateT body cfgEmpty
-- | Monads that carry a CFG.  'cfgState' is the primitive state access;
-- 'writeBB' stores a block and (by default) refreshes the edge maps via
-- 'cfgUpdate'.
class (Monad m) => MonadCFG bb g m | m -> bb g where
    cfgState :: (CFG bb g -> (a, CFG bb g)) -> m a
    writeBB :: (BBlock bb) => BB g -> bb g -> m ()
    writeBB r@(BB k) x = cfgState $ \g -> let
        { bbs = IM.insert k x $ cfgBBlocks g
        ; g' = g { cfgBBlocks = bbs }
        } in ((), cfgUpdate r x g')
-- Base instance: CFGT accesses its own state directly.
instance (Monad m) => MonadCFG bb g (CFGT bb g m) where
    cfgState = CFGT . state
-- Pass-through instance for any transformer stacked on a MonadCFG.
instance (MonadCFG bb g m, Monad (t m), MonadTrans t) => MonadCFG bb g (t m) where
    cfgState = lift . cfgState
-- }}}
-- operations {{{
-- | The root basic block (id 0), or 'Nothing' if no block has been
-- allocated yet.
rootBB :: (MonadCFG bb g m) => m (Maybe (BB g))
rootBB = cfgState $ \g ->
    if cfgNext g == 0
        then (Nothing, g)
        else (Just (BB 0), g)
-- | Allocate a fresh block id and store the given block under it.
newBB :: (MonadCFG bb g m, BBlock bb) => bb g -> m (BB g)
newBB block = do
    ref <- cfgState allocate
    writeBB ref block
    return ref
  where
    allocate g =
        let next = cfgNext g
        in (BB next, g { cfgNext = succ next })
-- | Look up the block behind a reference.  References are only ever
-- produced by 'newBB', so a miss indicates a programming error.
readBB :: (MonadCFG bb g m) => BB g -> m (bb g)
readBB (BB k) = cfgState $ \g ->
    case IM.lookup k (cfgBBlocks g) of
        Just block -> (block, g)
        Nothing -> error "readBB: invalid basic block reference"
-- | All recorded predecessors of a block.
predBB :: (MonadCFG bb g m) => BB g -> m [BB g]
predBB (BB k) = cfgState $ \g ->
    ( map BB (IS.toList (IM.findWithDefault IS.empty k (cfgPred g)))
    , g )
-- | All recorded successors of a block.
succBB :: (MonadCFG bb g m) => BB g -> m [BB g]
succBB (BB k) = cfgState $ \g ->
    ( map BB (IS.toList (IM.findWithDefault IS.empty k (cfgSucc g)))
    , g )
-- }}}
-- traversal {{{
-- | Every block reachable from the root by following successor edges;
-- empty when the graph has no root yet.
reachableBBs :: (MonadCFG bb g m) => m [BB g]
reachableBBs = do
    mroot <- rootBB
    case mroot of
        Nothing -> return []
        Just root -> traverseBBs root visit
  where
    visit bb = do
        ss <- succBB bb
        return (bb, ss)
-- | Depth-first worklist traversal from @from@.  The action returns a
-- result plus the next blocks to visit; each block is visited at most
-- once and results are collected in visit order.
traverseBBs :: (MonadCFG bb g m) => BB g -> (BB g -> m (a, [BB g])) -> m [a]
traverseBBs from action = go IS.empty [from]
  where
    go _ [] = return []
    go seen (bb@(BB k) : rest)
        -- skip blocks already visited
        | k `IS.member` seen = go seen rest
        | otherwise = do
            (x, ss) <- action bb
            (x :) `liftM` go (IS.insert k seen) (ss ++ rest)
-- | Like 'traverseBBs' but discards results; the action only supplies
-- the next blocks to visit.
traverseBBs_ :: (MonadCFG bb g m) => BB g -> (BB g -> m [BB g]) -> m ()
traverseBBs_ from action = go IS.empty [from]
  where
    go _ [] = return ()
    go seen (bb@(BB k) : rest)
        -- skip blocks already visited
        | k `IS.member` seen = go seen rest
        | otherwise = do
            ss <- action bb
            go (IS.insert k seen) (ss ++ rest)
-- }}}
-- attributes {{{
-- | A type-level tag identifying one attribute of type @t@ on the blocks
-- of graph @g@; 'getTag' produces the (usually unit-like) tag value.
class Tag tag t g | tag -> t g where
    getTag :: tag
-- | Transformer layering one per-block attribute store (block id -> @t@)
-- over a CFG computation.
newtype BBAttrT tag t (bb :: * -> *) g m a
    = BBAttrT { unBBAttrT :: StateT (IntMap t) m a }
    deriving (Functor, Applicative, Monad, MonadIO, MonadTrans)
-- | Run with an initially empty attribute store; the tag argument only
-- fixes the attribute's type.
runBBAttrT :: (MonadCFG bb g m) => tag -> BBAttrT tag t bb g m a -> m a
runBBAttrT _ = flip evalStateT IM.empty . unBBAttrT
-- | Monads providing read/modify/write access to the attribute named by
-- @tag@; 'withBBAttr' threads the current value (if any) through @f@.
class (MonadCFG bb g m, Tag tag t g)
    => MonadBBAttr tag t bb g m where
    withBBAttr :: tag -> BB g -> (Maybe t -> (a, Maybe t)) -> m a
-- Base instance: consult and update the IntMap store directly.
instance (MonadCFG bb g m, Tag tag t g)
    => MonadBBAttr tag t bb g (BBAttrT tag t bb g m) where
    withBBAttr _ (BB k) f = BBAttrT $ do
        (x, v) <- gets $ f . IM.lookup k
        -- 'alter (const v)' both inserts/updates (Just) and deletes (Nothing).
        modify $ IM.alter (const v) k
        return x
-- Pass-through instance for transformers stacked above a MonadBBAttr.
instance (MonadBBAttr tag t bb g m, Tag tag t g, Monad (u m), MonadTrans u)
    => MonadBBAttr tag t bb g (u m) where
    withBBAttr tag bb = lift . withBBAttr tag bb
-- | Set (or overwrite) a block's attribute value.
setBBAttr :: (MonadBBAttr tag t bb g m, Tag tag t g) => tag -> BB g -> t -> m ()
setBBAttr tag bb val = withBBAttr tag bb (\_ -> ((), Just val))
-- | Read a block's attribute value, if one has been set.
getBBAttr :: (MonadBBAttr tag t bb g m, Tag tag t g) => tag -> BB g -> m (Maybe t)
getBBAttr tag bb = withBBAttr tag bb (\mv -> (mv, mv))
-- }}}
-- pure attributes {{{
-- | An attribute computed purely from a block's contents, memoised in a
-- 'BBAttrT' store and kept consistent on writes.
newtype PureBBAttrT tag t (bb :: * -> *) g m a
    = PureBBAttrT { unPureBBAttrT :: ReaderT (bb g -> t) (BBAttrT tag t bb g m) a }
    deriving (Functor, Applicative, Monad, MonadIO)
-- | Run with @f@ as the pure attribute function and an empty cache.
runPureBBAttrT :: (MonadCFG bb g m) => tag -> (bb g -> t) -> PureBBAttrT tag t bb g m a -> m a
runPureBBAttrT tag f = runBBAttrT tag . flip runReaderT f . unPureBBAttrT
-- Writing a block also recomputes and stores its attribute, keeping the
-- cache consistent with the block store.
instance (MonadCFG bb g m, Tag tag t g) => MonadCFG bb g (PureBBAttrT tag t bb g m) where
    cfgState = PureBBAttrT . cfgState
    writeBB bb x = PureBBAttrT $ do
        f <- ask
        lift $ setBBAttr (getTag :: tag) bb (f x)
        lift $ writeBB bb x
-- | Monads that can produce the memoised pure attribute of a block.
class (MonadCFG bb g m, Tag tag t g)
    => MonadPureBBAttr tag t bb g m where
    pureBBAttr :: tag -> BB g -> m t
-- Base instance: serve from the cache, or compute from the block's
-- contents and memoise on a miss.
instance (MonadCFG bb g m, Tag tag t g)
    => MonadPureBBAttr tag t bb g (PureBBAttrT tag t bb g m) where
    pureBBAttr tag bb = PureBBAttrT $ do
        m <- lift $ getBBAttr tag bb
        case m of
            Just x -> return x
            Nothing -> do
                f <- ask
                x <- f `liftM` readBB bb
                setBBAttr tag bb x
                return x
-- Pass-through instance for transformers stacked above.
instance (MonadPureBBAttr tag t bb g m, Tag tag t g, Monad (u m), MonadTrans u)
    => MonadPureBBAttr tag t bb g (u m) where
    pureBBAttr tag = lift . pureBBAttr tag
-- }}}
-- vim:fdm=marker:
| fperleta/qqlele | src/QQLeLe/CFG.hs | bsd-3-clause | 7,323 | 160 | 13 | 2,200 | 2,928 | 1,586 | 1,342 | 180 | 2 |
module Trains where
import Db
-- Data: identifiers, quotas and seat capacities for the sample network.
-- Train ids: two active trains per underground line (u1..u6); the
-- remaining ids are kept around commented out.
u1_01 = TrainId "u1_01"
u1_02 = TrainId "u1_02"
--u1_03 = TrainId "u1_03"
--u1_04 = TrainId "u1_04"
u2_01 = TrainId "u2_01"
u2_02 = TrainId "u2_02"
--u2_03 = TrainId "u2_03"
--u2_04 = TrainId "u2_04"
u3_01 = TrainId "u3_01"
u3_02 = TrainId "u3_02"
--u3_03 = TrainId "u3_03"
--u3_04 = TrainId "u3_04"
u4_01 = TrainId "u4_01"
u4_02 = TrainId "u4_02"
--u4_03 = TrainId "u4_03"
--u4_04 = TrainId "u4_04"
u6_01 = TrainId "u6_01"
u6_02 = TrainId "u6_02"
--u6_03 = TrainId "u6_03"
--u6_04 = TrainId "u6_04"
-- Per-train quota values (semantics defined in Db; presumably a booking
-- limit per train -- TODO confirm against Db).
quota_1 = 5
quota_2 = 10
quota_3 = 15
quota_4 = 20
-- Identifiers for the twenty train cars defined further below.
car_id_01 = TrainCarId "car_01"
car_id_02 = TrainCarId "car_02"
car_id_03 = TrainCarId "car_03"
car_id_04 = TrainCarId "car_04"
car_id_05 = TrainCarId "car_05"
car_id_06 = TrainCarId "car_06"
car_id_07 = TrainCarId "car_07"
car_id_08 = TrainCarId "car_08"
car_id_09 = TrainCarId "car_09"
car_id_10 = TrainCarId "car_10"
car_id_11 = TrainCarId "car_11"
car_id_12 = TrainCarId "car_12"
car_id_13 = TrainCarId "car_13"
car_id_14 = TrainCarId "car_14"
car_id_15 = TrainCarId "car_15"
car_id_16 = TrainCarId "car_16"
car_id_17 = TrainCarId "car_17"
car_id_18 = TrainCarId "car_18"
car_id_19 = TrainCarId "car_19"
car_id_20 = TrainCarId "car_20"
-- Seat capacities used by the cars below.
num_seats_01 = 20
num_seats_02 = 30
num_seats_03 = 40
num_seats_04 = 50
-- Cities: stations (named after Vienna U-Bahn stops) referenced by the
-- routes below.
c_01 = City "Stadion"
c_02 = City "Krieau"
c_03 = City "Messe-Prater"
c_04 = City "Praterstern"
c_05 = City "Taborstrasse"
c_06 = City "Schottenring"
c_07 = City "Schottentor"
c_08 = City "Rathaus"
c_09 = City "Volkstheater"
c_10 = City "Museumsquartier"
c_11 = City "Karlsplatz"
c_12 = City "Stadtpark"
c_13 = City "Landstrasse"
c_14 = City "Schwedenplatz"
c_15 = City "RossauerLaende"
c_16 = City "Friedensbruecke"
c_17 = City "Nestroyplatz"
c_18 = City "Stephansplatz"
c_19 = City "Taubstummengasse"
c_20 = City "SuedtirolerPlatz"
c_21 = City "Herrengasse"
c_22 = City "Stubentor"
c_23 = City "KardinalNaglPlatz" -- NOTE(review): check id order below
c_24 = City "KardinalNaglPlatz"
-- Route identifiers for the five underground lines.
route_id_01 = RouteId "U1"
route_id_02 = RouteId "U2"
route_id_03 = RouteId "U3"
route_id_04 = RouteId "U4"
route_id_05 = RouteId "U6"
-- Routes: ordered lists of stops for each line.
route_01 = Route route_id_01 [c_04, c_17, c_14, c_18, c_11, c_19, c_20]
route_02 = Route route_id_02 [c_01, c_02, c_03, c_04, c_05, c_06, c_07, c_08, c_09, c_10, c_11]
route_03 = Route route_id_03 [c_09, c_21, c_18, c_22, c_13, c_23, c_24]
-- RossauerLaende - Schottenring - Schwedenplatz - Landstrasse - Stadtpark - Karlsplatz
-- (the previous comment here listed stations that do not match the cities
-- actually used by this route)
route_04 = Route route_id_04 [c_15, c_06, c_14, c_13, c_12, c_11]
route_05 = Route route_id_05 [c_15, c_07, c_18, c_12, c_23]
-- TrainCars: twenty cars, each pairing a car id with a seat capacity.
car_01 = TrainCar car_id_01 num_seats_01
car_02 = TrainCar car_id_02 num_seats_01
car_03 = TrainCar car_id_03 num_seats_02
car_04 = TrainCar car_id_04 num_seats_02
car_05 = TrainCar car_id_05 num_seats_03
car_06 = TrainCar car_id_06 num_seats_03
car_07 = TrainCar car_id_07 num_seats_04
car_08 = TrainCar car_id_08 num_seats_04
car_09 = TrainCar car_id_09 num_seats_01
car_10 = TrainCar car_id_10 num_seats_01
car_11 = TrainCar car_id_11 num_seats_02
car_12 = TrainCar car_id_12 num_seats_02
car_13 = TrainCar car_id_13 num_seats_03
car_14 = TrainCar car_id_14 num_seats_03
car_15 = TrainCar car_id_15 num_seats_04
car_16 = TrainCar car_id_16 num_seats_04
car_17 = TrainCar car_id_17 num_seats_01
car_18 = TrainCar car_id_18 num_seats_01
car_19 = TrainCar car_id_19 num_seats_02
car_20 = TrainCar car_id_20 num_seats_02
-- Trains: each train combines an id, a route, two cars and a quota.
t01 = Train u1_01 route_01 [car_01, car_02] quota_1
t02 = Train u1_02 route_01 [car_03, car_04] quota_1
t03 = Train u2_01 route_02 [car_05, car_06] quota_2
t04 = Train u2_02 route_02 [car_07, car_08] quota_2
t05 = Train u3_01 route_03 [car_09, car_10] quota_1
t06 = Train u3_02 route_03 [car_11, car_12] quota_4
t07 = Train u4_01 route_04 [car_13, car_14] quota_1
t08 = Train u4_02 route_04 [car_15, car_16] quota_2
t09 = Train u6_01 route_05 [car_17, car_18] quota_2
t10 = Train u6_02 route_05 [car_19, car_20] quota_3
-- The actual database: register all routes first, then all trains.
db = db_add_trains allTrains (db_add_routes allRoutes empty_db)
  where
    allTrains = [t01, t02, t03, t04, t05, t06, t07, t08, t09, t10]
    allRoutes = [route_01, route_02, route_03, route_04, route_05]
| fadeopolis/prog-spr-ue3 | Trains.hs | bsd-3-clause | 4,097 | 0 | 8 | 614 | 1,158 | 628 | 530 | 107 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.GHC
-- Copyright : Isaac Jones 2003-2007
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This is a fairly large module. It contains most of the GHC-specific code for
-- configuring, building and installing packages. It also exports a function
-- for finding out what packages are already installed. Configuring involves
-- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions
-- this version of ghc supports and returning a 'Compiler' value.
--
-- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out
-- what packages are installed.
--
-- Building is somewhat complex as there is quite a bit of information to take
-- into account. We have to build libs and programs, possibly for profiling and
-- shared libs. We have to support building libraries that will be usable by
-- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files
-- using ghc. Linking, especially for @split-objs@ is remarkably complex,
-- partly because there tend to be 1,000's of @.o@ files and this can often be
-- more than we can pass to the @ld@ or @ar@ programs in one go.
--
-- There is also some code for generating @Makefiles@ but the less said about
-- that the better.
--
-- Installing for libs and exes involves finding the right files and copying
-- them to the right places. One of the more tricky things about this module is
-- remembering the layout of files in the build directory (which is not
-- explicitly documented) and thus what search dirs are used for various kinds
-- of files.
{- Copyright (c) 2003-2005, Isaac Jones
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.GHC (
configure, getInstalledPackages, build, makefile, installLib, installExe,
ghcOptions,
ghcVerbosityOptions
) where
import Distribution.Simple.GHC.Makefile
import qualified Distribution.Simple.GHC.IPI641 as IPI641
import qualified Distribution.Simple.GHC.IPI642 as IPI642
import Distribution.Simple.Setup ( CopyFlags(..), MakefileFlags(..),
fromFlag, fromFlagOrDefault)
import Distribution.PackageDescription as PD
( PackageDescription(..), BuildInfo(..),
withLib,
Executable(..), withExe, Library(..),
libModules, hcOptions )
import Distribution.InstalledPackageInfo
( InstalledPackageInfo
, parseInstalledPackageInfo )
import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo
( InstalledPackageInfo_(..) )
import Distribution.Simple.PackageIndex
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.ParseUtils ( ParseResult(..) )
import Distribution.Simple.LocalBuildInfo
( LocalBuildInfo(..), InstallDirs(..) )
import Distribution.Simple.InstallDirs
import Distribution.Simple.BuildPaths
import Distribution.Simple.Utils
import Distribution.Package
( PackageIdentifier, Package(..), PackageName(..) )
import qualified Distribution.ModuleName as ModuleName
import Distribution.Simple.Program
( Program(..), ConfiguredProgram(..), ProgramConfiguration, ProgArg
, ProgramLocation(..), rawSystemProgram, rawSystemProgramConf
, rawSystemProgramStdout, rawSystemProgramStdoutConf, requireProgram
, userMaybeSpecifyPath, programPath, lookupProgram, addKnownProgram
, ghcProgram, ghcPkgProgram, arProgram, ranlibProgram, ldProgram
, gccProgram, stripProgram )
import Distribution.Simple.Compiler
( CompilerFlavor(..), CompilerId(..), Compiler(..), compilerVersion
, OptimisationLevel(..), PackageDB(..), PackageDBStack
, Flag, extensionsToFlags )
import Distribution.Version
( Version(..), anyVersion, orLaterVersion )
import Distribution.System
( OS(..), buildOS )
import Distribution.Verbosity
import Distribution.Text
( display, simpleParse )
import Language.Haskell.Extension (Extension(..))
import Control.Monad ( unless, when )
import Data.Char
import Data.List
import Data.Maybe ( catMaybes )
import System.Directory ( removeFile, renameFile,
getDirectoryContents, doesFileExist,
getTemporaryDirectory )
import System.FilePath ( (</>), (<.>), takeExtension,
takeDirectory, replaceExtension, splitExtension )
import System.IO (openFile, IOMode(WriteMode), hClose, hPutStrLn)
import Distribution.Compat.Exception (catchExit, catchIO)
import Distribution.Compat.CopyFile
( setFileExecutable )
-- -----------------------------------------------------------------------------
-- Configuring
-- | Find and configure @ghc@ and @ghc-pkg@, check that their versions
-- agree, probe the compiler's supported language extensions, and set up
-- the gcc\/ld toolchain that ships with GHC on Windows.
configure :: Verbosity -> Maybe FilePath -> Maybe FilePath
          -> ProgramConfiguration -> IO (Compiler, ProgramConfiguration)
configure verbosity hcPath hcPkgPath conf = do
  -- ghc must be at least 6.4; an explicit --with-ghc path wins.
  (ghcProg, conf') <- requireProgram verbosity ghcProgram
                        (orLaterVersion (Version [6,4] []))
                        (userMaybeSpecifyPath "ghc" hcPath conf)
  let Just ghcVersion = programVersion ghcProg
  -- This is slightly tricky, we have to configure ghc first, then we use the
  -- location of ghc to help find ghc-pkg in the case that the user did not
  -- specify the location of ghc-pkg directly:
  (ghcPkgProg, conf'') <- requireProgram verbosity ghcPkgProgram {
                            programFindLocation = guessGhcPkgFromGhcPath ghcProg
                          }
                          (orLaterVersion (Version [0] []))
                          (userMaybeSpecifyPath "ghc-pkg" hcPkgPath conf')
  let Just ghcPkgVersion = programVersion ghcPkgProg
  -- A mismatched ghc/ghc-pkg pair would give bogus package information.
  when (ghcVersion /= ghcPkgVersion) $ die $
       "Version mismatch between ghc and ghc-pkg: "
    ++ programPath ghcProg ++ " is version " ++ display ghcVersion ++ " "
    ++ programPath ghcPkgProg ++ " is version " ++ display ghcPkgVersion
  languageExtensions <- getLanguageExtensions verbosity ghcProg
  let comp = Compiler {
        compilerId = CompilerId GHC ghcVersion,
        compilerExtensions = languageExtensions
      }
      conf''' = configureToolchain ghcProg conf'' -- configure gcc and ld
  return (comp, conf''')
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding ghc-pkg, we try looking for both a versioned and unversioned
-- ghc-pkg in the same dir, that is:
--
-- > /usr/local/bin/ghc-pkg-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg(.exe)
--
guessGhcPkgFromGhcPath :: ConfiguredProgram -> Verbosity -> IO (Maybe FilePath)
guessGhcPkgFromGhcPath ghcProg verbosity = do
    info verbosity $ "looking for package tool: ghc-pkg near compiler in " ++ dir
    -- Probe every candidate path and keep the first one that exists.
    present <- mapM doesFileExist candidates
    case [ c | (c, True) <- zip candidates present ] of
      []          -> return Nothing
      (pkgtool:_) -> do
        info verbosity $ "found package tool in " ++ pkgtool
        return (Just pkgtool)
  where
    ghcPath        = programPath ghcProg
    dir            = takeDirectory ghcPath
    versionSuffix  = suffixOf (stripExe ghcPath)
    plainGuess     = dir </> "ghc-pkg" <.> exeExtension
    versionedGuess = dir </> ("ghc-pkg" ++ versionSuffix) <.> exeExtension
    -- Prefer the versioned name when the ghc binary itself carries one.
    candidates
      | null versionSuffix = [plainGuess]
      | otherwise          = [versionedGuess, plainGuess]
    -- Trailing run of digits, dots and dashes, e.g. "-6.6.1".
    suffixOf :: FilePath -> String
    suffixOf = reverse . takeWhile (`elem` "0123456789.-") . reverse
    -- Drop a ".exe"-style suffix (Windows) so the version parse works.
    stripExe :: FilePath -> FilePath
    stripExe fp | ext == exeExtension = base
                | otherwise           = fp
      where (base, ext) = splitExtension fp
-- | Adjust the way we find and configure gcc and ld
--
-- Registers GHC's bundled gcc and ld (relevant on Windows, where they
-- live under the compiler's installation tree) and attaches post-configure
-- probes for each.
configureToolchain :: ConfiguredProgram -> ProgramConfiguration
                   -> ProgramConfiguration
configureToolchain ghcProg =
    addKnownProgram gccProgram {
      programFindLocation = findProg gccProgram (baseDir </> "gcc.exe"),
      programPostConf = configureGcc
    }
  . addKnownProgram ldProgram {
      programFindLocation = findProg ldProgram (libDir </> "ld.exe"),
      programPostConf = configureLd
    }
  where
    compilerDir = takeDirectory (programPath ghcProg)
    baseDir = takeDirectory compilerDir
    libDir = baseDir </> "gcc-lib"
    includeDir = baseDir </> "include" </> "mingw"
    isWindows = case buildOS of Windows -> True; _ -> False
    -- on Windows finding and configuring ghc's gcc and ld is a bit special
    findProg :: Program -> FilePath -> Verbosity -> IO (Maybe FilePath)
    findProg prog location | isWindows = \verbosity -> do
        exists <- doesFileExist location
        if exists then return (Just location)
          else do warn verbosity ("Couldn't find " ++ programName prog ++ " where I expected it. Trying the search path.")
                  programFindLocation prog verbosity
      | otherwise = programFindLocation prog
    configureGcc :: Verbosity -> ConfiguredProgram -> IO [ProgArg]
    configureGcc
      | isWindows = \_ gccProg -> case programLocation gccProg of
          -- if it's found on system then it means we're using the result
          -- of programFindLocation above rather than a user-supplied path
          -- that means we should add this extra flag to tell ghc's gcc
          -- where it lives and thus where gcc can find its various files:
          FoundOnSystem {} -> return ["-B" ++ libDir, "-I" ++ includeDir]
          UserSpecified {} -> return []
      | otherwise = \_ _ -> return []
    -- we need to find out if ld supports the -x flag
    configureLd :: Verbosity -> ConfiguredProgram -> IO [ProgArg]
    configureLd verbosity ldProg = do
      tempDir <- getTemporaryDirectory
      -- Compile a trivial C file and try to relink it with "ld -x -r";
      -- if that invocation fails, ld does not understand -x.
      ldx <- withTempFile tempDir ".c" $ \testcfile testchnd ->
             withTempFile tempDir ".o" $ \testofile testohnd -> do
               hPutStrLn testchnd "int foo() {}"
               hClose testchnd; hClose testohnd
               rawSystemProgram verbosity ghcProg ["-c", testcfile,
                                                  "-o", testofile]
               withTempFile tempDir ".o" $ \testofile' testohnd' ->
                 do
                   hClose testohnd'
                   rawSystemProgramStdout verbosity ldProg
                     ["-x", "-r", testofile, "-o", testofile']
                   return True
                 `catchIO`   (\_ -> return False)
                 `catchExit` (\_ -> return False)
      if ldx
        then return ["-x"]
        else return []
-- | Ask ghc which language extensions it supports and pair each with the
-- @-X@ flag that enables it.  For ghc older than 6.7 (no
-- @--supported-languages@ option) fall back to a hard-wired table.
getLanguageExtensions :: Verbosity -> ConfiguredProgram -> IO [(Extension, Flag)]
getLanguageExtensions verbosity ghcProg
  | ghcVersion >= Version [6,7] [] = do
    exts <- rawSystemStdout verbosity (programPath ghcProg)
              ["--supported-languages"]
    -- GHC has the annoying habit of inverting some of the extensions
    -- so we have to try parsing ("No" ++ ghcExtensionName) first
    let readExtension str = do
          ext <- simpleParse ("No" ++ str)
          case ext of
            UnknownExtension _ -> simpleParse str
            _ -> return ext
    return $ extensionHacks
          ++ [ (ext, "-X" ++ display ext)
             | Just ext <- map readExtension (lines exts) ]
  | otherwise = return oldLanguageExtensions
  where
    Just ghcVersion = programVersion ghcProg
    -- ghc-6.8 introduced RecordPuns however it should have been
    -- NamedFieldPuns. We now encourage packages to use NamedFieldPuns so for
    -- compatibility we fake support for it in ghc-6.8 by making it an alias
    -- for the old RecordPuns extension.
    extensionHacks = [ (NamedFieldPuns, "-XRecordPuns")
                     | ghcVersion >= Version [6,8] []
                       && ghcVersion < Version [6,10] [] ]
-- | For GHC 6.6.x and earlier, the mapping from supported extensions to
-- flags.  Most map to the catch-all @-fglasgow-exts@; the rest have their
-- own dedicated flag.
oldLanguageExtensions :: [(Extension, Flag)]
oldLanguageExtensions =
    [(OverlappingInstances , "-fallow-overlapping-instances")
    ,(TypeSynonymInstances , "-fglasgow-exts")
    ,(TemplateHaskell , "-fth")
    ,(ForeignFunctionInterface , "-fffi")
    ,(NoMonomorphismRestriction , "-fno-monomorphism-restriction")
    ,(NoMonoPatBinds , "-fno-mono-pat-binds")
    ,(UndecidableInstances , "-fallow-undecidable-instances")
    ,(IncoherentInstances , "-fallow-incoherent-instances")
    ,(Arrows , "-farrows")
    ,(Generics , "-fgenerics")
    ,(NoImplicitPrelude , "-fno-implicit-prelude")
    ,(ImplicitParams , "-fimplicit-params")
    ,(CPP , "-cpp")
    ,(BangPatterns , "-fbang-patterns")
    ,(KindSignatures , fglasgowExts)
    ,(RecursiveDo , fglasgowExts)
    ,(ParallelListComp , fglasgowExts)
    ,(MultiParamTypeClasses , fglasgowExts)
    ,(FunctionalDependencies , fglasgowExts)
    ,(Rank2Types , fglasgowExts)
    ,(RankNTypes , fglasgowExts)
    ,(PolymorphicComponents , fglasgowExts)
    ,(ExistentialQuantification , fglasgowExts)
    ,(ScopedTypeVariables , "-fscoped-type-variables")
    ,(FlexibleContexts , fglasgowExts)
    ,(FlexibleInstances , fglasgowExts)
    ,(EmptyDataDecls , fglasgowExts)
    ,(PatternGuards , fglasgowExts)
    ,(GeneralizedNewtypeDeriving , fglasgowExts)
    ,(MagicHash , fglasgowExts)
    ,(UnicodeSyntax , fglasgowExts)
    ,(PatternSignatures , fglasgowExts)
    ,(UnliftedFFITypes , fglasgowExts)
    ,(LiberalTypeSynonyms , fglasgowExts)
    ,(TypeOperators , fglasgowExts)
    ,(GADTs , fglasgowExts)
    ,(RelaxedPolyRec , fglasgowExts)
    ,(ExtendedDefaultRules , "-fextended-default-rules")
    ,(UnboxedTuples , fglasgowExts)
    ,(DeriveDataTypeable , fglasgowExts)
    ,(ConstrainedClassMethods , fglasgowExts)
    ]
  where
    fglasgowExts = "-fglasgow-exts"
-- | Ask @ghc-pkg@ for the packages installed in the given package db
-- stack and combine them into a single 'PackageIndex'.
--
-- On Windows various fields contain @$topdir/foo@ rather than full
-- paths, so those are substituted with the real GHC top directory; the
-- @rts@ package additionally gets its bogus mingw include dir removed.
getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration
                     -> IO (PackageIndex InstalledPackageInfo)
getInstalledPackages verbosity packagedbs conf = do
  -- Validate the db stack up front, before running ghc-pkg at all:
  -- previously the (possibly failing) query ran first and the stack was
  -- only checked afterwards.
  checkPackageDbStack packagedbs
  pkgss <- getInstalledPackages' verbosity packagedbs conf
  let pkgs = concatMap snd pkgss
      -- On Windows, various fields have $topdir/foo rather than full
      -- paths. We need to substitute the right value in so that when
      -- we, for example, call gcc, we have proper paths to give it
      Just ghcProg = lookupProgram ghcProgram conf
      compilerDir  = takeDirectory (programPath ghcProg)
      topDir       = takeDirectory compilerDir
      pkgs'        = map (substTopDir topDir) pkgs
      pi1          = PackageIndex.fromList pkgs'
      rtsPackages  = lookupPackageName pi1 (PackageName "rts")
      rtsPackages' = map removeMingwIncludeDir rtsPackages
      pi2          = pi1 `merge` fromList rtsPackages'
  return pi2
-- | Sanity-check a package db stack: the global db must be the first
-- entry and must not occur again later in the stack.
checkPackageDbStack :: PackageDBStack -> IO ()
checkPackageDbStack dbs =
  case dbs of
    (GlobalPackageDB:rest)
      | GlobalPackageDB `notElem` rest -> return ()
    _ -> die $ "GHC.getInstalledPackages: the global package db must be "
            ++ "specified first and cannot be specified multiple times"
-- GHC < 6.10 put "$topdir/include/mingw" in rts's installDirs. This
-- breaks when you want to use a different gcc, so we need to filter
-- it out.
-- | Drop any include dir ending in @mingw@ from a package record, so a
-- different gcc can be used (see the comment above for the rationale).
removeMingwIncludeDir :: InstalledPackageInfo -> InstalledPackageInfo
removeMingwIncludeDir pkg =
    pkg { InstalledPackageInfo.includeDirs = keep }
  where
    keep = [ dir | dir <- InstalledPackageInfo.includeDirs pkg
                 , not ("mingw" `isSuffixOf` dir) ]
-- | Get the packages from specific PackageDBs, not cumulative.
--
-- Modern ghc (>= 6.9): run "ghc-pkg dump" once per requested db and
-- parse the records out of its output.
getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramConfiguration
                      -> IO [(PackageDB, [InstalledPackageInfo])]
getInstalledPackages' verbosity packagedbs conf
  | ghcVersion >= Version [6,9] [] =
  sequence
    [ do str <- rawSystemProgramStdoutConf verbosity ghcPkgProgram conf
                  ["dump", packageDbGhcPkgFlag packagedb]
           `catchExit` \_ -> die $ "ghc-pkg dump failed"
         case parsePackages str of
           Left ok -> return (packagedb, ok)
           _       -> die "failed to parse output of 'ghc-pkg dump'"
    | packagedb <- packagedbs ]
  where
    -- NB: inverted convention here — 'Left' is the success case and
    -- 'Right' carries the accumulated parse error messages.
    parsePackages str =
      let parsed = map parseInstalledPackageInfo (splitPkgs str)
       in case [ msg | ParseFailed msg <- parsed ] of
            []   -> Left [ pkg | ParseOk _ pkg <- parsed ]
            msgs -> Right msgs
    Just ghcProg = lookupProgram ghcProgram conf
    Just ghcVersion = programVersion ghcProg
    -- "ghc-pkg dump" separates package records with lines of "---"
    splitPkgs :: String -> [String]
    splitPkgs = map unlines . splitWith ("---" ==) . lines
      where
        splitWith :: (a -> Bool) -> [a] -> [[a]]
        splitWith p xs = ys : case zs of
                                []   -> []
                                _:ws -> splitWith p ws
          where (ys,zs) = break p xs
    packageDbGhcPkgFlag GlobalPackageDB = "--global"
    packageDbGhcPkgFlag UserPackageDB = "--user"
    packageDbGhcPkgFlag (SpecificPackageDB path) = "--package-conf=" ++ path
-- Older ghc: parse the header lines of "ghc-pkg list" (which end in ':')
-- to locate the package db files, then read each file directly using the
-- version-appropriate Read instance.
getInstalledPackages' verbosity packagedbs conf = do
    str <- rawSystemProgramStdoutConf verbosity ghcPkgProgram conf ["list"]
    let pkgFiles = [ init line | line <- lines str, last line == ':' ]
        dbFile packagedb = case (packagedb, pkgFiles) of
          (GlobalPackageDB, global:_) -> return $ Just global
          (UserPackageDB, _global:user:_) -> return $ Just user
          -- the user db is not guaranteed to exist yet
          (UserPackageDB, _global:_) -> return $ Nothing
          (SpecificPackageDB specific, _) -> return $ Just specific
          _ -> die "cannot read ghc-pkg package listing"
    pkgFiles' <- mapM dbFile packagedbs
    sequence [ withFileContents file $ \content -> do
                  pkgs <- readPackages file content
                  return (db, pkgs)
             | (db , Just file) <- zip packagedbs pkgFiles' ]
  where
    -- Depending on the version of ghc we use a different type's Read
    -- instance to parse the package file and then convert.
    -- It's a bit yuck. But that's what we get for using Read/Show.
    readPackages
      | ghcVersion >= Version [6,4,2] []
      = \file content -> case reads content of
          [(pkgs, _)] -> return (map IPI642.toCurrent pkgs)
          _           -> failToRead file
      | otherwise
      = \file content -> case reads content of
          [(pkgs, _)] -> return (map IPI641.toCurrent pkgs)
          _           -> failToRead file
    Just ghcProg = lookupProgram ghcProgram conf
    Just ghcVersion = programVersion ghcProg
    failToRead file = die $ "cannot read ghc package database " ++ file
-- | Replace a leading @$topdir@ in every path-valued field of an
-- 'InstalledPackageInfo' with the actual GHC top directory.
substTopDir :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo
substTopDir topDir ipo
  = ipo {
      InstalledPackageInfo.importDirs
        = map subst (InstalledPackageInfo.importDirs ipo),
      InstalledPackageInfo.libraryDirs
        = map subst (InstalledPackageInfo.libraryDirs ipo),
      InstalledPackageInfo.includeDirs
        = map subst (InstalledPackageInfo.includeDirs ipo),
      InstalledPackageInfo.frameworkDirs
        = map subst (InstalledPackageInfo.frameworkDirs ipo),
      InstalledPackageInfo.haddockInterfaces
        = map subst (InstalledPackageInfo.haddockInterfaces ipo),
      InstalledPackageInfo.haddockHTMLs
        = map subst (InstalledPackageInfo.haddockHTMLs ipo)
    }
  where
    -- "$topdir" is exactly 7 characters; shorter paths fall through.
    subst path = case splitAt 7 path of
                   ("$topdir", rest) -> topDir ++ rest
                   _                 -> path
-- -----------------------------------------------------------------------------
-- Building
-- |Building for GHC. If .ghc-packages exists and is readable, add
-- it to the command-line.
build :: PackageDescription -> LocalBuildInfo -> Verbosity -> IO ()
build pkg_descr lbi verbosity = do
  -- Shorthands for the build dir, package id and the enabled library
  -- "ways" (vanilla / profiling / shared / GHCi).
  let pref = buildDir lbi
      pkgid = packageId pkg_descr
      runGhcProg = rawSystemProgramConf verbosity ghcProgram (withPrograms lbi)
      ifVanillaLib forceVanilla = when (forceVanilla || withVanillaLib lbi)
      ifProfLib = when (withProfLib lbi)
      ifSharedLib = when (withSharedLib lbi)
      ifGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi)
  -- Build lib
  withLib pkg_descr () $ \lib -> do
    info verbosity "Building library..."
    libBi <- hackThreadedFlag verbosity
               (compiler lbi) (withProfLib lbi) (libBuildInfo lib)
    let libTargetDir = pref
        forceVanillaLib = TemplateHaskell `elem` extensions libBi
        -- TH always needs vanilla libs, even when building for profiling
    createDirectoryIfMissingVerbose verbosity True libTargetDir
    -- TODO: do we need to put hs-boot files into place for mutually recursive modules?
    let ghcArgs =
             ["-package-name", display pkgid ]
          ++ constructGHCCmdLine lbi libBi libTargetDir verbosity
          ++ map display (libModules pkg_descr)
        ghcArgsProf = ghcArgs
          ++ ["-prof",
              "-hisuf", "p_hi",
              "-osuf", "p_o"
             ]
          ++ ghcProfOptions libBi
        ghcArgsShared = ghcArgs
          ++ ["-dynamic",
              "-hisuf", "dyn_hi",
              "-osuf", "dyn_o", "-fPIC"
             ]
          ++ ghcSharedOptions libBi
    unless (null (libModules pkg_descr)) $
      do ifVanillaLib forceVanillaLib (runGhcProg ghcArgs)
         ifProfLib (runGhcProg ghcArgsProf)
         ifSharedLib (runGhcProg ghcArgsShared)
    -- build any C sources
    unless (null (cSources libBi)) $ do
       info verbosity "Building C Sources..."
       sequence_ [do let (odir,args) = constructCcCmdLine lbi libBi pref
                                          filename verbosity
                     createDirectoryIfMissingVerbose verbosity True odir
                     runGhcProg args
                     ifSharedLib (runGhcProg (args ++ ["-fPIC", "-osuf dyn_o"]))
                 | filename <- cSources libBi]
    -- link:
    info verbosity "Linking..."
    let cObjs = map (`replaceExtension` objExtension) (cSources libBi)
        cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) (cSources libBi)
        vanillaLibFilePath = libTargetDir </> mkLibName pkgid
        profileLibFilePath = libTargetDir </> mkProfLibName pkgid
        sharedLibFilePath = libTargetDir </> mkSharedLibName pkgid
                                              (compilerId (compiler lbi))
        ghciLibFilePath = libTargetDir </> mkGHCiLibName pkgid
    -- collect any <Module>_stub.o files ghc produced, for each way
    -- (NOTE(review): presumably generated for foreign exports — confirm)
    stubObjs <- fmap catMaybes $ sequence
      [ findFileWithExtension [objExtension] [libTargetDir]
          (ModuleName.toFilePath x ++"_stub")
      | x <- libModules pkg_descr ]
    stubProfObjs <- fmap catMaybes $ sequence
      [ findFileWithExtension ["p_" ++ objExtension] [libTargetDir]
          (ModuleName.toFilePath x ++"_stub")
      | x <- libModules pkg_descr ]
    stubSharedObjs <- fmap catMaybes $ sequence
      [ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir]
          (ModuleName.toFilePath x ++"_stub")
      | x <- libModules pkg_descr ]
    hObjs <- getHaskellObjects pkg_descr libBi lbi
                  pref objExtension True
    hProfObjs <-
      if (withProfLib lbi)
              then getHaskellObjects pkg_descr libBi lbi
                        pref ("p_" ++ objExtension) True
              else return []
    hSharedObjs <-
      if (withSharedLib lbi)
              then getHaskellObjects pkg_descr libBi lbi
                        pref ("dyn_" ++ objExtension) False
              else return []
    unless (null hObjs && null cObjs && null stubObjs) $ do
      -- first remove library files if they exist
      sequence_
        [ removeFile libFilePath `catchIO` \_ -> return ()
        | libFilePath <- [vanillaLibFilePath, profileLibFilePath
                         ,sharedLibFilePath, ghciLibFilePath] ]
      let arVerbosity | verbosity >= deafening = "v"
                      | verbosity >= normal = ""
                      | otherwise = "c"
          arArgs = ["q"++ arVerbosity]
            ++ [vanillaLibFilePath]
          arObjArgs =
               hObjs
            ++ map (pref </>) cObjs
            ++ stubObjs
          arProfArgs = ["q"++ arVerbosity]
            ++ [profileLibFilePath]
          arProfObjArgs =
               hProfObjs
            ++ map (pref </>) cObjs
            ++ stubProfObjs
          ldArgs = ["-r"]
            ++ ["-o", ghciLibFilePath <.> "tmp"]
          ldObjArgs =
               hObjs
            ++ map (pref </>) cObjs
            ++ stubObjs
          ghcSharedObjArgs =
               hSharedObjs
            ++ map (pref </>) cSharedObjs
            ++ stubSharedObjs
          -- After the relocation lib is created we invoke ghc -shared
          -- with the dependencies spelled out as -package arguments
          -- and ghc invokes the linker with the proper library paths
          ghcSharedLinkArgs =
              [ "-no-auto-link-packages",
                "-shared",
                "-dynamic",
                "-o", sharedLibFilePath ]
            ++ ghcSharedObjArgs
            ++ ["-package-name", display pkgid ]
            ++ (concat [ ["-package", display pkg] | pkg <- packageDeps lbi ])
            ++ ["-l"++extraLib | extraLib <- extraLibs libBi]
            ++ ["-L"++extraLibDir | extraLibDir <- extraLibDirs libBi]
          runLd ldLibName args = do
            exists <- doesFileExist ldLibName
            -- This method is called iteratively by xargs. The
            -- output goes to <ldLibName>.tmp, and any existing file
            -- named <ldLibName> is included when linking. The
            -- output is renamed to <libName>.
            rawSystemProgramConf verbosity ldProgram (withPrograms lbi)
              (args ++ if exists then [ldLibName] else [])
            renameFile (ldLibName <.> "tmp") ldLibName
          runAr = rawSystemProgramConf verbosity arProgram (withPrograms lbi)
           --TODO: discover this at configure time or runtime on unix
           -- The value is 32k on Windows and posix specifies a minimum of 4k
           -- but all sensible unixes use more than 4k.
           -- we could use getSysVar ArgumentLimit but that's in the unix lib
          maxCommandLineSize = 30 * 1024
      ifVanillaLib False $ xargs maxCommandLineSize
        runAr arArgs arObjArgs
      ifProfLib $ xargs maxCommandLineSize
        runAr arProfArgs arProfObjArgs
      ifGHCiLib $ xargs maxCommandLineSize
        (runLd ghciLibFilePath) ldArgs ldObjArgs
      ifSharedLib $ runGhcProg ghcSharedLinkArgs
  -- build any executables
  withExe pkg_descr $ \exe@Executable { exeName = exeName', modulePath = modPath } -> do
    info verbosity $ "Building executable: " ++ exeName' ++ "..."
    exeBi <- hackThreadedFlag verbosity
               (compiler lbi) (withProfExe lbi) (buildInfo exe)
    -- exeNameReal, the name that GHC really uses (with .exe on Windows)
    let exeNameReal = exeName' <.>
                      (if null $ takeExtension exeName' then exeExtension else "")
    let targetDir = pref </> exeName'
    let exeDir = targetDir </> (exeName' ++ "-tmp")
    createDirectoryIfMissingVerbose verbosity True targetDir
    createDirectoryIfMissingVerbose verbosity True exeDir
    -- TODO: do we need to put hs-boot files into place for mutually recursive modules?
    -- FIX: what about exeName.hi-boot?
    -- build executables
    unless (null (cSources exeBi)) $ do
      info verbosity "Building C Sources."
      sequence_ [do let (odir,args) = constructCcCmdLine lbi exeBi
                                         exeDir filename verbosity
                    createDirectoryIfMissingVerbose verbosity True odir
                    runGhcProg args
                | filename <- cSources exeBi]
    srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath
    let cObjs = map (`replaceExtension` objExtension) (cSources exeBi)
    let binArgs linkExe profExe =
               (if linkExe
                   then ["-o", targetDir </> exeNameReal]
                   else ["-c"])
            ++ constructGHCCmdLine lbi exeBi exeDir verbosity
            ++ [exeDir </> x | x <- cObjs]
            ++ [srcMainFile]
            ++ ["-optl" ++ opt | opt <- PD.ldOptions exeBi]
            ++ ["-l"++lib | lib <- extraLibs exeBi]
            ++ ["-L"++libDir | libDir <- extraLibDirs exeBi]
            ++ concat [["-framework", f] | f <- PD.frameworks exeBi]
            ++ if profExe
                  then ["-prof",
                        "-hisuf", "p_hi",
                        "-osuf", "p_o"
                       ] ++ ghcProfOptions exeBi
                  else []
    -- For building exe's for profiling that use TH we actually
    -- have to build twice, once without profiling and then again
    -- with profiling. This is because the code that TH needs to
    -- run at compile time needs to be the vanilla ABI so it can
    -- be loaded up and run by the compiler.
    when (withProfExe lbi && TemplateHaskell `elem` extensions exeBi)
       (runGhcProg (binArgs False False))
    runGhcProg (binArgs True (withProfExe lbi))
-- | Filter the "-threaded" flag when profiling as it does not
-- work with ghc-6.8 and older.
hackThreadedFlag :: Verbosity -> Compiler -> Bool -> BuildInfo -> IO BuildInfo
hackThreadedFlag verbosity comp prof bi
  | prof && compilerVersion comp < Version [6, 10] []
         && "-threaded" `elem` hcOptions GHC bi = do
      -- Warn once, then strip the flag from the GHC option set only.
      warn verbosity $ "The ghc flag '-threaded' is not compatible with "
                    ++ "profiling in ghc-6.8 and older. It will be disabled."
      return bi { options = dropThreaded (options bi) }
  | otherwise = return bi
  where
    dropThreaded hcoptss =
      [ (hc, if hc == GHC then filter (/= "-threaded") opts else opts)
      | (hc, opts) <- hcoptss ]
-- when using -split-objs, we need to search for object files in the
-- Module_split directory for each module.
getHaskellObjects :: PackageDescription -> BuildInfo -> LocalBuildInfo
                  -> FilePath -> String -> Bool -> IO [FilePath]
getHaskellObjects pkg_descr _ lbi pref wanted_obj_ext allow_split_objs
  | splitObjs lbi && allow_split_objs = do
      -- One <Module>_split directory per library module.
      let splitDirs = [ pref </> (ModuleName.toFilePath m ++ "_split")
                      | m <- libModules pkg_descr ]
      contents <- mapM getDirectoryContents splitDirs
      -- Keep only entries carrying the requested object extension.
      return [ dir </> obj
             | (entries, dir) <- zip contents splitDirs
             , obj <- entries
             , takeExtension obj == '.' : wanted_obj_ext ]
  | otherwise =
      -- Without -split-objs there is exactly one object per module.
      return [ pref </> ModuleName.toFilePath m <.> wanted_obj_ext
             | m <- libModules pkg_descr ]
-- | Assemble the full @ghc --make@ command line for a component.
constructGHCCmdLine
  :: LocalBuildInfo
  -> BuildInfo
  -> FilePath
  -> Verbosity
  -> [String]
constructGHCCmdLine lbi bi odir verbosity =
  -- Unsupported extensions have already been checked by configure.
  "--make" : (ghcVerbosityOptions verbosity ++ ghcOptions lbi bi odir)
-- | Map Cabal verbosity onto ghc's -v / -w flags.
ghcVerbosityOptions :: Verbosity -> [String]
ghcVerbosityOptions v
  | v >= deafening = ["-v"]        -- full compiler chatter
  | v >= normal    = []            -- ghc's default output
  | otherwise      = ["-w", "-v0"] -- quiet: no warnings, no progress
-- | The common ghc flag set used for both building and the Makefile path.
-- Flag group order is significant and preserved deliberately.
ghcOptions :: LocalBuildInfo -> BuildInfo -> FilePath -> [String]
ghcOptions lbi bi odir = concat
    [ ["-hide-all-packages"]
    , packageDbFlags
    , if splitObjs lbi then ["-split-objs"] else []
    , ["-i"]
    , ["-i" ++ odir]
    , ["-i" ++ dir | dir <- nub (hsSourceDirs bi)]
    , ["-i" ++ autogenModulesDir lbi]
    , ["-I" ++ autogenModulesDir lbi]
    , ["-I" ++ odir]
    , ["-I" ++ dir | dir <- PD.includeDirs bi]
    , ["-optP" ++ opt | opt <- cppOptions bi]
    , ["-optP-include", "-optP" ++ (autogenModulesDir lbi </> cppHeaderName)]
    , ["-#include \"" ++ inc ++ "\"" | inc <- PD.includes bi]
    , ["-odir", odir, "-hidir", odir]
      -- -stubdir appeared in ghc 6.8
    , if compilerVersion comp >= Version [6,8] []
        then ["-stubdir", odir] else []
    , concat [ ["-package", display pkg] | pkg <- packageDeps lbi ]
    , optimisationFlags
    , hcOptions GHC bi
    , extensionsToFlags comp (extensions bi)
    ]
  where
    comp = compiler lbi
    packageDbFlags = case withPackageDB lbi of
      GlobalPackageDB      -> ["-no-user-package-conf"]
      UserPackageDB        -> []
      SpecificPackageDB db -> ["-no-user-package-conf"
                              ,"-package-conf", db]
    optimisationFlags = case withOptimization lbi of
      NoOptimisation      -> []
      NormalOptimisation  -> ["-O"]
      MaximumOptimisation -> ["-O2"]
-- | Build the argument list for compiling a single C source via ghc,
-- returning the object directory alongside the flags.
constructCcCmdLine :: LocalBuildInfo -> BuildInfo -> FilePath
                   -> FilePath -> Verbosity -> (FilePath,[String])
constructCcCmdLine lbi bi pref filename verbosity = (odir, args)
  where
    -- ghc 6.4.1 fixed a bug in -odir handling for C compilations,
    -- so older compilers need the source's directory appended.
    odir | compilerVersion (compiler lbi) >= Version [6,4,1] [] = pref
         | otherwise = pref </> takeDirectory filename
    args = ghcCcOptions lbi bi odir
        ++ (if verbosity >= deafening then ["-v"] else [])
        ++ ["-c",filename]
-- | Flags for compiling C sources through ghc. Group order preserved.
ghcCcOptions :: LocalBuildInfo -> BuildInfo -> FilePath -> [String]
ghcCcOptions lbi bi odir = concat
    [ ["-I" ++ dir | dir <- PD.includeDirs bi]
    , case withPackageDB lbi of
        SpecificPackageDB db -> ["-package-conf", db]
        _                    -> []
    , concat [ ["-package", display pkg] | pkg <- packageDeps lbi ]
    , ["-optc" ++ opt | opt <- PD.ccOptions bi]
      -- any optimisation level other than "none" maps to -optc-O2
    , case withOptimization lbi of
        NoOptimisation -> []
        _              -> ["-optc-O2"]
    , ["-odir", odir]
    ]
-- | Name of the single GHCi object archive, e.g. @HSfoo-1.0.o@.
-- (<.> binds tighter than ++, so the extension attaches to the display name.)
mkGHCiLibName :: PackageIdentifier -> String
mkGHCiLibName pkgid = "HS" ++ (display pkgid <.> "o")
-- -----------------------------------------------------------------------------
-- Building a Makefile

-- | Generate a GHC-specific Makefile for building the package's library.
-- Dies rather than overwrite an existing file at the target path.
-- NOTE(review): the 'Just lib = library pkg_descr' pattern is partial — this
-- function presumably is only called for packages with a library; confirm at
-- call sites.
makefile :: PackageDescription -> LocalBuildInfo -> MakefileFlags -> IO ()
makefile pkg_descr lbi flags = do
  let file = fromFlagOrDefault "Makefile"(makefileFile flags)
      verbosity = fromFlag (makefileVerbosity flags)
  targetExists <- doesFileExist file
  when targetExists $
    die ("Not overwriting existing copy of " ++ file)
  h <- openFile file WriteMode
  let Just lib = library pkg_descr
      bi = libBuildInfo lib
      packageIdStr = display (packageId pkg_descr)
  -- ar and ld are required tools for the generated Makefile's link steps.
  (arProg, _) <- requireProgram verbosity arProgram anyVersion
                   (withPrograms lbi)
  (ldProg, _) <- requireProgram verbosity ldProgram anyVersion
                   (withPrograms lbi)
  let builddir = buildDir lbi
      Just ghcProg = lookupProgram ghcProgram (withPrograms lbi)
      Just ghcVersion = programVersion ghcProg
  -- Variable assignments emitted at the top of the Makefile.
  let decls = [
        ("modules", unwords (map display (PD.exposedModules lib ++ otherModules bi))),
        ("GHC", programPath ghcProg),
        ("GHC_VERSION", (display (compilerVersion (compiler lbi)))),
        ("VANILLA_WAY", if withVanillaLib lbi then "YES" else "NO"),
        ("WAYS", (if withProfLib lbi then "p " else "") ++ (if withSharedLib lbi then "dyn" else "")),
        ("odir", builddir),
        ("package", packageIdStr),
        ("GHC_OPTS", unwords $ programArgs ghcProg
                     ++ ["-package-name", packageIdStr ]
                     ++ ghcOptions lbi bi (buildDir lbi)),
        ("MAKEFILE", file),
        ("C_SRCS", unwords (cSources bi)),
        ("GHC_CC_OPTS", unwords (ghcCcOptions lbi bi (buildDir lbi))),
        ("GHCI_LIB", if withGHCiLib lbi
                     then builddir </> mkGHCiLibName (packageId pkg_descr)
                     else ""),
        ("soext", dllExtension),
        ("LIB_LD_OPTS", unwords (["-package-name", packageIdStr]
                                 ++ concat [ ["-package", display pkg] | pkg <- packageDeps lbi ]
                                 ++ ["-l"++libName | libName <- extraLibs bi]
                                 ++ ["-L"++libDir | libDir <- extraLibDirs bi])),
        ("AR", programPath arProg),
        ("LD", programPath ldProg ++ concat [" " ++ arg | arg <- programArgs ldProg ]),
        -- -dep-makefile replaced the -optdep flags in ghc 6.9.
        ("GENERATE_DOT_DEPEND", if ghcVersion >= Version [6,9] []
                                then "-dep-makefile $(odir)/.depend"
                                else "-optdep-f -optdep$(odir)/.depend")
        ]
      -- Suffix rules for one hs-source-dir; instantiated per directory below.
      mkRules srcdir = [
        "$(odir_)%.$(osuf) : " ++ srcdir ++ "/%.hs",
        "\t$(GHC) $(GHC_OPTS) -c $< -o $@ -ohi $(basename $@).$(hisuf)",
        "",
        "$(odir_)%.$(osuf) : " ++ srcdir ++ "/%.lhs",
        "\t$(GHC) $(GHC_OPTS) -c $< -o $@ -ohi $(basename $@).$(hisuf)",
        "",
        "$(odir_)%.$(osuf) : " ++ srcdir ++ "/%.$(way_)s",
        "\t@$(RM) $@",
        "\t$(GHC) $(GHC_CC_OPTS) -c $< -o $@",
        "",
        "$(odir_)%.$(osuf) : " ++ srcdir ++ "/%.S",
        "\t@$(RM) $@",
        "\t$(GHC) $(GHC_CC_OPTS) -c $< -o $@",
        "",
        "$(odir_)%.$(osuf)-boot : " ++ srcdir ++ "/%.hs-boot",
        "\t$(GHC) $(GHC_OPTS) -c $< -o $@ -ohi $(basename $@).$(way_)hi-boot",
        "",
        "$(odir_)%.$(osuf)-boot : " ++ srcdir ++ "/%.lhs-boot",
        "\t$(GHC) $(GHC_OPTS) -c $< -o $@ -ohi $(basename $@).$(way_)hi-boot",
        ""]
      -- We used to do this with $(eval ...) and $(call ...) in the
      -- Makefile, but make 3.79.1 (which is what comes with msys)
      -- doesn't understand $(eval ...), so now we just stick the
      -- expanded loop directly into the Makefile we generate.
      vars = ["WAY_p_OPTS = -prof",
              "WAY_dyn_OPTS = -fPIC -dynamic",
              "WAY_dyn_CC_OPTS = -fPIC",
              "",
              "ifneq \"$(way)\" \"\"",
              "way_ := $(way)_",
              "_way := _$(way)",
              "GHC_OPTS += $(WAY_$(way)_OPTS)",
              "GHC_OPTS += -hisuf $(way_)hi -hcsuf $(way_)hc -osuf $(osuf)",
              "GHC_CC_OPTS += $(WAY_$(way)_CC_OPTS)",
              "endif",
              "",
              "osuf = $(way_)o",
              "hisuf = $(way_)hi",
              "",
              "ifneq \"$(odir)\" \"\"",
              "odir_ = $(odir)/",
              "else",
              "odir_ =",
              "endif",
              ""]
      rules = concatMap mkRules (hsSourceDirs bi)
  hPutStrLn h "# DO NOT EDIT! Automatically generated by Cabal\n"
  hPutStrLn h $ unlines (map (\(a,b)-> a ++ " = " ++ munge b) decls)
  hPutStrLn h $ unlines vars
  hPutStrLn h makefileTemplate
  hPutStrLn h $ unlines rules
  -- put the extra suffix rules *after* the suffix rules in the template.
  -- the suffix rules in the tempate handle source files that have been
  -- preprocessed and generated into distdir, whereas the suffix rules
  -- here point to the source dir. We want the distdir to override the
  -- source dir, just in case the user has left a preprocessed version
  -- of a source file lying around in the source dir. Also this matches
  -- the behaviour of 'cabal build'.
  hClose h
  where
    -- Escape '#' (a make comment char) and flip backslashes to forward
    -- slashes so Windows paths work inside the Makefile.
    munge "" = ""
    munge ('#':s) = '\\':'#':munge s
    munge ('\\':s) = '/':munge s
    -- for Windows, we want to use forward slashes in our pathnames in the Makefile
    munge (c:s) = c : munge s
-- -----------------------------------------------------------------------------
-- Installing

-- |Install executables for GHC. For each executable in the package this
-- either copies the binary directly into the bin directory, or — when the
-- wrapper option is set — installs the real binary into libexecdir and
-- generates an executable wrapper script in bindir from a @<exe>.wrapper@
-- template read from the current directory.
installExe :: CopyFlags -- ^verbosity
           -> LocalBuildInfo
           -> InstallDirs FilePath -- ^Where to copy the files to
           -> InstallDirs FilePath -- ^Where to pretend the files are (i.e. ignores --destdir)
           -> FilePath  -- ^Build location
           -> (FilePath, FilePath)  -- ^Executable (prefix,suffix)
           -> PackageDescription
           -> IO ()
installExe flags lbi installDirs pretendInstallDirs buildPref (progprefix, progsuffix) pkg_descr
    = do let verbosity = fromFlag (copyVerbosity flags)
             useWrapper = fromFlag (copyUseWrapper flags)
             binDir = bindir installDirs
         createDirectoryIfMissingVerbose verbosity True binDir
         withExe pkg_descr $ \Executable { exeName = e } -> do
             let exeFileName = e <.> exeExtension
                 -- installed name = prefix ++ exe name ++ suffix
                 fixedExeBaseName = progprefix ++ e ++ progsuffix
                 -- copies the built binary to dest and strips it
                 installBinary dest = do
                     installExecutableFile verbosity
                       (buildPref </> e </> exeFileName)
                       (dest <.> exeExtension)
                     stripExe verbosity lbi exeFileName (dest <.> exeExtension)
             if useWrapper
                 then do
                     let libExecDir = libexecdir installDirs
                         pretendLibExecDir = libexecdir pretendInstallDirs
                         -- wrapper refers to the final (post-destdir) path
                         pretendAbsExeFileName =
                           pretendLibExecDir </> fixedExeBaseName <.> exeExtension
                         wrapperFileName = binDir </> fixedExeBaseName
                         myPkgId = packageId (PD.package (localPkgDescr lbi))
                         myCompilerId = compilerId (compiler lbi)
                         env = (ExecutableNameVar,
                                toPathTemplate pretendAbsExeFileName)
                             : fullPathTemplateEnv myPkgId myCompilerId
                                                   pretendInstallDirs
                     createDirectoryIfMissingVerbose verbosity True libExecDir
                     installBinary (libExecDir </> fixedExeBaseName)
                     -- XXX Should probably look somewhere more sensible
                     -- than just . for wrappers
                     wrapperTemplate <- readFile (e <.> "wrapper")
                     let wrapper = fromPathTemplate
                                 $ substPathTemplate env
                                 $ toPathTemplate wrapperTemplate
                     writeFileAtomic wrapperFileName wrapper
                     setFileExecutable wrapperFileName
                 else do
                     installBinary (binDir </> fixedExeBaseName)
-- | Strip an installed executable if stripping is enabled. Missing 'strip'
-- is only worth a warning (and not even that on Windows, where it is
-- typically absent anyway).
stripExe :: Verbosity -> LocalBuildInfo -> FilePath -> FilePath -> IO ()
stripExe verbosity lbi name path = when (stripExes lbi) $
  case lookupProgram stripProgram (withPrograms lbi) of
    Just stripProg -> rawSystemProgram verbosity stripProg stripArgs
    Nothing        -> unless (buildOS == Windows) $
      -- Don't bother warning on windows, we don't expect them to
      -- have the strip program anyway.
      warn verbosity $ "Unable to strip executable '" ++ name
                    ++ "' (missing the 'strip' program)"
  where
    stripArgs = path : osFlags
    osFlags = case buildOS of
      -- By default, stripping the ghc binary on at least some OS X
      -- installations causes: HSbase-3.0.o: unknown symbol `_environ'"
      -- The -x flag fixes that.
      OSX -> ["-x"]
      _   -> []
-- |Install for ghc, .hi, .a and, if --with-ghci given, .o
--
-- Copies interface files and the built library variants (vanilla,
-- profiling, GHCi, shared) into the install location, then refreshes the
-- static archives' symbol indexes. Does nothing when copying in place.
installLib :: CopyFlags -- ^verbosity
           -> LocalBuildInfo
           -> FilePath  -- ^install location
           -> FilePath  -- ^install location for dynamic librarys
           -> FilePath  -- ^Build location
           -> PackageDescription -> IO ()
installLib flags lbi targetDir dynlibTargetDir builtDir
           pkg@PackageDescription{library=Just lib} =
    unless (fromFlag $ copyInPlace flags) $ do
        -- copy .hi files over:
        let verbosity = fromFlag (copyVerbosity flags)
            copy src dst n = do
              createDirectoryIfMissingVerbose verbosity True dst
              installOrdinaryFile verbosity (src </> n) (dst </> n)
            copyModuleFiles ext =
              findModuleFiles [builtDir] [ext] (libModules pkg)
                >>= installOrdinaryFiles verbosity targetDir
        ifVanilla $ copyModuleFiles "hi"
        ifProf    $ copyModuleFiles "p_hi"
        -- copy the built library files over:
        ifVanilla $ copy builtDir targetDir vanillaLibName
        ifProf    $ copy builtDir targetDir profileLibName
        ifGHCi    $ copy builtDir targetDir ghciLibName
        ifShared  $ copy builtDir dynlibTargetDir sharedLibName
        -- run ranlib if necessary:
        ifVanilla $ updateLibArchive verbosity lbi
                                     (targetDir </> vanillaLibName)
        ifProf    $ updateLibArchive verbosity lbi
                                     (targetDir </> profileLibName)
  where
    vanillaLibName = mkLibName pkgid
    profileLibName = mkProfLibName pkgid
    ghciLibName    = mkGHCiLibName pkgid
    sharedLibName  = mkSharedLibName pkgid (compilerId (compiler lbi))

    pkgid = packageId pkg

    -- A library "exists" unless it has neither Haskell modules nor C
    -- sources ($ makes 'not' apply to the whole conjunction).
    hasLib    = not $ null (libModules pkg)
                   && null (cSources (libBuildInfo lib))
    ifVanilla = when (hasLib && withVanillaLib lbi)
    ifProf    = when (hasLib && withProfLib    lbi)
    ifGHCi    = when (hasLib && withGHCiLib    lbi)
    ifShared  = when (hasLib && withSharedLib  lbi)

-- Fix: the old message referred to a nonexistent "installLibGHC".
installLib _ _ _ _ _ PackageDescription{library=Nothing}
    = die $ "Internal Error. installLib called with no library."
-- | use @ranlib@ or @ar -s@ to build an index. This is necessary on systems
-- like MacOS X. If we can't find those, don't worry too much about it.
--
updateLibArchive :: Verbosity -> LocalBuildInfo -> FilePath -> IO ()
updateLibArchive verbosity lbi path =
  -- Prefer ranlib; fall back to "ar -s"; otherwise just warn.
  case (lookupProgram ranlibProgram progdb, lookupProgram arProgram progdb) of
    (Just ranlib, _)   -> rawSystemProgram verbosity ranlib [path]
    (Nothing, Just ar) -> rawSystemProgram verbosity ar ["-s", path]
    (Nothing, Nothing) -> warn verbosity $
                   "Unable to generate a symbol index for the static "
                ++ "library '" ++ path
                ++ "' (missing the 'ranlib' and 'ar' programs)"
  where
    progdb = withPrograms lbi
| dcreager/cabal | Distribution/Simple/GHC.hs | bsd-3-clause | 48,956 | 0 | 27 | 15,245 | 9,837 | 5,178 | 4,659 | 776 | 12 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
module Fragment.IsoRec.Helpers (
tyRec
, tmFold
, tmUnfold
) where
import Bound (Bound)
import Control.Lens (review)
import Ast.Type
import Ast.Term
import Data.Bitransversable
import Fragment.IsoRec.Ast.Type
import Fragment.IsoRec.Ast.Term
-- | Build a recursive type, abstracting the given variable over the body.
tyRec :: (Eq a, Bound ki, Bitransversable ki, AsTyIsoRec ki ty)
      => a
      -> Type ki ty a
      -> Type ki ty a
tyRec v = review _TyRec . abstractTy v
-- | Construct a fold term from a type annotation and a body term.
tmFold :: AsTmIsoRec ki ty pt tm => Type ki ty a -> Term ki ty pt tm a -> Term ki ty pt tm a
tmFold ty tm = review _TmFold (ty, tm)
-- | Construct an unfold term from a type annotation and a body term.
tmUnfold :: AsTmIsoRec ki ty pt tm => Type ki ty a -> Term ki ty pt tm a -> Term ki ty pt tm a
tmUnfold ty tm = review _TmUnfold (ty, tm)
| dalaing/type-systems | src/Fragment/IsoRec/Helpers.hs | bsd-3-clause | 830 | 0 | 8 | 185 | 279 | 146 | 133 | 20 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Snap.Snaplet.Internal.Initializer
( addPostInitHook
, addPostInitHookBase
, toSnapletHook
, bracketInit
, modifyCfg
, nestSnaplet
, embedSnaplet
, makeSnaplet
, nameSnaplet
, onUnload
, addRoutes
, wrapSite
, runInitializer
, runSnaplet
, combineConfig
, serveSnaplet
, serveSnapletNoArgParsing
, loadAppConfig
, printInfo
, getRoutes
, getEnvironment
, modifyMaster
) where
------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Concurrent.MVar (MVar, modifyMVar_, newEmptyMVar,
putMVar, readMVar)
import Control.Exception.Lifted (SomeException, catch, try)
import Control.Lens (ALens', cloneLens, over, set,
storing, (^#))
import Control.Monad (Monad (..), join, liftM, unless,
when, (=<<))
import Control.Monad.Reader (ask)
import Control.Monad.State (get, modify)
import Control.Monad.Trans (lift, liftIO)
import Control.Monad.Trans.Writer hiding (pass)
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import Data.Configurator (Worth (..), addToConfig, empty,
loadGroups, subconfig)
import qualified Data.Configurator.Types as C
import Data.IORef (IORef, atomicModifyIORef,
newIORef, readIORef)
import Data.Maybe (Maybe (..), fromJust, fromMaybe,
isNothing)
import Data.Text (Text)
import qualified Data.Text as T
import Prelude (Bool (..), Either (..), Eq (..),
String, concat, concatMap,
const, either,
error, filter, flip, fst, id,
map, not, show, ($), ($!), (++),
(.))
import Snap.Core (Snap, liftSnap, route)
import Snap.Http.Server (Config, completeConfig,
getCompression, getErrorHandler,
getOther, getVerbose, httpServe)
import Snap.Util.GZip (withCompression)
import System.Directory (copyFile,
createDirectoryIfMissing,
doesDirectoryExist,
getCurrentDirectory)
import System.Directory.Tree (DirTree (..), FileName, buildL,
dirTree, readDirectoryWith)
import System.FilePath.Posix (dropFileName, makeRelative,
(</>))
import System.IO (FilePath, IO, hPutStrLn, stderr)
------------------------------------------------------------------------------
import Snap.Snaplet.Config (AppConfig, appEnvironment,
commandLineAppConfig)
import qualified Snap.Snaplet.Internal.Lensed as L
import qualified Snap.Snaplet.Internal.LensT as LT
import Snap.Snaplet.Internal.Types
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- | 'get' for InitializerState.
iGet :: Initializer b v (InitializerState b)
iGet = Initializer $ LT.getBase
------------------------------------------------------------------------------
-- | 'modify' for InitializerState.
iModify :: (InitializerState b -> InitializerState b) -> Initializer b v ()
iModify f = Initializer $ do
b <- LT.getBase
LT.putBase $ f b
------------------------------------------------------------------------------
-- | 'gets' for InitializerState.
iGets :: (InitializerState b -> a) -> Initializer b v a
iGets f = Initializer $ do
b <- LT.getBase
return $ f b
------------------------------------------------------------------------------
-- | Lets you retrieve the list of routes currently set up by an Initializer.
-- This can be useful in debugging.
getRoutes :: Initializer b v [ByteString]
getRoutes = liftM (map fst) $ iGets _handlers
------------------------------------------------------------------------------
-- | Return the current environment string. This will be the
-- environment given to 'runSnaplet' or from the command line when
-- using 'serveSnaplet'. Useful for changing behavior during
-- development and testing.
getEnvironment :: Initializer b v String
getEnvironment = iGets _environment
------------------------------------------------------------------------------
-- | Converts a plain hook into a Snaplet hook.
toSnapletHook :: (v -> IO (Either Text v))
-> (Snaplet v -> IO (Either Text (Snaplet v)))
toSnapletHook f (Snaplet cfg reset val) = do
val' <- f val
return $! Snaplet cfg reset <$> val'
------------------------------------------------------------------------------
-- | Adds an IO action that modifies the current snaplet state to be run at
-- the end of initialization on the state that was created. This makes it
-- easier to allow one snaplet's state to be modified by another snaplet's
-- initializer. A good example of this is when a snaplet has templates that
-- define its views. The Heist snaplet provides the 'addTemplates' function
-- which allows other snaplets to set up their own templates. 'addTemplates'
-- is implemented using this function.
addPostInitHook :: (v -> IO (Either Text v))
-> Initializer b v ()
addPostInitHook = addPostInitHook' . toSnapletHook
-- | Snaplet-level variant: lift the hook to the base state, then register it.
addPostInitHook' :: (Snaplet v -> IO (Either Text (Snaplet v)))
                 -> Initializer b v ()
addPostInitHook' h = upHook h >>= addPostInitHookBase
------------------------------------------------------------------------------
-- | Variant of addPostInitHook for when you have things wrapped in a Snaplet.
addPostInitHookBase :: (Snaplet b -> IO (Either Text (Snaplet b)))
-> Initializer b v ()
addPostInitHookBase = Initializer . lift . tell . Hook
------------------------------------------------------------------------------
-- | Helper function for transforming hooks.
upHook :: (Snaplet v -> IO (Either Text (Snaplet v)))
-> Initializer b v (Snaplet b -> IO (Either Text (Snaplet b)))
upHook h = Initializer $ do
l <- ask
return $ upHook' l h
------------------------------------------------------------------------------
-- | Helper function for transforming hooks.
upHook' :: Monad m => ALens' b a -> (a -> m (Either e a)) -> b -> m (Either e b)
upHook' l h b = do
v <- h (b ^# l)
return $ case v of
Left e -> Left e
Right v' -> Right $ storing l v' b
------------------------------------------------------------------------------
-- | Modifies the Initializer's SnapletConfig.
modifyCfg :: (SnapletConfig -> SnapletConfig) -> Initializer b v ()
modifyCfg f = iModify $ over curConfig $ \c -> f c
------------------------------------------------------------------------------
-- | If a snaplet has a filesystem presence, this function creates and copies
-- the files if they dont' already exist.
setupFilesystem :: Maybe (IO FilePath)
-- ^ The directory where the snaplet's reference files are
-- stored. Nothing if the snaplet doesn't come with any
-- files that need to be installed.
-> FilePath
-- ^ Directory where the files should be copied.
-> Initializer b v ()
setupFilesystem Nothing _ = return ()
setupFilesystem (Just getSnapletDataDir) targetDir = do
exists <- liftIO $ doesDirectoryExist targetDir
unless exists $ do
printInfo "...setting up filesystem"
liftIO $ createDirectoryIfMissing True targetDir
srcDir <- liftIO getSnapletDataDir
liftIO $ readDirectoryWith (doCopy srcDir targetDir) srcDir
return ()
where
doCopy srcRoot targetRoot filename = do
createDirectoryIfMissing True directory
copyFile filename toDir
where
toDir = targetRoot </> makeRelative srcRoot filename
directory = dropFileName toDir
------------------------------------------------------------------------------
-- | All snaplet initializers must be wrapped in a call to @makeSnaplet@,
-- which handles standardized housekeeping common to all snaplets.
-- Common usage will look something like
-- this:
--
-- @
-- fooInit :: SnapletInit b Foo
-- fooInit = makeSnaplet \"foo\" \"An example snaplet\" Nothing $ do
-- -- Your initializer code here
-- return $ Foo 42
-- @
--
-- Note that you're writing your initializer code in the Initializer monad,
-- and makeSnaplet converts it into an opaque SnapletInit type. This allows
-- us to use the type system to ensure that the API is used correctly.
makeSnaplet :: Text
-- ^ A default id for this snaplet. This is only used when
-- the end-user has not already set an id using the
-- nameSnaplet function.
-> Text
-- ^ A human readable description of this snaplet.
-> Maybe (IO FilePath)
-- ^ The path to the directory holding the snaplet's reference
-- filesystem content. This will almost always be the
-- directory returned by Cabal's getDataDir command, but it
-- has to be passed in because it is defined in a
-- package-specific import. Setting this value to Nothing
-- doesn't preclude the snaplet from having files in in the
-- filesystem, it just means that they won't be copied there
-- automatically.
-> Initializer b v v
-- ^ Snaplet initializer.
-> SnapletInit b v
makeSnaplet snapletId desc getSnapletDataDir m = SnapletInit $ do
modifyCfg $ \c -> if isNothing $ _scId c
then set scId (Just snapletId) c else c
sid <- iGets (T.unpack . fromJust . _scId . _curConfig)
topLevel <- iGets _isTopLevel
unless topLevel $ do
modifyCfg $ over scUserConfig (subconfig (T.pack sid))
modifyCfg $ \c -> set scFilePath
(_scFilePath c </> "snaplets" </> sid) c
iModify (set isTopLevel False)
modifyCfg $ set scDescription desc
cfg <- iGets _curConfig
printInfo $ T.pack $ concat
["Initializing "
,sid
," @ /"
,B.unpack $ buildPath $ _scRouteContext cfg
]
-- This has to happen here because it needs to be after scFilePath is set
-- up but before the config file is read.
setupFilesystem getSnapletDataDir (_scFilePath cfg)
env <- iGets _environment
let configLocation = _scFilePath cfg </> (env ++ ".cfg")
liftIO $ addToConfig [Optional configLocation]
(_scUserConfig cfg)
mkSnaplet m
------------------------------------------------------------------------------
-- | Internal function that gets the SnapletConfig out of the initializer
-- state and uses it to create a (Snaplet a).
mkSnaplet :: Initializer b v v -> Initializer b v (Snaplet v)
mkSnaplet m = do
res <- m
cfg <- iGets _curConfig
setInTop <- iGets masterReloader
l <- getLens
let modifier = setInTop . set (cloneLens l . snapletValue)
return $ Snaplet cfg modifier res
------------------------------------------------------------------------------
-- | Brackets an initializer computation, restoring curConfig after the
-- computation returns.
bracketInit :: Initializer b v a -> Initializer b v a
bracketInit m = do
s <- iGet
res <- m
iModify (set curConfig (_curConfig s))
return res
------------------------------------------------------------------------------
-- | Handles modifications to InitializerState that need to happen before a
-- snaplet is called with either nestSnaplet or embedSnaplet.
setupSnapletCall :: ByteString -> Initializer b v ()
setupSnapletCall rte = do
curId <- iGets (fromJust . _scId . _curConfig)
modifyCfg (over scAncestry (curId:))
modifyCfg (over scId (const Nothing))
unless (B.null rte) $ modifyCfg (over scRouteContext (rte:))
------------------------------------------------------------------------------
-- | Runs another snaplet's initializer and returns the initialized Snaplet
-- value. Calling an initializer with nestSnaplet gives the nested snaplet
-- access to the same base state that the current snaplet has. This makes it
-- possible for the child snaplet to make use of functionality provided by
-- sibling snaplets.
nestSnaplet :: ByteString
-- ^ The root url for all the snaplet's routes. An empty
-- string gives the routes the same root as the parent
-- snaplet's routes.
-> SnapletLens v v1
-- ^ Lens identifying the snaplet
-> SnapletInit b v1
-- ^ The initializer function for the subsnaplet.
-> Initializer b v (Snaplet v1)
nestSnaplet rte l (SnapletInit snaplet) =
with l $ bracketInit $ do
setupSnapletCall rte
snaplet
------------------------------------------------------------------------------
-- | Runs another snaplet's initializer and returns the initialized Snaplet
-- value. The difference between this and 'nestSnaplet' is the first type
-- parameter in the third argument. The \"v1 v1\" makes the child snaplet
-- think that it is the top-level state, which means that it will not be able
-- to use functionality provided by snaplets included above it in the snaplet
-- tree. This strongly isolates the child snaplet, and allows you to eliminate
-- the b type variable. The embedded snaplet can still get functionality
-- from other snaplets, but only if it nests or embeds the snaplet itself.
--
-- Note that this function does not change where this snaplet is located in
-- the filesystem. The snaplet directory structure convention stays the same.
-- Also, embedSnaplet limits the ways that snaplets can interact, so we
-- usually recommend using nestSnaplet instead. However, we provide this
-- function because sometimes reduced flexibility is useful. In short, if
-- you don't understand what this function does for you from looking at its
-- type, you probably don't want to use it.
embedSnaplet :: ByteString
-- ^ The root url for all the snaplet's routes. An empty
-- string gives the routes the same root as the parent
-- snaplet's routes.
--
-- NOTE: Because of the stronger isolation provided by
-- embedSnaplet, you should be more careful about using an
-- empty string here.
-> SnapletLens v v1
-- ^ Lens identifying the snaplet
-> SnapletInit v1 v1
-- ^ The initializer function for the subsnaplet.
-> Initializer b v (Snaplet v1)
embedSnaplet rte l (SnapletInit snaplet) = bracketInit $ do
curLens <- getLens
setupSnapletCall ""
chroot rte (cloneLens curLens . subSnaplet l) snaplet
------------------------------------------------------------------------------
-- | Changes the base state of an initializer.
chroot :: ByteString
-> SnapletLens (Snaplet b) v1
-> Initializer v1 v1 a
-> Initializer b v a
chroot rte l (Initializer m) = do
curState <- iGet
let newSetter f = masterReloader curState (over (cloneLens l) f)
((a,s), (Hook hook)) <- liftIO $ runWriterT $ LT.runLensT m id $
curState {
_handlers = [],
_hFilter = id,
masterReloader = newSetter
}
let handler = chrootHandler l $ _hFilter s $ route $ _handlers s
iModify $ over handlers (++[(rte,handler)])
. set cleanup (_cleanup s)
addPostInitHookBase $ upHook' l hook
return a
------------------------------------------------------------------------------
-- | Changes the base state of a handler.
chrootHandler :: SnapletLens (Snaplet v) b'
-> Handler b' b' a -> Handler b v a
chrootHandler l (Handler h) = Handler $ do
s <- get
(a, s') <- liftSnap $ L.runLensed h id (s ^# l)
modify $ storing l s'
return a
------------------------------------------------------------------------------
-- | Sets a snaplet's name. All snaplets have a default name set by the
-- snaplet author. This function allows you to override that name. You will
-- have to do this if you have more than one instance of the same kind of
-- snaplet because snaplet names must be unique. This function must
-- immediately surround the snaplet's initializer. For example:
--
-- @fooState <- nestSnaplet \"fooA\" $ nameSnaplet \"myFoo\" $ fooInit@
nameSnaplet :: Text
-- ^ The snaplet name
-> SnapletInit b v
-- ^ The snaplet initializer function
-> SnapletInit b v
nameSnaplet nm (SnapletInit m) = SnapletInit $
modifyCfg (set scId (Just nm)) >> m
------------------------------------------------------------------------------
-- | Adds routing to the current 'Handler'. The new routes are merged with
-- the main routing section and take precedence over existing routing that was
-- previously defined.
addRoutes :: [(ByteString, Handler b v ())]
-> Initializer b v ()
addRoutes rs = do
l <- getLens
ctx <- iGets (_scRouteContext . _curConfig)
let modRoute (r,h) = ( buildPath (r:ctx)
, setPattern r >> withTop' l h)
let rs' = map modRoute rs
iModify (\v -> over handlers (++rs') v)
where
setPattern r = do
p <- getRoutePattern
when (isNothing p) $ setRoutePattern r
------------------------------------------------------------------------------
-- | Wraps the /base/ snaplet's routing in another handler, allowing you to run
-- code before and after all routes in an application.
--
-- Here are some examples of things you might do:
--
-- > wrapSite (\site -> logHandlerStart >> site >> logHandlerFinished)
-- > wrapSite (\site -> ensureAdminUser >> site)
--
wrapSite :: (Handler b v () -> Handler b v ())
-- ^ Handler modifier function
-> Initializer b v ()
wrapSite f0 = do
f <- mungeFilter f0
iModify (\v -> over hFilter (f.) v)
------------------------------------------------------------------------------
mungeFilter :: (Handler b v () -> Handler b v ())
-> Initializer b v (Handler b b () -> Handler b b ())
mungeFilter f = do
myLens <- Initializer ask
return $ \m -> with' myLens $ f' m
where
f' (Handler m) = f $ Handler $ L.withTop id m
------------------------------------------------------------------------------
-- | Attaches an unload handler to the snaplet. The unload handler will be
-- called when the server shuts down, or is reloaded.
onUnload :: IO () -> Initializer b v ()
onUnload m = do
cleanupRef <- iGets _cleanup
liftIO $ atomicModifyIORef cleanupRef f
where
f curCleanup = (curCleanup >> m, ())
------------------------------------------------------------------------------
-- |
logInitMsg :: IORef Text -> Text -> IO ()
logInitMsg ref msg = atomicModifyIORef ref (\cur -> (cur `T.append` msg, ()))
------------------------------------------------------------------------------
-- | Initializers should use this function for all informational or error
-- messages to be displayed to the user. On application startup they will be
-- sent to the console. When executed from the reloader, they will be sent
-- back to the user in the HTTP response.
printInfo :: Text -> Initializer b v ()
printInfo msg = do
logRef <- iGets _initMessages
liftIO $ logInitMsg logRef (msg `T.append` "\n")
------------------------------------------------------------------------------
-- | Builds an IO reload action for storage in the SnapletState.
mkReloader :: FilePath
-> String
-> ((Snaplet b -> Snaplet b) -> IO ())
-> IORef (IO ())
-> Initializer b b (Snaplet b)
-> IO (Either Text Text)
mkReloader cwd env resetter cleanupRef i = do
join $ readIORef cleanupRef
!res <- runInitializer' resetter env i cwd
either (return . Left) good res
where
good (b,is) = do
_ <- resetter (const b)
msgs <- readIORef $ _initMessages is
return $ Right msgs
------------------------------------------------------------------------------
-- | Runs a top-level snaplet in the Snap monad.
runBase :: Handler b b a
-> MVar (Snaplet b)
-> Snap a
runBase (Handler m) mvar = do
!b <- liftIO (readMVar mvar)
(!a, _) <- L.runLensed m id b
return $! a
------------------------------------------------------------------------------
-- | Lets you change a snaplet's initial state. It's almost like a reload,
-- except that it doesn't run the initializer. It just modifies the result of
-- the initializer. This can be used to let you define actions for reloading
-- individual snaplets.
modifyMaster :: v -> Handler b v ()
modifyMaster v = do
modifier <- getsSnapletState _snapletModifier
liftIO $ modifier v
------------------------------------------------------------------------------
-- | Internal function for running Initializers. If any exceptions were
-- thrown by the initializer, this function catches them, runs any cleanup
-- actions that had been registered, and returns an expanded error message
-- containing the exception details as well as all messages generated by the
-- initializer before the exception was thrown.
runInitializer :: ((Snaplet b -> Snaplet b) -> IO ())
-> String
-> Initializer b b (Snaplet b)
-> IO (Either Text (Snaplet b, InitializerState b))
runInitializer resetter env b =
getCurrentDirectory >>= runInitializer' resetter env b
------------------------------------------------------------------------------
runInitializer' :: ((Snaplet b -> Snaplet b) -> IO ())
-> String
-> Initializer b b (Snaplet b)
-> FilePath
-> IO (Either Text (Snaplet b, InitializerState b))
runInitializer' resetter env b@(Initializer i) cwd = do
cleanupRef <- newIORef (return ())
let reloader_ = mkReloader cwd env resetter cleanupRef b
let builtinHandlers = [("/admin/reload", reloadSite)]
let cfg = SnapletConfig [] cwd Nothing "" empty [] Nothing reloader_
logRef <- newIORef ""
let body = do
((res, s), (Hook hook)) <- runWriterT $ LT.runLensT i id $
InitializerState True cleanupRef builtinHandlers id cfg logRef
env resetter
res' <- hook res
return $ (,s) <$> res'
handler e = do
join $ readIORef cleanupRef
logMessages <- readIORef logRef
return $ Left $ T.unlines
[ "Initializer threw an exception..."
, T.pack $ show (e :: SomeException)
, ""
, "...but before it died it generated the following output:"
, logMessages
]
catch body handler
------------------------------------------------------------------------------
-- | Given an environment and a Snaplet initializer, produce a concatenated log
-- of all messages generated during initialization, a snap handler, and a
-- cleanup action. The environment is an arbitrary string such as \"devel\" or
-- \"production\". This string is used to determine the name of the
-- configuration files used by each snaplet. If an environment of Nothing is
-- used, then runSnaplet defaults to \"devel\".
runSnaplet :: Maybe String -> SnapletInit b b -> IO (Text, Snap (), IO ())
runSnaplet env (SnapletInit b) = do
    snapletMVar <- newEmptyMVar
    -- The resetter lets reloads swap out the snaplet held in the MVar.
    let resetter f = modifyMVar_ snapletMVar (return . f)
    eRes <- runInitializer resetter (fromMaybe "devel" env) b
    let go (siteSnaplet,is) = do
            putMVar snapletMVar siteSnaplet
            msgs <- liftIO $ readIORef $ _initMessages is
            let handler = runBase (_hFilter is $ route $ _handlers is) snapletMVar
            cleanupAction <- readIORef $ _cleanup is
            return (msgs, handler, cleanupAction)
    -- An initialization failure aborts with the collected error text.
    either (error . ('\n':) . T.unpack) go eRes
------------------------------------------------------------------------------
-- | Given a configuration and a snap handler, complete it and produce the
-- completed configuration as well as a new toplevel handler with things like
-- compression and a 500 handler set up.
combineConfig :: Config Snap a -> Snap () -> IO (Config Snap a, Snap ())
combineConfig config handler = do
    conf <- completeConfig config
    -- fromJust is safe only because completeConfig fills in defaults for
    -- the error handler and compression settings.
    let catch500 = (flip catch $ fromJust $ getErrorHandler conf)
    let compress = if fromJust (getCompression conf)
                   then withCompression else id
    let site = compress $ catch500 handler
    return (conf, site)
------------------------------------------------------------------------------
-- | Initialize and run a Snaplet. This function parses command-line arguments,
-- runs the given Snaplet initializer, and starts an HTTP server running the
-- Snaplet's toplevel 'Handler'.
serveSnaplet :: Config Snap AppConfig
                 -- ^ The configuration of the server - you can usually pass a
                 -- default 'Config' via
                 -- 'Snap.Http.Server.Config.defaultConfig'.
             -> SnapletInit b b
                 -- ^ The snaplet initializer function.
             -> IO ()
serveSnaplet startConfig initializer = do
    config <- commandLineAppConfig startConfig
    serveSnapletNoArgParsing config initializer
------------------------------------------------------------------------------
-- | Like 'serveSnaplet', but don't try to parse command-line arguments.
serveSnapletNoArgParsing :: Config Snap AppConfig
                 -- ^ The configuration of the server - you can usually pass a
                 -- default 'Config' via
                 -- 'Snap.Http.Server.Config.defaultConfig'.
             -> SnapletInit b b
                 -- ^ The snaplet initializer function.
             -> IO ()
serveSnapletNoArgParsing config initializer = do
    let env = appEnvironment =<< getOther config
    (msgs, handler, doCleanup) <- runSnaplet env initializer
    (conf, site) <- combineConfig config handler
    -- Ensure a "log" directory exists in the current working directory.
    createDirectoryIfMissing False "log"
    let serve = httpServe conf
    when (loggingEnabled conf) $ liftIO $ hPutStrLn stderr $ T.unpack msgs
    -- Any exception from the server is swallowed so cleanup still runs.
    _ <- try $ serve $ site
         :: IO (Either SomeException ())
    doCleanup
  where
    -- Logging is on unless verbosity is explicitly set to False.
    loggingEnabled = not . (== Just False) . getVerbose
------------------------------------------------------------------------------
-- | Allows you to get all of your app's config data in the IO monad without
-- the web server infrastructure.
loadAppConfig :: FileName
              -- ^ The name of the config file to look for. In snap
              -- applications, this is something based on the
              -- environment...i.e. @devel.cfg@.
              -> FilePath
              -- ^ Path to the root directory of your project.
              -> IO C.Config
loadAppConfig cfg root = do
    -- Walk the project directory tree lazily and collect config groups.
    tree <- buildL root
    let groups = loadAppConfig' cfg "" $ dirTree tree
    loadGroups groups
------------------------------------------------------------------------------
-- | Recursive worker for loadAppConfig.  Collects the matching config file
-- in the current directory (under the accumulated prefix), then recurses
-- into every directory found inside any \"snaplets\" subdirectory, extending
-- the prefix with that snaplet's directory name.
loadAppConfig' :: FileName -> Text -> DirTree a -> [(Text, Worth a)]
loadAppConfig' cfg _prefix d@(Dir _ c) =
    (map ((_prefix,) . Required) $ getCfg cfg d) ++
    concatMap (\a -> loadAppConfig' cfg (nextPrefix $ name a) a) snaplets
  where
    nextPrefix p = T.concat [_prefix, T.pack p, "."]
    snapletsDirs = filter isSnapletsDir c
    snaplets = concatMap (filter isDir . contents) snapletsDirs
loadAppConfig' _ _ _ = []
-- | True for a directory node named \"snaplets\".
isSnapletsDir :: DirTree t -> Bool
isSnapletsDir t = case t of
    Dir "snaplets" _ -> True
    _                -> False

-- | True for any directory node.
isDir :: DirTree t -> Bool
isDir t = case t of
    Dir _ _ -> True
    _       -> False

-- | True when the node is a file whose name equals the config file name.
isCfg :: FileName -> DirTree t -> Bool
isCfg cfg t = case t of
    File n _ -> cfg == n
    _        -> False

-- | Contents of all files in a directory node matching the config name.
getCfg :: FileName -> DirTree b -> [b]
getCfg cfg t = case t of
    Dir _ c -> [file f | f <- c, isCfg cfg f]
    _       -> []
| snapframework/snap | src/Snap/Snaplet/Internal/Initializer.hs | bsd-3-clause | 29,192 | 0 | 19 | 7,776 | 5,567 | 2,891 | 2,676 | 386 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module Numeric.Sparse.Vector where
import qualified Data.IntMap as M hiding ((!))
import Data.Maybe
import Data.Proxy
import GHC.TypeLits
import Numeric.Sparse.Internal
import Numeric.Sparse.Types
-- | Pointwise numeric operations on sparse vectors.  Multiplication keeps
-- only indices stored in both operands, preserving sparsity.
-- NOTE(review): fromInteger of a nonzero literal yields a vector with a
-- single entry at index 1 -- confirm this embedding is intended.
instance (Eq a, Num a, DIM n) => Num (SparseVector n a) where
  (+) = unionWith (+)
  (*) = intersectWith (*)
  negate = fmap negate
  fromInteger 0 = empty
  fromInteger x = singleton 1 (fromInteger x)
  abs = fmap abs
  signum = fmap signum
-- | True when the vector stores no (non-zero) entries.
null :: (DIM n) => SparseVector n a -> Bool
null (SV v) = M.null v
-- | Dot product of two `IntMap`s (for internal use).  Returns Nothing when
-- the accumulated sum is 0, so callers can keep results sparse.
spDot :: (Num a, Eq a, DIM n) => SparseVector n a -> SparseVector n a -> Maybe a
spDot (SV v) (SV w) =
    let total = M.foldl' (+) 0 (M.intersectionWith (*) v w)
    in if total == 0 then Nothing else Just total
-- | Bounds-check an index against the type-level dimension @n@; returns the
-- vector unchanged or calls 'error' when the index is out of range.
checkDim :: forall n a. (DIM n) => Index -> SparseVector n a -> SparseVector n a
checkDim i v | i < 1 || i > d = error $ "Index out of bounds (" ++ show i ++ " not in (1," ++ show d ++ "))"
             | otherwise = v
  where
    d = natInt (Proxy :: Proxy n)
-- | Like 'checkDim' but against an explicit dimension, for (index, value)
-- pairs; used when building vectors from lists.
checkDim' :: Index -> (Index, a) -> (Index, a)
checkDim' d (i, v) | i < 1 || i > d = error $ "Index out of bounds (" ++ show i ++ " not in (1," ++ show d ++ "))"
                   | otherwise = (i, v)
-- | The all-zero vector: no entries stored.
empty :: forall a n. (DIM n, Num a) => SparseVector n a
empty = SV M.empty
-- | Number of stored (non-zero) entries.
nnz :: (DIM n) => SparseVector n a -> Int
nnz (SV v) = M.size v
-- | Vector with a single stored entry at index @i@.
singleton :: (DIM n) => Index -> a -> SparseVector n a
singleton i x = SV (M.singleton i x)
-- | Combine entries of both vectors with @f@, dropping any zeros produced.
unionWith :: (Eq a, Num a, DIM n) => (a -> a -> a) -> SparseVector n a -> SparseVector n a -> SparseVector n a
unionWith f (SV x) (SV y) = SV $ M.filter (/= 0) (M.unionWith f x y)
-- | Combine only indices stored in both vectors with @f@.
intersectWith :: DIM n => (a -> a -> a) -> SparseVector n a -> SparseVector n a -> SparseVector n a
intersectWith f (SV x) (SV y) = SV $ M.intersectionWith f x y
-- | Concatenate two sparse vectors; the second operand's indices are
-- shifted past the type-level length of the first.
append :: forall n m a. (DIM n, KnownNat m) => SparseVector n a -> SparseVector m a -> SparseVector (n + m) a
append (SV x) (SV y) = SV (M.union x (shiftKeys s y))
  where
    s = natInt (Proxy :: Proxy n)
-- Serialization --------------------------------------------------------------
-- | The dimension (taken from the type) plus the stored (index, value) pairs.
toListWithSize :: forall n a. (Num a, Eq a, DIM n) => SparseVector n a -> (Int, [(Index, a)])
toListWithSize (SV v) = (natInt (Proxy :: Proxy n), M.toList v)
-- | The stored (index, value) pairs only.
toList :: (Num a, Eq a, DIM n) => SparseVector n a -> [(Index, a)]
toList = snd . toListWithSize
-- | Build a vector from (index, value) pairs, bounds-checking every index
-- against the type-level dimension.
fromList :: forall n a. (Num a, Eq a, DIM n) => [(Index, a)] -> SparseVector n a
fromList ivs = SV (M.fromList $ map (checkDim' d) ivs)
  where
    d = natInt (Proxy :: Proxy n)
-- Modification ---------------------------------------------------------------
-- | Remove the entry at (bounds-checked) index @i@, i.e. set it to zero.
del :: (Num a, DIM n) => SparseVector n a -> Index -> SparseVector n a
del v i = SV (M.delete i (vec $ checkDim i v))
-- | Like 'del', but returns Nothing when the deletion empties the vector.
--
-- Fixed: the original matched the result of @M.size v'@ against a pattern
-- named @otherwise@ -- a catch-all binding that shadowed
-- 'Prelude.otherwise' and merely looked like a guard -- and paid O(n) for
-- 'M.size' on an IntMap.  Guards with the O(1) 'M.null' say the same thing
-- honestly.
del' :: (Num a, DIM n) => SparseVector n a -> Index -> Maybe (SparseVector n a)
del' v i
  | M.null v' = Nothing
  | otherwise = Just (SV v')
  where
    v' = M.delete i $ vec (checkDim i v)
-- | Insert a value at (bounds-checked) index @i@.  Inserting 0 deletes the
-- entry instead, preserving the sparse invariant that zeros are not stored.
ins :: (Eq a, Num a, DIM n) => SparseVector n a -> (Index, a) -> SparseVector n a
ins v (i, 0) = del (checkDim i v) i
ins v (i, x) = SV (M.insert i x $ vec (checkDim i v))
-- Linear Algebra -------------------------------------------------------------
-- | Scale sparse vector by a scalar a * v.  Scaling by 0 short-circuits to
-- the empty (all-zero) vector and scaling by 1 returns the vector untouched.
--
-- Fixed: the unused binder in the zero case is now a wildcard, silencing
-- the -Wunused-matches warning.
scale :: forall n a. (Eq a, Num a, DIM n) => a -> SparseVector n a -> SparseVector n a
scale 0 _ = empty
scale 1 v = v
scale c v = fmap (* c) v
-- | Inner (dot) product of two sparse vectors <x,y>; an empty intersection
-- (or a sum of exactly 0) yields 0.
dot :: (Eq a, Num a, DIM n) => SparseVector n a -> SparseVector n a -> a
dot x y = fromMaybe 0 (spDot x y)
-- | Infix alias for 'dot'.
-- NOTE(review): this local (<>) collides with Semigroup's (<>) exported by
-- Prelude on GHC >= 8.4 -- confirm the supported compiler range.
(<>) :: (Eq a, Num a, DIM n) => SparseVector n a -> SparseVector n a -> a
x <> y = dot x y
-- | l2 norm of vector (summing over stored entries; absent zeros contribute
-- nothing, which is correct for the Euclidean norm).
norm :: (Eq a, Num a, Floating a, DIM n) => SparseVector n a -> a
norm v = sqrt $ sum $ fmap (^ (2::Int)) v
-- | Outer product: row i of the result is @x_i@ times the second vector.
outer :: (Eq a, Num a, DIM n, DIM m) => SparseVector n a -> SparseVector m a -> SparseMatrix n m a
outer (SV x) v = SM $ M.map (`scale` v) x
-- | Infix alias for 'outer'.
(><) :: (Eq a, Num a, DIM n, DIM m) => SparseVector n a -> SparseVector m a -> SparseMatrix n m a
x >< y = outer x y
| mnick/hsparse | src/Numeric/Sparse/Vector.hs | bsd-3-clause | 4,277 | 0 | 11 | 1,113 | 2,009 | 1,039 | 970 | 75 | 2 |
{-# LANGUAGE EmptyDataDecls, TypeSynonymInstances #-}
{-# OPTIONS_GHC -fcontext-stack42 #-}
module Games.Chaos2010.Database.Current_wizard_square where
import Games.Chaos2010.Database.Fields
import Database.HaskellDB.DBLayout
-- | HaskellDB record type for the current_wizard_square relation:
-- two nullable Int columns, x and y.
type Current_wizard_square =
    Record
        (HCons (LVPair X (Expr (Maybe Int)))
            (HCons (LVPair Y (Expr (Maybe Int))) HNil))
current_wizard_square :: Table Current_wizard_square
current_wizard_square = baseTable "current_wizard_square" | JakeWheat/Chaos-2010 | Games/Chaos2010/Database/Current_wizard_square.hs | bsd-3-clause | 480 | 0 | 15 | 67 | 104 | 58 | 46 | 11 | 1 |
{-# OPTIONS_GHC -F -pgmF she #-}
{-# LANGUAGE GADTs, FlexibleContexts, RankNTypes, KindSignatures #-}
module Generics.Morphism.Para where
import Annotation.Annotation
import Control.Applicative
import Control.Category
import Control.Monad hiding (mapM)
import Control.Monad.Identity
import Control.Monad.Lazy
import Data.Foldable
import Data.Traversable
import Generics.Regular.Functions.Seq
import Generics.Fixpoint
import Prelude hiding ((.), id, mapM)
-- | Algebra for paramorphisms over an annotated fixpoint.  'Psi' consumes
-- one layer of @f@ in which every position carries both the original
-- subtree and its recursive result; 'Proj' projects the third component of
-- a triple-valued algebra, which gives 'AlgA' its Functor/Applicative
-- structure via ('<++>') below.
data AlgA (a :: (* -> *) -> * -> *) (f :: * -> *) (r :: *) where
  Psi :: (f (FixA a f, r) -> r) -> AlgA a f r
  Proj :: AlgA a f (r -> s, r, s) -> AlgA a f s
-- | Algebra polymorphic in the annotation type.
type Alg f r = forall a. AlgA a f r
instance Functor f => Functor (AlgA a f) where
  fmap f psi = Proj (pure f <++> psi)
instance Functor f => Applicative (AlgA a f) where
  pure = Psi . const
  a <*> b = Proj (a <++> b)
-- | First component of a triple.
fst3 :: (a, b, c) -> a
fst3 (a, _, _) = a

-- | Second component of a triple.
snd3 :: (a, b, c) -> b
snd3 (_, b, _) = b

-- | Third component of a triple.
trd3 :: (a, b, c) -> c
trd3 (_, _, c) = c
-- Instances mapping over only the last component of a triple.
-- NOTE(review): these are orphan instances, and recent versions of base
-- define Functor/Foldable/Traversable for ((,,) a b) -- confirm the GHC
-- range this she-preprocessed module targets.
instance Functor ((,,) a b) where
  fmap f (a, b, c) = (a, b, f c)
instance Foldable ((,,) a b) where
  foldMap f (_, _, c) = f c
instance Traversable ((,,) a b) where
  -- she idiom brackets: applicative application of ((,,) a b) to (f c).
  traverse f (a, b, c) = (| ((,,) a b) (f c) |)
-- | Pair two algebras into a triple algebra (function, argument, result).
-- The Psi/Psi case runs both algebras on the same layer (projecting the
-- matching component from each position) and applies the first's result to
-- the second's; the mixed cases normalize a 'Proj' to the same shape first.
(<++>) :: (Functor f, Functor (AlgA a f)) => AlgA a f (r -> s) -> AlgA a f r -> AlgA a f (r -> s, r, s)
Proj f <++> Proj g = fmap trd3 f <++> fmap trd3 g
Psi f <++> Proj g = Proj (pure id <++> Psi f) <++> Proj g
Proj f <++> Psi g = Proj f <++> Proj (pure id <++> Psi g)
Psi f <++> Psi g = Psi (\x -> f (fmap2 fst3 x) `mk` g (fmap2 snd3 x))
  where mk x y = (x, y, x y)
-- | Core paramorphism worker: @z@ injects results into the monad and @y@
-- projects them back, letting the same recursion serve both the plain and
-- the endomorphic ('endoMA') instantiations.  The 'Proj' case peels one
-- projection layer; the 'Psi' case recurses lazily over one 'annO' layer,
-- pairing each child with its (lazy) recursive result.
_para :: (Traversable f, Lazy m, AnnO a f m) => (x -> m r) -> (r -> x) -> AlgA a f x -> FixA a f -> m r
_para z y (Proj psi) = fmap trd3 . _para (mapM z) (fmap y) psi
_para z y (Psi psi) = z . psi <=< mapM (grp (fmap y . lazy . _para z y (Psi psi))) <=< annO
  where grp f c = fmap ((,) c) (f c)
-- | Lazy paramorphism in a monadic context for annotated structures.
paraMA :: (AnnO a f m, Lazy m, Traversable f) => AlgA a f r -> FixA a f -> m r
paraMA psi = _para return id psi
-- | Lazy paramorphism in a monadic context for structures without annotations.
paraM :: (Applicative m, Monad m, Lazy m, Traversable f) => AlgA Id f r -> Fix f -> m r
paraM = paraMA
-- | Lazy paramorphism for annotated structures.
paraA :: (AnnO a f Identity, Traversable f) => AlgA a f c -> FixA a f -> c
paraA psi = runIdentity . paraMA psi
-- | Lazy paramorphism for structures without annotations.
para :: Traversable f => AlgA Id f c -> Fix f -> c
para psi = runIdentity . paraM psi
-- | Strict paramorphism in a monadic context for annotated structures.
-- Strictness comes from dseq-ing the result against itself.
paraMA' :: (DeepSeq r, Traversable f, Lazy m, AnnO a f m) => AlgA a f r -> FixA a f -> m r
paraMA' psi f = (\a -> dseq a a) <$> paraMA psi f
-- | Strict paramorphism in a monadic context for structures without annotations.
paraM' :: (DeepSeq r, Traversable f, Applicative m, Monad m, Lazy m) => AlgA Id f r -> Fix f -> m r
paraM' = paraMA'
-- | Strict paramorphism for annotated structures.
paraA' :: (DeepSeq c, Traversable f, AnnO a f Identity) => AlgA a f c -> FixA a f -> c
paraA' psi = runIdentity . paraMA' psi
-- | Strict paramorphism for structures without annotations.
para' :: (DeepSeq c, Traversable f) => AlgA Id f c -> Fix f -> c
para' psi = runIdentity . paraM' psi
-- | Endomorphic algebra: an algebra whose result rebuilds the fixpoint.
type EndoA a f = AlgA a f (FixA a f)
-- | Endomorphic algebra polymorphic in the annotation.
type Endo f = forall a. EndoA a f
-- | Endomorphism over an annotated structure, re-annotating via fullyIn.
endoMA :: (Traversable f, AnnIO a f m, Lazy m) => EndoA a f -> FixA a f -> m (FixA a f)
endoMA psi = _para fullyIn id psi
-- | Endomorphism over an unannotated structure in a monadic context.
endoM :: (Traversable f, Lazy m, Applicative m, Monad m) => EndoA Id f -> Fix f -> m (Fix f)
endoM = endoMA
-- | Pure endomorphism over an annotated structure.
endoA :: (Traversable f, AnnIO a f Identity) => EndoA a f -> FixA a f -> FixA a f
endoA psi = runIdentity . endoMA psi
-- | Pure endomorphism over an unannotated structure.
endo :: Traversable f => EndoA Id f -> Fix f -> Fix f
endo psi = runIdentity . endoM psi
| sebastiaanvisser/islay | src/Generics/Morphism/Para.hs | bsd-3-clause | 3,805 | 2 | 14 | 882 | 1,872 | 968 | 904 | -1 | -1 |
module Main where
import UTPC (runApp)
-- | Program entry point: delegate to the application runner from UTPC.
main :: IO ()
main = runApp
| arknave/utpc | app/Main.hs | bsd-3-clause | 69 | 0 | 6 | 15 | 27 | 16 | 11 | 4 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Prompt.AppLauncher
-- Copyright : (C) 2008 Luis Cabellos
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : unportable
--
-- A module for launch applicationes that receive parameters in the command
-- line. The launcher call a prompt to get the parameters.
--
-----------------------------------------------------------------------------
module XMonad.Prompt.AppLauncher ( -- * Usage
-- $usage
launchApp
,module XMonad.Prompt
-- * Use case: launching gimp with file
-- $tip
) where
import XMonad (X(),MonadIO)
import XMonad.Core (spawn)
import XMonad.Prompt (XPrompt(showXPrompt), mkXPrompt, XPConfig())
import XMonad.Prompt.Shell (getShellCompl)
{- $usage
This module is intended to allow the launch of the same application
but changing the parameters using the user response. For example, when
you want to open a image in gimp program, you can open gimp and then use
the File Menu to open the image or you can use this module to select
the image in the command line.
We use Prompt to get the user command line. This also allow to autoexpand
the names of the files when we are writing the command line.
-}
{- $tip
First, you need to import necessary modules. Prompt is used to get the promp
configuration and the AppLauncher module itself.
> import XMonad.Prompt
> import XMonad.Prompt.AppLauncher as AL
Then you can add the bindings to the applications.
> ...
> , ((modm, xK_g), AL.launchApp defaultXPConfig "gimp" )
> , ((modm, xK_g), AL.launchApp defaultXPConfig "evince" )
> ...
-}
-- | A customized prompt; the stored String is shown (plus a trailing
-- space) as the prompt label.
data AppPrompt = AppPrompt String
instance XPrompt AppPrompt where
    showXPrompt (AppPrompt n) = n ++ " "
-- | The command/executable name to launch.
type Application = String
-- | The command-line parameters typed by the user.
type Parameters = String
{- | Given an application and its parameters, launch the application by
     spawning @app params@ as a shell command. -}
launch :: MonadIO m => Application -> Parameters -> m ()
launch app params = spawn (concat [app, " ", params])
{- | Get the user's response to a prompt and launch an application using the
   input as command parameters of the application.  Completion uses shell
   filename completion, so file arguments can be tab-expanded. -}
launchApp :: XPConfig -> Application -> X ()
launchApp config app = mkXPrompt (AppPrompt app) config (getShellCompl []) $ launch app
| MasseR/xmonadcontrib | XMonad/Prompt/AppLauncher.hs | bsd-3-clause | 2,528 | 0 | 9 | 624 | 251 | 149 | 102 | 16 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Quantity.RO.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Lang
import Duckling.Quantity.Types
import Duckling.Resolve
import Duckling.Testing.Types
-- | Test corpus: the shared testing context switched to the RO language,
-- paired with the examples below.
corpus :: Corpus
corpus = (testContext {lang = RO}, allExamples)
-- | Utterances paired with the QuantityData each must resolve to.
allExamples :: [Example]
allExamples = concat
  [ examples (QuantityData Pound 2 (Just "carne"))
             [ "doua livre de carne"
             ]
  , examples (QuantityData Pound 1 Nothing)
             [ "o livră"
             ]
  , examples (QuantityData Pound 500 (Just "zahăr"))
             [ "cinci sute livre de zahăr"
             ]
  ]
| rfranek/duckling | Duckling/Quantity/RO/Corpus.hs | bsd-3-clause | 953 | 0 | 11 | 224 | 165 | 98 | 67 | 19 | 1 |
module Text.Search.Sphinx.Configuration where
import qualified Text.Search.Sphinx.Types as T
-- | The configuration for a query
--
-- A note about encodings: The encoding specified here is used to encode
-- every @Text@ value that is sent to the server, and it used to decode all
-- of the server's answers, including error messages.
--
-- If the specified encoding doesn't support characters sent to the server,
-- they will silently be substituted with the byte value of @\'\\SUB\' ::
-- 'Char'@ before transmission.
--
-- If the server sends a byte value back that the encoding doesn't understand,
-- the affected bytes will be converted into special values as
-- specified by that encoding. For example, when decoding invalid UTF-8,
-- all invalid bytes are going to be substituted with @\'\\65533\' ::
-- 'Char'@.
--
data Configuration = Configuration {
    -- | The hostname of the Sphinx daemon
    host :: String
    -- | The portnumber of the Sphinx daemon
  , port :: Int
    -- | Encoding used to encode queries to the server, and decode server responses
  , encoding :: String
    -- | Per-field weights
  , weights :: [Int]
    -- | How many records to seek from result-set start (default is 0)
  , offset :: Int
    -- | How many records to return from result-set starting at offset (default is 20)
  , limit :: Int
    -- | Query matching mode
  , mode :: T.MatchMode
    -- | Ranking mode
  , ranker :: T.Rank
    -- | Match sorting mode
  , sort :: T.Sort
    -- | Attribute to sort by
  , sortBy :: String
    -- | Minimum ID to match, 0 means no limit
  , minId :: Int
    -- | Maximum ID to match, 0 means no limit
  , maxId :: Int
    -- | Attribute filters
  , filters :: [T.Filter]
    -- | Group-by attribute name
  , groupBy :: String
    -- | Group-by sorting clause (to sort groups in result set with)
  , groupSort :: String
    -- | Group-by function (to pre-process group-by attribute value with)
  , groupByFunc :: T.GroupByFunction
    -- | Group-by count-distinct attribute
  , groupDistinct :: String
    -- | Maximum number of matches to retrieve
  , maxMatches :: Int
    -- | Cutoff to stop searching at
  , cutoff :: Int
    -- | Distributed retries count
  , retryCount :: Int
    -- | Distributed retries delay
  , retryDelay :: Int
    -- | Per-index weights
  , indexWeights :: [(String, Int)]
    -- | Maximum query time in milliseconds, 0 means no limit
  , maxQueryTime :: Int
    -- | Per-field-name weights
  , fieldWeights :: [(String, Int)]
    -- | attributes to select, defaults to \"*\"
  , selectClause :: String -- setSelect in regular API
  }
 deriving (Show)
-- | A basic, default configuration: a daemon on 127.0.0.1:3312, UTF-8,
-- match-all mode, proximity+BM25 ranking, relevance sort, at most 1000
-- matches, and no filters or weights.
--
-- Fixed: added the missing top-level type signature (silences
-- -Wmissing-signatures and pins the intended type).
defaultConfig :: Configuration
defaultConfig = Configuration {
                  port          = 3312
                , host          = "127.0.0.1"
                , encoding      = "UTF-8"
                , weights       = []
                , offset        = 0
                , limit         = 20
                , mode          = T.All
                , ranker        = T.ProximityBm25
                , sort          = T.Relevance
                , sortBy        = ""
                , minId         = 0
                , maxId         = 0
                , filters       = []
                , groupSort     = "@group desc"
                , groupBy       = ""
                , groupByFunc   = T.Day
                , groupDistinct = ""
                , maxMatches    = 1000
                , cutoff        = 0
                , retryCount    = 0
                , retryDelay    = 0
                , indexWeights  = []
                , maxQueryTime  = 0
                , fieldWeights  = []
                , selectClause  = "*"
                }
| gregwebs/haskell-sphinx-client | Text/Search/Sphinx/Configuration.hs | bsd-3-clause | 3,678 | 0 | 10 | 1,253 | 434 | 299 | 135 | 55 | 1 |
{--
Copyright (c) 2009 Maciej Pacula
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
--}
{--
Types.hs - type operations for the language-description metalanguage
--}
module GP.Types
(
Type(..)
, isPrimitive
, isPolymorphic
, isTypeVariable
, isTypeCompatible
) where
import Data.List (intersperse, intersect)
import GP.AbstractTypeMatching (Matcher, (==>))
-- | Types in the metalanguage: named primitives, polymorphic types applied
-- to argument types, and type variables.  All three share the 'name' field.
data Type = PrimitiveType { name :: String }
          | PolymorphicType { name :: String, vars :: [Type]}
          | TypeVariable { name :: String }
            deriving (Eq, Ord)
-- Renders polymorphic types in parenthesized, space-separated form,
-- e.g. \"(List a)\".
instance Show Type where
    show (PrimitiveType name) = name
    show (PolymorphicType name vars) = "(" ++ name ++ " " ++ (concat . intersperse " " . map show) vars ++ ")"
    show (TypeVariable name) = name
-- | Predicates classifying the three 'Type' constructors.
isPrimitive, isPolymorphic, isTypeVariable :: Type -> Bool
isPrimitive t = case t of
    PrimitiveType _ -> True
    _               -> False
isPolymorphic t = case t of
    PolymorphicType _ _ -> True
    _                   -> False
isTypeVariable t = case t of
    TypeVariable _ -> True
    _              -> False
-- | @a \`isTypeCompatible\` b@ iff @a@ can be substituted for @b@:
-- identical primitives match, a type variable matches anything (and vice
-- versa), and polymorphic types match when their names agree and their
-- argument lists are pairwise compatible.
isTypeCompatible :: Type -> Type -> Bool
isTypeCompatible (PrimitiveType nameA) (PrimitiveType nameB) = nameA == nameB
isTypeCompatible (TypeVariable _) _ = True
isTypeCompatible _ (TypeVariable _) = True
isTypeCompatible (PolymorphicType nameA varsA) (PolymorphicType nameB varsB) =
    nameA == nameB
    && length varsA == length varsB
    && and (zipWith isTypeCompatible varsA varsB)
isTypeCompatible _ _ = False
| petabricks/petabricks | scripts/oldtuner2/src/GP/Types.hs | mit | 2,810 | 0 | 12 | 769 | 461 | 253 | 208 | 33 | 1 |
{-# LANGUAGE FlexibleContexts, GADTs, OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell, TypeFamilies, QuasiQuotes #-}
module Model where
import Control.Monad (liftM)
import Data.Time
import Database.Persist.Sql
import Database.Persist.TH
import ModelTypes
import Types
import Utils
-- | Template Haskell splice generating the persistent entity types
-- (Contest, Submit), their PersistEntity instances and the "migrateAll"
-- migration from the quasiquoted schema below.
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Contest
  name String
  judgeType JudgeType
  start ZonedTime
  end ZonedTime
  setter String
  problems [String]
  UniqueContestName name
  deriving Show
Submit
  submitTime ZonedTime
  userId String
  judgeType JudgeType
  contestnumber Int
  problemId String
  judge JudgeStatus
  time String
  memory String
  size String
  lang String
  code String
  deriving Show
|]
-- | First submit matching the given filters, if any.
findSubmit :: [Filter Submit] -> DatabaseT (Maybe (Entity Submit))
findSubmit filt = runSql $ selectFirst filt []
-- | All submits matching the given filters.
findAllSubmits :: [Filter Submit] -> DatabaseT [Entity Submit]
findAllSubmits filt = runSql $ selectList filt []
-- | Every submit in the database.
allSubmits :: DatabaseT [Entity Submit]
allSubmits = findAllSubmits []
-- | Replace the stored submit identified by (submitTime, userId); a no-op
-- when no matching row exists.
updateSubmit :: Submit -> DatabaseT ()
updateSubmit s = do
  submit <- findSubmit [SubmitSubmitTime ==. submitSubmitTime s,
                        SubmitUserId ==. submitUserId s]
  case liftM entityKey submit of
    Nothing -> return ()
    Just submitId -> runSql $ replace submitId s
-- | First contest matching the given filters, if any.
findContest :: [Filter Contest] -> DatabaseT (Maybe (Entity Contest))
findContest filt = runSql $ selectFirst filt []
-- | All contests matching the given filters.
findAllContests :: [Filter Contest] -> DatabaseT [Entity Contest]
findAllContests filt = runSql $ selectList filt []
-- | Every contest in the database.
allContests :: DatabaseT [Entity Contest]
allContests = findAllContests []
-- | Replace the stored contest with the same name; a no-op when no
-- matching row exists.
updateContest :: Contest -> DatabaseT ()
updateContest c = do
  contest <- findContest [ContestName ==. contestName c]
  case liftM entityKey contest of
    Nothing -> return ()
    Just contestId -> runSql $ replace contestId c
| wass80/WXCS | src/Model.hs | mit | 1,868 | 0 | 11 | 336 | 474 | 236 | 238 | 36 | 2 |
module Encryption (encryptPassword) where
import Crypto.Hash.SHA512 (hash)
import Data.ByteString.Char8 (pack, unpack)
-- | Hash a password with SHA-512: pack the String to bytes, hash, and
-- unpack the raw digest bytes back into a String.
encryptPassword :: String -> String
encryptPassword = unpack . hash . pack
| jkwatson/eboshi_api_shootout | haskell_scotty/Encryption.hs | mit | 219 | 0 | 7 | 31 | 63 | 37 | 26 | 6 | 1 |
module Tandoori.GHC.Parse (parseMod, getDecls) where
import Tandoori.GHC.Internals
--import DynFlags (Settings (..))
import Control.Monad.IO.Class (liftIO)
import GHC
import GHC.Paths (libdir)
import HsSyn (hsmodDecls)
import HsSyn (HsModule (..))
import Lexer (ParseResult (..), mkPState, unP)
import Parser (parseModule)
import RdrName (RdrName (..))
import StringBuffer (hGetStringBuffer)
-- | Top-level declarations of a parsed module.
-- NOTE(review): no signature in the original; the result of hsmodDecls is
-- the module's declaration list -- consider pinning the type explicitly.
getDecls mod = hsmodDecls $ unLoc mod
-- | Parse a Haskell source file into a located 'HsModule' via the GHC API,
-- using a fresh session's DynFlags; calls 'error' with the rendered GHC
-- message on parse failure.
parseMod :: FilePath -> IO (Located (HsModule RdrName))
parseMod srcFile = do
  buf <- hGetStringBuffer srcFile
  let loc = mkRealSrcLoc (mkFastString srcFile) 1 0
  dflags <- runGhc (Just libdir) $ getSessionDynFlags
  case unP Parser.parseModule (mkPState dflags buf loc) of
    POk pst rdrModule -> return rdrModule
    PFailed srcspan message -> error $ showSDoc dflags message
| themattchan/tandoori | library/Tandoori/GHC/Parse.hs | bsd-3-clause | 1,051 | 0 | 12 | 358 | 270 | 146 | 124 | 20 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Perform a build
module Stack.Build.Execute
( printPlan
, preFetch
, executePlan
-- * Running Setup.hs
, ExecuteEnv
, withExecuteEnv
, withSingleContext
) where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Concurrent.Execute
import Control.Concurrent.Async (withAsync, wait)
import Control.Concurrent.MVar.Lifted
import Control.Concurrent.STM
import Control.Exception.Enclosed (catchIO, tryIO)
import Control.Exception.Lifted
import Control.Monad (liftM, when, unless, void, join, guard, filterM, (<=<))
import Control.Monad.Catch (MonadCatch, MonadMask)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (liftBaseWith)
import Control.Monad.Trans.Resource
import qualified Data.ByteString as S
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S8
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import Data.Foldable (forM_)
import Data.Function
import Data.IORef.RunOnce (runOnce)
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Monoid ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Streaming.Process hiding (callProcess, env)
import qualified Data.Streaming.Process as Process
import Data.Traversable (forM)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Clock (getCurrentTime)
import Data.Word8 (_colon)
import Distribution.System (OS (Windows),
Platform (Platform))
import qualified Distribution.Text
import Language.Haskell.TH as TH (location)
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.IO
import Prelude hiding (FilePath, writeFile)
import Stack.Build.Cache
import Stack.Build.Coverage
import Stack.Build.Haddock
import Stack.Build.Installed
import Stack.Build.Source
import Stack.Types.Build
import Stack.Fetch as Fetch
import Stack.GhcPkg
import Stack.Package
import Stack.PackageDump
import Stack.Constants
import Stack.Types
import Stack.Types.StackT
import Stack.Types.Internal
import qualified System.Directory as D
import System.Environment (getExecutablePath)
import System.Exit (ExitCode (ExitSuccess))
import qualified System.FilePath as FP
import System.IO
import System.PosixCompat.Files (createLink)
import System.Process.Read
import System.Process.Run
import System.Process.Log (showProcessArgDebug)
#if !MIN_VERSION_process(1,2,1)
import System.Process.Internals (createProcess_)
#endif
-- | Constraint synonym bundling everything needed to execute a build plan.
type M env m = (MonadIO m,MonadReader env m,HasHttpManager env,HasBuildConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,MonadMask m,HasLogLevel env,HasEnvConfig env,HasTerminal env)
-- | Pre-fetch the upstream packages a plan will need; local packages
-- contribute nothing to the fetch set.
preFetch :: M env m => Plan -> m ()
preFetch plan
    | Set.null idents = $logDebug "Nothing to fetch"
    | otherwise = do
        $logDebug $ T.pack $
            "Prefetching: " ++
            intercalate ", " (map packageIdentifierString $ Set.toList idents)
        menv <- getMinimalEnvOverride
        fetchPackages menv idents
  where
    idents = Set.unions $ map toIdent $ Map.toList $ planTasks plan
    toIdent (name, task) =
        case taskType task of
            TTLocal _ -> Set.empty
            TTUpstream package _ -> Set.singleton $ PackageIdentifier
                name
                (packageVersion package)
-- | Print a dry-run summary of a plan: unregistrations, builds, tests,
-- benchmarks, and executable installs, in that order.
printPlan :: M env m
          => Plan
          -> m ()
printPlan plan = do
    case Map.elems $ planUnregisterLocal plan of
        [] -> $logInfo "No packages would be unregistered."
        xs -> do
            $logInfo "Would unregister locally:"
            forM_ xs $ \(ident, mreason) -> $logInfo $ T.concat
                [ T.pack $ packageIdentifierString ident
                , case mreason of
                    Nothing -> ""
                    Just reason -> T.concat
                        [ " ("
                        , reason
                        , ")"
                        ]
                ]
    $logInfo ""
    case Map.elems $ planTasks plan of
        [] -> $logInfo "Nothing to build."
        xs -> do
            $logInfo "Would build:"
            mapM_ ($logInfo . displayTask) xs
    -- Finals carry (task, test/bench selection); split them by whether any
    -- tests or benchmarks were selected.
    let hasTests = not . Set.null . lptbTests
        hasBenches = not . Set.null . lptbBenches
        tests = Map.elems $ fmap fst $ Map.filter (hasTests . snd) $ planFinals plan
        benches = Map.elems $ fmap fst $ Map.filter (hasBenches . snd) $ planFinals plan
    unless (null tests) $ do
        $logInfo ""
        $logInfo "Would test:"
        mapM_ ($logInfo . displayTask) tests
    unless (null benches) $ do
        $logInfo ""
        $logInfo "Would benchmark:"
        mapM_ ($logInfo . displayTask) benches
    $logInfo ""
    case Map.toList $ planInstallExes plan of
        [] -> $logInfo "No executables to be installed."
        xs -> do
            $logInfo "Would install executables:"
            forM_ xs $ \(name, loc) -> $logInfo $ T.concat
                [ name
                , " from "
                , case loc of
                    Snap -> "snapshot"
                    Local -> "local"
                , " database"
                ]
-- | For a dry run: render one task as \"pkg-version: database=..., source=...\"
-- plus, when the task still has unbuilt dependencies, an \"after:\" list.
displayTask :: Task -> Text
displayTask task = T.pack $ concat
    [ packageIdentifierString $ taskProvides task
    , ": database="
    , case taskLocation task of
        Snap -> "snapshot"
        Local -> "local"
    , ", source="
    , case taskType task of
        TTLocal lp -> concat
            [ toFilePath $ lpDir lp
            ]
        TTUpstream _ _ -> "package index"
    , if Set.null missing
        then ""
        else ", after: " ++ intercalate "," (map packageIdentifierString $ Set.toList missing)
    ]
  where
    missing = tcoMissing $ taskConfigOpts task
-- | Shared environment threaded through execution of a build plan:
-- process environment, global locks serializing configure/install steps,
-- build options, the map of already-registered packages, and the shared
-- Setup.hs machinery.
data ExecuteEnv = ExecuteEnv
    { eeEnvOverride :: !EnvOverride
    , eeConfigureLock :: !(MVar ())
    , eeInstallLock :: !(MVar ())
    , eeBuildOpts :: !BuildOpts
    , eeBaseConfigOpts :: !BaseConfigOpts
    , eeGhcPkgIds :: !(TVar (Map PackageIdentifier Installed))
    , eeTempDir :: !(Path Abs Dir)
    , eeSetupHs :: !(Path Abs File)
    -- ^ Temporary Setup.hs for simple builds
    , eeSetupExe :: !(Maybe (Path Abs File))
    -- ^ Compiled version of eeSetupHs
    , eeCabalPkgVer :: !Version
    , eeTotalWanted :: !Int
    , eeWanted :: !(Set PackageName)
    , eeLocals :: ![LocalPackage]
    , eeSourceMap :: !SourceMap
    , eeGlobalDB :: !(Path Abs Dir)
    , eeGlobalPackages :: ![DumpPackage () ()]
    }
-- | Get a compiled Setup exe.  The executable is cached under
-- setup-exe-cache in the stack root, keyed by Cabal version, platform and
-- compiler version; when absent it is compiled into tmp-* paths and then
-- renamed into place so a partial build never looks like a cache hit.
getSetupExe :: M env m
            => Path Abs File -- ^ Setup.hs input file
            -> Path Abs Dir -- ^ temporary directory
            -> m (Maybe (Path Abs File))
getSetupExe setupHs tmpdir = do
    wc <- getWhichCompiler
    econfig <- asks getEnvConfig
    let config = getConfig econfig
        baseNameS = concat
            [ "setup-Simple-Cabal-"
            , versionString $ envConfigCabalVersion econfig
            , "-"
            , Distribution.Text.display $ configPlatform config
            , "-"
            , compilerVersionString $ envConfigCompilerVersion econfig
            ]
        exeNameS = baseNameS ++
            case configPlatform config of
                Platform _ Windows -> ".exe"
                _ -> ""
        outputNameS =
            case wc of
                Ghc -> exeNameS
                Ghcjs -> baseNameS ++ ".jsexe"
        jsExeNameS =
            baseNameS ++ ".jsexe"
        setupDir =
            configStackRoot config </>
            $(mkRelDir "setup-exe-cache")
    exePath <- fmap (setupDir </>) $ parseRelFile exeNameS
    jsExePath <- fmap (setupDir </>) $ parseRelDir jsExeNameS
    exists <- liftIO $ D.doesFileExist $ toFilePath exePath
    if exists
        then return $ Just exePath
        else do
            tmpExePath <- fmap (setupDir </>) $ parseRelFile $ "tmp-" ++ exeNameS
            tmpOutputPath <- fmap (setupDir </>) $ parseRelFile $ "tmp-" ++ outputNameS
            tmpJsExePath <- fmap (setupDir </>) $ parseRelDir $ "tmp-" ++ jsExeNameS
            liftIO $ D.createDirectoryIfMissing True $ toFilePath setupDir
            menv <- getMinimalEnvOverride
            -- Compile against only base and the pinned Cabal version so the
            -- cached Setup does not depend on ambient package databases.
            let args =
                    [ "-clear-package-db"
                    , "-global-package-db"
                    , "-hide-all-packages"
                    , "-package"
                    , "base"
                    , "-package"
                    , "Cabal-" ++ versionString (envConfigCabalVersion econfig)
                    , toFilePath setupHs
                    , "-o"
                    , toFilePath tmpOutputPath
                    ] ++
                    ["-build-runner" | wc == Ghcjs]
            runIn tmpdir (compilerExeName wc) menv args Nothing
            when (wc == Ghcjs) $ renameDir tmpJsExePath jsExePath
            renameFile tmpExePath exePath
            return $ Just exePath
-- | Allocate the shared resources for a build run and hand the resulting
-- 'ExecuteEnv' to the given action: creates a temporary directory containing
-- a minimal @Setup.hs@, compiles the cached setup executable, and sets up
-- the locks and shared state used by the individual build actions.
withExecuteEnv :: M env m
               => EnvOverride
               -> BuildOpts
               -> BaseConfigOpts
               -> [LocalPackage]
               -> [DumpPackage () ()] -- ^ global packages
               -> SourceMap
               -> (ExecuteEnv -> m a)
               -> m a
withExecuteEnv menv bopts baseConfigOpts locals globals sourceMap inner = do
    withCanonicalizedSystemTempDirectory stackProgName $ \tmpdir -> do
        configLock <- newMVar ()
        installLock <- newMVar ()
        idMap <- liftIO $ newTVarIO Map.empty
        -- Write out the minimal Setup.hs used for simple build types
        let setupHs = tmpdir </> $(mkRelFile "Setup.hs")
        liftIO $ writeFile (toFilePath setupHs) "import Distribution.Simple\nmain = defaultMain"
        setupExe <- getSetupExe setupHs tmpdir
        cabalPkgVer <- asks (envConfigCabalVersion . getEnvConfig)
        globalDB <- getGlobalDB menv =<< getWhichCompiler
        inner ExecuteEnv
            { eeEnvOverride = menv
            , eeBuildOpts = bopts
             -- Uncertain as to why we cannot run configures in parallel. This appears
             -- to be a Cabal library bug. Original issue:
             -- https://github.com/fpco/stack/issues/84. Ideally we'd be able to remove
             -- this.
            , eeConfigureLock = configLock
            , eeInstallLock = installLock
            , eeBaseConfigOpts = baseConfigOpts
            , eeGhcPkgIds = idMap
            , eeTempDir = tmpdir
            , eeSetupHs = setupHs
            , eeSetupExe = setupExe
            , eeCabalPkgVer = cabalPkgVer
            , eeTotalWanted = length $ filter lpWanted locals
            , eeWanted = wantedLocalPackages locals
            , eeLocals = locals
            , eeSourceMap = sourceMap
            , eeGlobalDB = globalDB
            , eeGlobalPackages = globals
            }
-- | Perform the actual plan: run all build actions, then install any
-- requested executables into the local bin directory, then run any
-- @--exec@ commands from the build options.
--
-- Fix: replaced @when (not isInPATH)@ with the idiomatic 'unless'.
executePlan :: M env m
            => EnvOverride
            -> BuildOpts
            -> BaseConfigOpts
            -> [LocalPackage]
            -> [DumpPackage () ()] -- ^ globals
            -> SourceMap
            -> InstalledMap
            -> Plan
            -> m ()
executePlan menv bopts baseConfigOpts locals globals sourceMap installedMap plan = do
    withExecuteEnv menv bopts baseConfigOpts locals globals sourceMap (executePlan' installedMap plan)
    -- Copy any executables the plan wants installed into the local bin dir
    unless (Map.null $ planInstallExes plan) $ do
        snapBin <- (</> bindirSuffix) `liftM` installationRootDeps
        localBin <- (</> bindirSuffix) `liftM` installationRootLocal
        destDir <- asks $ configLocalBin . getConfig
        createTree destDir
        destDir' <- liftIO . D.canonicalizePath . toFilePath $ destDir
        -- Warn if the destination is not on the user's PATH (compared via
        -- canonicalized paths of existing PATH entries)
        isInPATH <- liftIO . fmap (any (FP.equalFilePath destDir')) . (mapM D.canonicalizePath <=< filterM D.doesDirectoryExist) $ (envSearchPath menv)
        unless isInPATH $
            $logWarn $ T.concat
                [ "Installation path "
                , T.pack destDir'
                , " not found in PATH environment variable"
                ]
        platform <- asks getPlatform
        let ext =
                case platform of
                    Platform _ Windows -> ".exe"
                    _ -> ""
        currExe <- liftIO getExecutablePath -- needed for windows, see below
        installed <- forM (Map.toList $ planInstallExes plan) $ \(name, loc) -> do
            let bindir =
                    case loc of
                        Snap -> snapBin
                        Local -> localBin
            mfp <- resolveFileMaybe bindir $ T.unpack name ++ ext
            case mfp of
                Nothing -> do
                    $logWarn $ T.concat
                        [ "Couldn't find executable "
                        , name
                        , " in directory "
                        , T.pack $ toFilePath bindir
                        ]
                    return Nothing
                Just file -> do
                    let destFile = destDir' FP.</> T.unpack name ++ ext
                    $logInfo $ T.concat
                        [ "Copying from "
                        , T.pack $ toFilePath file
                        , " to "
                        , T.pack destFile
                        ]
                    liftIO $ case platform of
                        -- Windows can't overwrite the running executable;
                        -- see 'windowsRenameCopy'
                        Platform _ Windows | FP.equalFilePath destFile currExe ->
                            windowsRenameCopy (toFilePath file) destFile
                        _ -> D.copyFile (toFilePath file) destFile
                    return $ Just (destDir', [T.append name (T.pack ext)])
        -- Summarize which executables ended up where
        let destToInstalled = Map.fromListWith (++) (catMaybes installed)
        unless (Map.null destToInstalled) $ $logInfo ""
        forM_ (Map.toList destToInstalled) $ \(dest, executables) -> do
            $logInfo $ T.concat
                [ "Copied executables to "
                , T.pack dest
                , ":"]
            forM_ executables $ \exe -> $logInfo $ T.append "- " exe
    -- Run any --exec commands with the fully configured environment
    config <- asks getConfig
    menv' <- liftIO $ configEnvOverride config EnvSettings
                { esIncludeLocals = True
                , esIncludeGhcPackagePath = True
                , esStackExe = True
                , esLocaleUtf8 = False
                }
    forM_ (boptsExec bopts) $ \(cmd, args) -> do
        $logProcessRun cmd args
        callProcess Nothing menv' cmd args
-- | Windows can't write over the current executable. Instead, we rename the
-- current executable to something else and then do the copy.
windowsRenameCopy :: FilePath -> FilePath -> IO ()
windowsRenameCopy src dest = do
    let stagedCopy = dest ++ ".new"
        renamedOriginal = dest ++ ".old"
    -- Stage the new file next to the destination, move the original out of
    -- the way, then move the staged copy into place.
    D.copyFile src stagedCopy
    D.renameFile dest renamedOriginal
    D.renameFile stagedCopy dest
-- | Perform the actual plan (internal): unregister packages slated for
-- removal, convert the plan's tasks into 'Action's, and run them with the
-- configured parallelism while reporting progress.
executePlan' :: M env m
             => InstalledMap
             -> Plan
             -> ExecuteEnv
             -> m ()
executePlan' installedMap plan ee@ExecuteEnv {..} = do
    wc <- getWhichCompiler
    cv <- asks $ envConfigCompilerVersion . getEnvConfig
    -- First, unregister any local packages the plan marks for removal
    case Map.toList $ planUnregisterLocal plan of
        [] -> return ()
        ids -> do
            localDB <- packageDatabaseLocal
            forM_ ids $ \(id', (ident, mreason)) -> do
                $logInfo $ T.concat
                    [ T.pack $ packageIdentifierString ident
                    , ": unregistering"
                    , case mreason of
                        Nothing -> ""
                        Just reason -> T.concat
                            [ " ("
                            , reason
                            , ")"
                            ]
                    ]
                unregisterGhcPkgId eeEnvOverride wc cv localDB id' ident
    -- Yes, we're explicitly discarding result values, which in general would
    -- be bad. monad-unlift does this all properly at the type system level,
    -- but I don't want to pull it in for this one use case, when we know that
    -- stack always using transformer stacks that are safe for this use case.
    runInBase <- liftBaseWith $ \run -> return (void . run)
    -- Pair up each package's build task with its final (test/bench) task,
    -- then flatten each pair into runnable Actions
    let actions = concatMap (toActions installedMap' runInBase ee) $ Map.elems $ Map.mergeWithKey
            (\_ b f -> Just (Just b, Just f))
            (fmap (\b -> (Just b, Nothing)))
            (fmap (\f -> (Nothing, Just f)))
            (planTasks plan)
            (planFinals plan)
    threads <- asks $ configJobs . getConfig
    concurrentTests <- asks $ configConcurrentTests . getConfig
    let keepGoing =
            case boptsKeepGoing eeBuildOpts of
                Just kg -> kg
                Nothing -> boptsTests eeBuildOpts || boptsBenchmarks eeBuildOpts
        concurrentFinal =
            -- TODO it probably makes more sense to use a lock for test suites
            -- and just have the execution blocked. Turning off all concurrency
            -- on finals based on the --test option doesn't fit in well.
            if boptsTests eeBuildOpts
                then concurrentTests
                else True
    terminal <- asks getTerminal
    errs <- liftIO $ runActions threads keepGoing concurrentFinal actions $ \doneVar -> do
        let total = length actions
            -- Progress loop: sticky-log completed/total, blocking on the
            -- done counter until it changes
            loop prev
                | prev == total =
                    runInBase $ $logStickyDone ("Completed all " <> T.pack (show total) <> " actions.")
                | otherwise = do
                    when terminal $ runInBase $
                        $logSticky ("Progress: " <> T.pack (show prev) <> "/" <> T.pack (show total))
                    done <- atomically $ do
                        done <- readTVar doneVar
                        check $ done /= prev
                        return done
                    loop done
        if total > 1
            then loop 0
            else return ()
    when (toCoverage $ boptsTestOpts eeBuildOpts) $ do
        generateHpcUnifiedReport
        generateHpcMarkupIndex
    unless (null errs) $ throwM $ ExecutionFailure errs
    when (boptsHaddock eeBuildOpts) $ do
        generateLocalHaddockIndex eeEnvOverride wc eeBaseConfigOpts eeLocals
        generateDepsHaddockIndex eeEnvOverride wc eeBaseConfigOpts eeLocals
        generateSnapHaddockIndex eeEnvOverride wc eeBaseConfigOpts eeGlobalDB
  where
    -- Drop packages scheduled for unregistration from the installed map so
    -- the rest of the run treats them as not installed
    installedMap' = Map.difference installedMap
                  $ Map.fromList
                  $ map (\(ident, _) -> (packageIdentifierName ident, ()))
                  $ Map.elems
                  $ planUnregisterLocal plan
-- | Convert one package's (build task, final test/bench task) pair into the
-- concrete 'Action's handed to the action runner. The build action depends
-- on the builds of its missing dependencies; the final action additionally
-- depends on this package's own build when one exists.
toActions :: M env m
          => InstalledMap
          -> (m () -> IO ())
          -> ExecuteEnv
          -> (Maybe Task, Maybe (Task, LocalPackageTB)) -- build and final
          -> [Action]
toActions installedMap runInBase ee (mbuild, mfinal) =
    abuild ++ afinal
  where
    abuild =
        case mbuild of
            Nothing -> []
            Just task@Task {..} ->
                [ Action
                    { actionId = ActionId taskProvides ATBuild
                    , actionDeps =
                        (Set.map (\ident -> ActionId ident ATBuild) (tcoMissing taskConfigOpts))
                    , actionDo = \ac -> runInBase $ singleBuild runInBase ac ee task installedMap
                    }
                ]
    afinal =
        case mfinal of
            Nothing -> []
            Just (task@Task {..}, lptb) ->
                [ Action
                    { actionId = ActionId taskProvides ATFinal
                    , actionDeps = addBuild taskProvides $
                        (Set.map (\ident -> ActionId ident ATBuild) (tcoMissing taskConfigOpts))
                    , actionDo = \ac -> runInBase $ do
                        unless (Set.null $ lptbTests lptb) $ do
                            singleTest runInBase topts lptb ac ee task installedMap
                        unless (Set.null $ lptbBenches lptb) $ do
                            singleBench runInBase beopts lptb ac ee task installedMap
                    }
                ]
      where
        -- Only add a dependency on our own build if there actually is one
        addBuild ident =
            case mbuild of
                Nothing -> id
                Just _ -> Set.insert $ ActionId ident ATBuild
    bopts = eeBuildOpts ee
    topts = boptsTestOpts bopts
    beopts = boptsBenchmarkOpts bopts
-- | Generate the ConfigCache for a task: resolve the GhcPkgIds of its
-- missing dependencies from the shared 'eeGhcPkgIds' map (they must have
-- been built already), fold the extra flags into the configure options, and
-- return both the full dependency map and the cache entry.
getConfigCache :: MonadIO m
               => ExecuteEnv -> Task -> [Text]
               -> m (Map PackageIdentifier GhcPkgId, ConfigCache)
getConfigCache ExecuteEnv {..} Task {..} extra = do
    idMap <- liftIO $ readTVarIO eeGhcPkgIds
    let getMissing ident =
            case Map.lookup ident idMap of
                -- Every missing dep must already have completed its build
                Nothing -> error "singleBuild: invariant violated, missing package ID missing"
                Just (Library ident' x) -> assert (ident == ident') $ Just (ident, x)
                -- Executables contribute no package id
                Just (Executable _) -> Nothing
        missing' = Map.fromList $ mapMaybe getMissing $ Set.toList missing
        TaskConfigOpts missing mkOpts = taskConfigOpts
        opts = mkOpts missing'
        allDeps = Set.fromList $ Map.elems missing' ++ Map.elems taskPresent
        cache = ConfigCache
            { configCacheOpts = opts
                { coNoDirs = coNoDirs opts ++ map T.unpack extra
                }
            , configCacheDeps = allDeps
            , configCacheComponents =
                case taskType of
                    TTLocal lp -> Set.map renderComponent $ lpComponents lp
                    TTUpstream _ _ -> Set.empty
            , configCacheHaddock =
                shouldHaddockPackage eeBuildOpts eeWanted (packageIdentifierName taskProvides)
            }
        allDepsMap = Map.union missing' taskPresent
    return (allDepsMap, cache)
-- | Ensure that the configuration for the package matches what is given:
-- decide whether a (re)configure is needed by comparing the cached config
-- and .cabal modification time, run @configure@ under the configure lock if
-- so, and return whether a configure was performed.
--
-- Fix: the per-program @--with-*@ argument was produced with a
-- monad-polymorphic 'return' in the list monad (@return $ concat [...]@),
-- which obscures that it builds a singleton list; replaced with an explicit
-- list literal. Behavior is unchanged.
ensureConfig :: M env m
             => ConfigCache -- ^ newConfigCache
             -> Path Abs Dir -- ^ package directory
             -> ExecuteEnv
             -> m () -- ^ announce
             -> (Bool -> [String] -> m ()) -- ^ cabal
             -> Path Abs File -- ^ .cabal file
             -> m Bool
ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp = do
    newCabalMod <- liftIO (fmap modTime (D.getModificationTime (toFilePath cabalfp)))
    needConfig <-
        if boptsReconfigure eeBuildOpts
            then return True
            else do
                -- Determine the old and new configuration in the local directory, to
                -- determine if we need to reconfigure.
                mOldConfigCache <- tryGetConfigCache pkgDir
                mOldCabalMod <- tryGetCabalMod pkgDir
                return $ mOldConfigCache /= Just newConfigCache
                      || mOldCabalMod /= Just newCabalMod
    let ConfigureOpts dirs nodirs = configCacheOpts newConfigCache
    when needConfig $ withMVar eeConfigureLock $ \_ -> do
        deleteCaches pkgDir
        announce
        menv <- getMinimalEnvOverride
        -- Point Cabal at the exact tool binaries stack resolved; ghcjs tools
        -- only exist for Cabal >= 1.22
        let programNames =
                if eeCabalPkgVer < $(mkVersion "1.22")
                    then ["ghc", "ghc-pkg"]
                    else ["ghc", "ghc-pkg", "ghcjs", "ghcjs-pkg"]
        exes <- forM programNames $ \name -> do
            mpath <- findExecutable menv name
            -- Zero or one --with-<prog>=<path> argument per program
            return $ case mpath of
                Nothing -> []
                Just x -> [concat ["--with-", name, "=", toFilePath x]]
        cabal False $ "configure" : concat
            [ concat exes
            , dirs
            , nodirs
            ]
        writeConfigCache pkgDir newConfigCache
        writeCabalMod pkgDir newCabalMod
    return needConfig
-- | Log a message prefixed with the task's package identifier, e.g.
-- @foo-1.2.3: configure@.
announceTask :: MonadLogger m => Task -> Text -> m ()
announceTask task msg =
    $logInfo $ T.concat
        [ T.pack (packageIdentifierString (taskProvides task))
        , ": "
        , msg
        ]
-- | Set up everything needed to run cabal commands for a single task and
-- hand it to the continuation: the package (unpacking it from the index if
-- it is an upstream task), an optional log file (omitted when output goes to
-- the console), and a @cabal@ runner that compiles Setup.hs on demand or
-- reuses the cached simple-build setup executable.
withSingleContext :: M env m
                  => (m () -> IO ())
                  -> ActionContext
                  -> ExecuteEnv
                  -> Task
                  -> Maybe (Map PackageIdentifier GhcPkgId)
                  -- ^ All dependencies' package ids to provide to Setup.hs. If
                  -- Nothing, just provide global and snapshot package
                  -- databases.
                  -> Maybe String
                  -> (  Package
                     -> Path Abs File
                     -> Path Abs Dir
                     -> (Bool -> [String] -> m ())
                     -> (Text -> m ())
                     -> Bool
                     -> Maybe (Path Abs File, Handle)
                     -> m a)
                  -> m a
withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffix inner0 =
    withPackage $ \package cabalfp pkgDir ->
    withLogFile package $ \mlogFile ->
    withCabal package pkgDir mlogFile $ \cabal ->
    inner0 package cabalfp pkgDir cabal announce console mlogFile
  where
    announce = announceTask task
    wanted =
        case taskType of
            TTLocal lp -> lpWanted lp
            TTUpstream _ _ -> False
    -- Send output to the console only when this is the sole wanted package
    -- and no other remaining action targets a different package
    console = wanted
           && all (\(ActionId ident _) -> ident == taskProvides) (Set.toList acRemaining)
           && eeTotalWanted == 1
    -- Local tasks already have their source; upstream tasks are unpacked
    -- from the package index into the temp dir
    withPackage inner =
        case taskType of
            TTLocal lp -> inner (lpPackage lp) (lpCabalFile lp) (lpDir lp)
            TTUpstream package _ -> do
                mdist <- liftM Just distRelativeDir
                m <- unpackPackageIdents eeEnvOverride eeTempDir mdist $ Set.singleton taskProvides
                case Map.toList m of
                    [(ident, dir)]
                        | ident == taskProvides -> do
                            let name = packageIdentifierName taskProvides
                            cabalfpRel <- parseRelFile $ packageNameString name ++ ".cabal"
                            let cabalfp = dir </> cabalfpRel
                            inner package cabalfp dir
                    _ -> error $ "withPackage: invariant violated: " ++ show m
    -- When not printing to the console, capture all output in a log file
    withLogFile package inner
        | console = inner Nothing
        | otherwise = do
            logPath <- buildLogPath package msuffix
            createTree (parent logPath)
            let fp = toFilePath logPath
            bracket
                (liftIO $ openBinaryFile fp WriteMode)
                (liftIO . hClose)
                $ \h -> inner (Just (logPath, h))
    -- Build the "cabal" runner handed to the continuation
    withCabal package pkgDir mlogFile inner = do
        config <- asks getConfig
        menv <- liftIO $ configEnvOverride config EnvSettings
            { esIncludeLocals = taskLocation task == Local
            , esIncludeGhcPackagePath = False
            , esStackExe = False
            , esLocaleUtf8 = True
            }
        -- Resolve compiler paths lazily and at most once
        getGhcPath <- runOnce $ liftIO $ join $ findExecutable menv "ghc"
        getGhcjsPath <- runOnce $ liftIO $ join $ findExecutable menv "ghcjs"
        distRelativeDir' <- distRelativeDir
        esetupexehs <-
            -- Avoid broken Setup.hs files causing problems for simple build
            -- types, see:
            -- https://github.com/commercialhaskell/stack/issues/370
            case (packageSimpleType package, eeSetupExe) of
                (True, Just setupExe) -> return $ Left setupExe
                _ -> liftIO $ fmap Right $ getSetupHs pkgDir
        inner $ \stripTHLoading args -> do
            let cabalPackageArg =
                    "-package=" ++ packageIdentifierString
                                       (PackageIdentifier cabalPackageName
                                                          eeCabalPkgVer)
                -- Package databases/ids passed when compiling Setup.hs
                packageArgs =
                    case mdeps of
                        -- Explicit setup deps: expose exactly the task's
                        -- dependencies (minus Cabal itself) by package id
                        Just deps | explicitSetupDeps (packageName package) config ->
                            -- Stack always builds with the global Cabal for various
                            -- reproducibility issues.
                            let depsMinusCabal
                                    = map ghcPkgIdString
                                    $ Set.toList
                                    $ addGlobalPackages deps eeGlobalPackages
                            in
                                ( "-clear-package-db"
                                : "-global-package-db"
                                : map (("-package-db=" ++) . toFilePath) (bcoExtraDBs eeBaseConfigOpts)
                                ) ++
                                ( ("-package-db=" ++ toFilePath (bcoSnapDB eeBaseConfigOpts))
                                : ("-package-db=" ++ toFilePath (bcoLocalDB eeBaseConfigOpts))
                                : "-hide-all-packages"
                                : cabalPackageArg
                                : map ("-package-id=" ++) depsMinusCabal
                                )
                        -- This branch is debatable. It adds access to the
                        -- snapshot package database for Cabal. There are two
                        -- possible objections:
                        --
                        -- 1. This doesn't isolate the build enough; arbitrary
                        -- other packages available could cause the build to
                        -- succeed or fail.
                        --
                        -- 2. This doesn't provide enough packages: we should also
                        -- include the local database when building local packages.
                        --
                        -- Currently, this branch is only taken via `stack
                        -- sdist` or when explicitly requested in the
                        -- stack.yaml file.
                        _ ->
                            cabalPackageArg
                          : "-clear-package-db"
                          : "-global-package-db"
                          : map (("-package-db=" ++) . toFilePath) (bcoExtraDBs eeBaseConfigOpts)
                         ++ ["-package-db=" ++ toFilePath (bcoSnapDB eeBaseConfigOpts)]
                setupArgs = ("--builddir=" ++ toFilePath distRelativeDir') : args
                -- Run a process, streaming its output to the console or the
                -- log file, and throw on a nonzero exit code
                runExe exeName fullArgs = do
                    $logProcessRun (toFilePath exeName) fullArgs
                    -- Use createProcess_ to avoid the log file being closed afterwards
                    (Nothing, moutH, merrH, ph) <- liftIO $ createProcess_ "singleBuild" cp
                    let makeAbsolute = stripTHLoading -- If users want control, we should add a config option for this
                    ec <-
                        liftIO $
                        withAsync (runInBase $ maybePrintBuildOutput stripTHLoading makeAbsolute LevelInfo mlogFile moutH) $ \outThreadID ->
                        withAsync (runInBase $ maybePrintBuildOutput False makeAbsolute LevelWarn mlogFile merrH) $ \errThreadID -> do
                            ec <- waitForProcess ph
                            wait errThreadID
                            wait outThreadID
                            return ec
                    case ec of
                        ExitSuccess -> return ()
                        _ -> do
                            -- On failure, include the captured log contents
                            -- in the thrown exception
                            bs <- liftIO $
                                case mlogFile of
                                    Nothing -> return ""
                                    Just (logFile, h) -> do
                                        hClose h
                                        S.readFile $ toFilePath logFile
                            throwM $ CabalExitedUnsuccessfully
                                ec
                                taskProvides
                                exeName
                                fullArgs
                                (fmap fst mlogFile)
                                bs
                  where
                    cp0 = proc (toFilePath exeName) fullArgs
                    cp = cp0
                        { cwd = Just $ toFilePath pkgDir
                        , Process.env = envHelper menv
                        -- Ideally we'd create a new pipe here and then close it
                        -- below to avoid the child process from taking from our
                        -- stdin. However, if we do this, the child process won't
                        -- be able to get the codepage on Windows that we want.
                        -- See:
                        -- https://github.com/commercialhaskell/stack/issues/738
                        -- , std_in = CreatePipe
                        , std_out =
                            case mlogFile of
                                Nothing -> CreatePipe
                                Just (_, h) -> UseHandle h
                        , std_err =
                            case mlogFile of
                                Nothing -> CreatePipe
                                Just (_, h) -> UseHandle h
                        }
            wc <- getWhichCompiler
            -- Either reuse the cached setup exe, or compile this package's
            -- Setup.hs into dist/setup/setup first
            (exeName, fullArgs) <- case (esetupexehs, wc) of
                (Left setupExe, _) -> return (setupExe, setupArgs)
                (Right setuphs, compiler) -> do
                    distDir <- distDirFromDir pkgDir
                    let setupDir = distDir </> $(mkRelDir "setup")
                        outputFile = setupDir </> $(mkRelFile "setup")
                    createTree setupDir
                    compilerPath <-
                        case compiler of
                            Ghc -> getGhcPath
                            Ghcjs -> getGhcjsPath
                    runExe compilerPath $
                        [ "--make"
                        , "-odir", toFilePath setupDir
                        , "-hidir", toFilePath setupDir
                        , "-i", "-i."
                        ] ++ packageArgs ++
                        [ toFilePath setuphs
                        , "-o", toFilePath outputFile
                        ] ++
                        (case compiler of
                            Ghc -> []
                            Ghcjs -> ["-build-runner"])
                    return (outputFile, setupArgs)
            runExe exeName $ (if boptsCabalVerbose eeBuildOpts then ("--verbose":) else id) fullArgs
    -- Forward a child handle's output, unless it is already going straight
    -- to the log file
    maybePrintBuildOutput stripTHLoading makeAbsolute level mlogFile mh =
        case mh of
            Just h ->
                case mlogFile of
                    Just{} -> return ()
                    Nothing -> printBuildOutput stripTHLoading makeAbsolute level h
            Nothing -> return ()
-- | Execute a single build task: either reuse a matching precompiled
-- snapshot package, or run configure/build/haddock/install, then record the
-- resulting 'Installed' value in the shared 'eeGhcPkgIds' map.
singleBuild :: M env m
            => (m () -> IO ())
            -> ActionContext
            -> ExecuteEnv
            -> Task
            -> InstalledMap
            -> m ()
singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap = do
    (allDepsMap, cache) <- getCache
    mprecompiled <- getPrecompiled cache
    minstalled <-
        case mprecompiled of
            Just precompiled -> copyPreCompiled precompiled
            Nothing -> realConfigAndBuild cache allDepsMap
    case minstalled of
        Nothing -> return ()
        Just installed -> do
            writeFlagCache installed cache
            -- Publish the result so dependent tasks can resolve it
            liftIO $ atomically $ modifyTVar eeGhcPkgIds $ Map.insert taskProvides installed
  where
    pname = packageIdentifierName taskProvides
    shouldHaddockPackage' = shouldHaddockPackage eeBuildOpts eeWanted pname
    doHaddock package = shouldHaddockPackage' &&
        -- Works around haddock failing on bytestring-builder since it has no modules
        -- when bytestring is new enough.
        packageHasExposedModules package
    getCache = do
        let extra =
                -- We enable tests if the test suite dependencies are already
                -- installed, so that we avoid unnecessary recompilation based on
                -- cabal_macros.h changes when switching between 'stack build' and
                -- 'stack test'. See:
                -- https://github.com/commercialhaskell/stack/issues/805
                case taskType of
                    TTLocal lp -> concat
                        [ ["--enable-tests" | depsPresent installedMap $ lpTestDeps lp]
                        , ["--enable-benchmarks" | depsPresent installedMap $ lpBenchDeps lp]
                        ]
                    _ -> []
        getConfigCache ee task extra
    -- Look up a cached precompiled snapshot package matching this exact
    -- configuration; only applies to snapshot (non-haddock) builds
    getPrecompiled cache =
        case taskLocation task of
            Snap | not shouldHaddockPackage' -> do
                mpc <- readPrecompiledCache taskProvides
                    (configCacheOpts cache)
                    (configCacheDeps cache)
                case mpc of
                    Nothing -> return Nothing
                    Just pc -> do
                        -- Only use the cache entry if all of its recorded
                        -- files still exist on disk
                        let allM _ [] = return True
                            allM f (x:xs) = do
                                b <- f x
                                if b then allM f xs else return False
                        b <- liftIO $ allM D.doesFileExist $ maybe id (:) (pcLibrary pc) $ pcExes pc
                        return $ if b then Just pc else Nothing
            _ -> return Nothing
    -- Register the precompiled library and/or link its executables into the
    -- snapshot bin dir instead of rebuilding
    copyPreCompiled (PrecompiledCache mlib exes) = do
        announceTask task "copying precompiled package"
        forM_ mlib $ \libpath -> do
            menv <- getMinimalEnvOverride
            withMVar eeInstallLock $ \() -> do
                -- We want to ignore the global and user databases.
                -- Unfortunately, ghc-pkg doesn't take such arguments on the
                -- command line. Instead, we'll set GHC_PACKAGE_PATH. See:
                -- https://github.com/commercialhaskell/stack/issues/1146
                menv' <- modifyEnvOverride menv
                       $ Map.insert
                            "GHC_PACKAGE_PATH"
                            (T.pack $ toFilePath $ bcoSnapDB eeBaseConfigOpts)
                readProcessNull Nothing menv' "ghc-pkg"
                    [ "register"
                    , "--force"
                    , libpath
                    ]
        -- Hard-link executables into place, falling back to a copy
        liftIO $ forM_ exes $ \exe -> do
            D.createDirectoryIfMissing True bindir
            let dst = bindir FP.</> FP.takeFileName exe
            createLink exe dst `catchIO` \_ -> D.copyFile exe dst
        case (mlib, exes) of
            (Nothing, _:_) -> markExeInstalled (taskLocation task) taskProvides
            _ -> return ()
        -- Find the package in the database
        wc <- getWhichCompiler
        let pkgDbs = [bcoSnapDB eeBaseConfigOpts]
        mpkgid <- findGhcPkgId eeEnvOverride wc pkgDbs pname
        return $ Just $
            case mpkgid of
                Nothing -> Executable taskProvides
                Just pkgid -> Library taskProvides pkgid
      where
        bindir = toFilePath $ bcoSnapInstallRoot eeBaseConfigOpts </> bindirSuffix
    -- The non-cached path: configure (if needed) and build for real
    realConfigAndBuild cache allDepsMap = withSingleContext runInBase ac ee task (Just allDepsMap) Nothing
        $ \package cabalfp pkgDir cabal announce console _mlogFile -> do
            _neededConfig <- ensureConfig cache pkgDir ee (announce "configure") cabal cabalfp
            if boptsOnlyConfigure eeBuildOpts
                then return Nothing
                else liftM Just $ realBuild cache package pkgDir cabal announce console
    realBuild cache package pkgDir cabal announce console = do
        wc <- getWhichCompiler
        markExeNotInstalled (taskLocation task) taskProvides
        case taskType of
            TTLocal lp -> writeBuildCache pkgDir $ lpNewBuildCache lp
            TTUpstream _ _ -> return ()
        () <- announce "build"
        config <- asks getConfig
        extraOpts <- extraBuildOptions eeBuildOpts
        preBuildTime <- modTime <$> liftIO getCurrentTime
        cabal (console && configHideTHLoading config) $
            (case taskType of
                TTLocal lp -> concat
                    [ ["build"]
                    , ["lib:" ++ packageNameString (packageName package)
                      -- TODO: get this information from target parsing instead,
                      -- which will allow users to turn off library building if
                      -- desired
                      | packageHasLibrary package]
                    , map (T.unpack . T.append "exe:") $ Set.toList $
                        case lpExeComponents lp of
                            Just exes -> exes
                            -- Build all executables in the event that no
                            -- specific list is provided (as happens with
                            -- extra-deps).
                            Nothing -> packageExes package
                    ]
                TTUpstream _ _ -> ["build"]) ++ extraOpts
        -- Pick up files created during the build that weren't in the cache
        case taskType of
            TTLocal lp -> do
                (addBuildCache,warnings) <-
                    addUnlistedToBuildCache
                        preBuildTime
                        (lpPackage lp)
                        (lpCabalFile lp)
                        (lpNewBuildCache lp)
                mapM_ ($logWarn . ("Warning: " <>) . T.pack . show) warnings
                unless (null addBuildCache) $
                    writeBuildCache pkgDir $
                    Map.unions (lpNewBuildCache lp : addBuildCache)
            TTUpstream _ _ -> return ()
        when (doHaddock package) $ do
            announce "haddock"
            -- Prefer haddock's own hyperlinked source; fall back to HsColour
            sourceFlag <- do
                hyped <- tryProcessStdout Nothing eeEnvOverride "haddock" ["--hyperlinked-source"]
                case hyped of
                    -- Fancy crosslinked source
                    Right _ -> do
                        return ["--haddock-option=--hyperlinked-source"]
                    -- Older hscolour colouring
                    Left _ -> do
                        hscolourExists <- doesExecutableExist eeEnvOverride "HsColour"
                        unless hscolourExists $ $logWarn
                            ("Warning: haddock not generating hyperlinked sources because 'HsColour' not\n" <>
                             "found on PATH (use 'stack install hscolour' to install).")
                        return ["--hyperlink-source" | hscolourExists]
            cabal False (concat [["haddock", "--html", "--hoogle", "--html-location=../$pkg-$version/"]
                                ,sourceFlag])
        withMVar eeInstallLock $ \() -> do
            announce "install"
            cabal False ["install"]
        let pkgDbs =
                case taskLocation task of
                    Snap -> [bcoSnapDB eeBaseConfigOpts]
                    Local ->
                        [ bcoSnapDB eeBaseConfigOpts
                        , bcoLocalDB eeBaseConfigOpts
                        ]
        mpkgid <- findGhcPkgId eeEnvOverride wc pkgDbs (packageName package)
        let ident = PackageIdentifier (packageName package) (packageVersion package)
        mpkgid' <- case (packageHasLibrary package, mpkgid) of
            (False, _) -> assert (isNothing mpkgid) $ do
                markExeInstalled (taskLocation task) taskProvides -- TODO unify somehow with writeFlagCache?
                return $ Executable ident
            (True, Nothing) -> throwM $ Couldn'tFindPkgId $ packageName package
            (True, Just pkgid) -> return $ Library ident pkgid
        when (doHaddock package && shouldHaddockDeps eeBuildOpts) $
            withMVar eeInstallLock $ \() ->
                copyDepHaddocks
                    eeEnvOverride
                    wc
                    eeBaseConfigOpts
                    (pkgDbs ++ [eeGlobalDB])
                    (PackageIdentifier (packageName package) (packageVersion package))
                    Set.empty
        -- Snapshot builds are recorded for later precompiled-cache reuse
        case taskLocation task of
            Snap -> writePrecompiledCache eeBaseConfigOpts taskProvides
                        (configCacheOpts cache)
                        (configCacheDeps cache)
                        mpkgid (packageExes package)
            Local -> return ()
        return mpkgid'
-- | Determine if all of the dependencies given are installed at a version
-- within the required range.
depsPresent :: InstalledMap -> Map PackageName VersionRange -> Bool
depsPresent installedMap deps = and
    [ satisfied name range
    | (name, range) <- Map.toList deps
    ]
  where
    -- A dependency is satisfied when it appears in the installed map with a
    -- version inside its range.
    satisfied name range =
        case Map.lookup name installedMap of
            Just (_, installed) -> installedVersion installed `withinRange` range
            Nothing -> False
-- | Run the test suites for a single task: (re)configure and build the test
-- components if needed, execute each test suite executable, collect HPC
-- data when coverage is enabled, and throw 'TestSuiteFailure' if any suite
-- fails or its executable is missing.
singleTest :: M env m
           => (m () -> IO ())
           -> TestOpts
           -> LocalPackageTB
           -> ActionContext
           -> ExecuteEnv
           -> Task
           -> InstalledMap
           -> m ()
singleTest runInBase topts lptb ac ee task installedMap = do
    (allDepsMap, cache) <- getConfigCache ee task $
        case taskType task of
            TTLocal lp -> concat
                [ ["--enable-tests"]
                , ["--enable-benchmarks" | depsPresent installedMap $ lpBenchDeps lp]
                ]
            _ -> []
    withSingleContext runInBase ac ee task (Just allDepsMap) (Just "test") $ \package cabalfp pkgDir cabal announce console mlogFile -> do
        neededConfig <- ensureConfig cache pkgDir ee (announce "configure (test)") cabal cabalfp
        config <- asks getConfig
        testBuilt <- checkTestBuilt pkgDir
        -- Rebuild when reconfigured, when local files are dirty, or when the
        -- tests were never built for this configuration
        let needBuild = neededConfig ||
                (case taskType task of
                    TTLocal lp ->
                        case lpDirtyFiles lp of
                            Just _ -> True
                            Nothing -> False
                    -- Tests only run for local packages
                    _ -> assert False True) ||
                not testBuilt
            needHpc = toCoverage topts
            testsToRun = Set.toList $ lptbTests lptb
            components = map (T.unpack . T.append "test:") testsToRun
        when needBuild $ do
            announce "build (test)"
            unsetTestBuilt pkgDir
            unsetTestSuccess pkgDir
            case taskType task of
                TTLocal lp -> writeBuildCache pkgDir $ lpNewBuildCache lp
                TTUpstream _ _ -> assert False $ return ()
            extraOpts <- extraBuildOptions (eeBuildOpts ee)
            cabal (console && configHideTHLoading config) $
                "build" : (components ++ extraOpts)
            setTestBuilt pkgDir
        -- Decide whether to actually execute the suites: honor
        -- --no-run-tests, --rerun-tests, and previously recorded success
        toRun <-
            if toDisableRun topts
                then do
                    announce "Test running disabled by --no-run-tests flag."
                    return False
                else if toRerunTests topts
                    then return True
                    else do
                        success <- checkTestSuccess pkgDir
                        if success
                            then do
                                unless (null testsToRun) $ announce "skipping already passed test"
                                return False
                            else return True
        when toRun $ do
            bconfig <- asks getBuildConfig
            buildDir <- distDirFromDir pkgDir
            hpcDir <- hpcDirFromDir pkgDir
            when needHpc (createTree hpcDir)
            let exeExtension =
                    case configPlatform $ getConfig bconfig of
                        Platform _ Windows -> ".exe"
                        _ -> ""
            -- Run each suite, collecting per-suite failures
            errs <- liftM Map.unions $ forM testsToRun $ \testName -> do
                nameDir <- parseRelDir $ T.unpack testName
                nameExe <- parseRelFile $ T.unpack testName ++ exeExtension
                nameTix <- liftM (pkgDir </>) $ parseRelFile $ T.unpack testName ++ ".tix"
                let exeName = buildDir </> $(mkRelDir "build") </> nameDir </> nameExe
                exists <- fileExists exeName
                menv <- liftIO $ configEnvOverride config EnvSettings
                    { esIncludeLocals = taskLocation task == Local
                    , esIncludeGhcPackagePath = True
                    , esStackExe = True
                    , esLocaleUtf8 = False
                    }
                if exists
                    then do
                        -- We clear out the .tix files before doing a run.
                        when needHpc $ do
                            tixexists <- fileExists nameTix
                            when tixexists $
                                $logWarn ("Removing HPC file " <> T.pack (toFilePath nameTix))
                            removeFileIfExists nameTix
                        let args = toAdditionalArgs topts
                            argsDisplay = case args of
                                            [] -> ""
                                            _ -> ", args: " <> T.intercalate " " (map showProcessArgDebug args)
                        announce $ "test (suite: " <> testName <> argsDisplay <> ")"
                        let cp = (proc (toFilePath exeName) args)
                                { cwd = Just $ toFilePath pkgDir
                                , Process.env = envHelper menv
                                , std_in = CreatePipe
                                , std_out =
                                    case mlogFile of
                                        Nothing -> Inherit
                                        Just (_, h) -> UseHandle h
                                , std_err =
                                    case mlogFile of
                                        Nothing -> Inherit
                                        Just (_, h) -> UseHandle h
                                }
                        -- Use createProcess_ to avoid the log file being closed afterwards
                        (Just inH, Nothing, Nothing, ph) <- liftIO $ createProcess_ "singleBuild.runTests" cp
                        liftIO $ hClose inH
                        ec <- liftIO $ waitForProcess ph
                        -- Move the .tix file out of the package
                        -- directory into the hpc work dir, for
                        -- tidiness.
                        when needHpc $
                            updateTixFile nameTix (packageIdentifierString (packageIdentifier package))
                        return $ case ec of
                            ExitSuccess -> Map.empty
                            _ -> Map.singleton testName $ Just ec
                    else do
                        $logError $ T.concat
                            [ "Test suite "
                            , testName
                            , " executable not found for "
                            , packageNameText $ packageName package
                            ]
                        return $ Map.singleton testName Nothing
            when needHpc $ generateHpcReport pkgDir package testsToRun
            -- On failure, attach the captured log contents to the exception
            bs <- liftIO $
                case mlogFile of
                    Nothing -> return ""
                    Just (logFile, h) -> do
                        hClose h
                        S.readFile $ toFilePath logFile
            unless (Map.null errs) $ throwM $ TestSuiteFailure
                (taskProvides task)
                errs
                (fmap fst mlogFile)
                bs
            setTestSuccess pkgDir
-- | Run the benchmarks for a single task: (re)configure and build if
-- needed, then invoke @cabal bench@ (unless disabled by
-- @--no-run-benchmarks@).
singleBench :: M env m
            => (m () -> IO ())
            -> BenchmarkOpts
            -> LocalPackageTB
            -> ActionContext
            -> ExecuteEnv
            -> Task
            -> InstalledMap
            -> m ()
singleBench runInBase beopts _lptb ac ee task installedMap = do
    (allDepsMap, cache) <- getConfigCache ee task $
        case taskType task of
            TTLocal lp -> concat
                [ ["--enable-tests" | depsPresent installedMap $ lpTestDeps lp]
                , ["--enable-benchmarks"]
                ]
            _ -> []
    withSingleContext runInBase ac ee task (Just allDepsMap) (Just "bench") $ \_package cabalfp pkgDir cabal announce console _mlogFile -> do
        neededConfig <- ensureConfig cache pkgDir ee (announce "configure (benchmarks)") cabal cabalfp
        benchBuilt <- checkBenchBuilt pkgDir
        -- Rebuild when reconfigured, when local files are dirty, or when the
        -- benchmarks were never built for this configuration
        let needBuild = neededConfig ||
                (case taskType task of
                    TTLocal lp ->
                        case lpDirtyFiles lp of
                            Just _ -> True
                            Nothing -> False
                    -- Benchmarks only run for local packages
                    _ -> assert False True) ||
                not benchBuilt
        when needBuild $ do
            announce "build (benchmarks)"
            unsetBenchBuilt pkgDir
            case taskType task of
                TTLocal lp -> writeBuildCache pkgDir $ lpNewBuildCache lp
                TTUpstream _ _ -> assert False $ return ()
            config <- asks getConfig
            extraOpts <- extraBuildOptions (eeBuildOpts ee)
            cabal (console && configHideTHLoading config) ("build" : extraOpts)
            setBenchBuilt pkgDir
        -- Forward any user-supplied benchmark arguments
        let args = maybe []
                         ((:[]) . ("--benchmark-options=" <>))
                         (beoAdditionalArgs beopts)
        toRun <-
            if beoDisableRun beopts
                then do
                    announce "Benchmark running disabled by --no-run-benchmarks flag."
                    return False
                else do
                    return True
        when toRun $ do
            announce "benchmarks"
            cabal False ("bench" : args)
-- | Grab all output from the given @Handle@ and print it to stdout, stripping
-- Template Haskell "Loading package" lines. Does work in a separate thread.
printBuildOutput :: (MonadIO m, MonadBaseControl IO m, MonadLogger m)
                 => Bool -- ^ exclude TH loading?
                 -> Bool -- ^ convert paths to absolute?
                 -> LogLevel
                 -> Handle -> m ()
printBuildOutput excludeTHLoading makeAbsolute level outH = void $
    CB.sourceHandle outH
    $$ CB.lines
    =$ CL.map stripCarriageReturn
    =$ CL.filter (not . isTHLoading)
    =$ CL.mapM toAbsolutePath
    =$ CL.mapM_ (monadLoggerLog $(TH.location >>= liftLoc) "" level)
  where
    -- | Is this line a Template Haskell "Loading package" line
    -- ByteString
    isTHLoading :: S8.ByteString -> Bool
    isTHLoading _ | not excludeTHLoading = False
    isTHLoading bs =
        "Loading package " `S8.isPrefixOf` bs &&
        ("done." `S8.isSuffixOf` bs || "done.\r" `S8.isSuffixOf` bs)
    -- | Convert GHC error lines with file paths to have absolute file paths
    toAbsolutePath bs | not makeAbsolute = return bs
    toAbsolutePath bs = do
        -- Split at the first colon; the prefix is treated as a candidate
        -- file path when the suffix looks like ":line:col"
        let (x, y) = S.break (== _colon) bs
        mabs <-
            if isValidSuffix y
                then do
                    efp <- liftIO $ tryIO $ D.canonicalizePath $ S8.unpack x
                    case efp of
                        Left _ -> return Nothing
                        Right fp -> return $ Just $ S8.pack fp
                else return Nothing
        case mabs of
            Nothing -> return bs
            Just fp -> return $ fp `S.append` y
    -- | Match the line:column format at the end of lines
    isValidSuffix bs0 = maybe False (const True) $ do
        guard $ not $ S.null bs0
        guard $ S.head bs0 == _colon
        (_, bs1) <- S8.readInt $ S.drop 1 bs0
        guard $ not $ S.null bs1
        guard $ S.head bs1 == _colon
        (_, bs2) <- S8.readInt $ S.drop 1 bs1
        guard $ bs2 == ":"
    -- | Strip @\r@ characters from the byte vector. Used because Windows.
    stripCarriageReturn :: ByteString -> ByteString
    stripCarriageReturn = S8.filter (not . (=='\r'))
-- | Find the Setup.hs or Setup.lhs in the given directory, preferring
-- @Setup.hs@. If neither exists, throw 'NoSetupHsFound'.
getSetupHs :: Path Abs Dir -- ^ project directory
           -> IO (Path Abs File)
getSetupHs dir = search candidates
  where
    -- Checked in order of preference.
    candidates = [ dir </> $(mkRelFile "Setup.hs")
                 , dir </> $(mkRelFile "Setup.lhs")
                 ]
    search [] = throwM $ NoSetupHsFound dir
    search (candidate:rest) = do
        present <- fileExists candidate
        if present
            then return candidate
            else search rest
-- Do not pass `-hpcdir` as GHC option if the coverage is not enabled.
-- This helps running stack-compiled programs with dynamic interpreters like `hint`.
-- Cfr: https://github.com/commercialhaskell/stack/issues/997
extraBuildOptions :: M env m => BuildOpts -> m [String]
extraBuildOptions bopts = do
    -- Always emit the interface dumps; they are used for dirtiness checking.
    let ddumpOpts = " -ddump-hi -ddump-to-file"
    -- Idiomatic if/then/else instead of case-ing on a Bool.
    if toCoverage (boptsTestOpts bopts)
        then do
            hpcIndexDir <- toFilePath . (</> dotHpc) <$> hpcRelativeDir
            return ["--ghc-options", "-hpcdir " ++ hpcIndexDir ++ ddumpOpts]
        else return ["--ghc-options", ddumpOpts]
-- | Take the given list of package dependencies and the contents of the global
-- package database, and construct a set of installed package IDs that:
--
-- * Excludes the Cabal library (it's added later)
--
-- * Includes all packages depended on by this package
--
-- * Includes all global packages, unless: (1) it's hidden, (2) it's shadowed
-- by a depended-on package, or (3) one of its dependencies is not met.
--
-- See:
--
-- * https://github.com/commercialhaskell/stack/issues/941
--
-- * https://github.com/commercialhaskell/stack/issues/944
--
-- * https://github.com/commercialhaskell/stack/issues/949
addGlobalPackages :: Map PackageIdentifier GhcPkgId -- ^ dependencies of the package
                  -> [DumpPackage () ()] -- ^ global packages
                  -> Set GhcPkgId
addGlobalPackages deps globals0 =
    res
  where
    -- Initial set of packages: the installed IDs of all dependencies
    res0 = Map.elems $ Map.filterWithKey (\ident _ -> not $ isCabal ident) deps

    -- First check on globals: it's not shadowed by a dep, it's not Cabal, and
    -- it's exposed
    goodGlobal1 dp = not (isDep dp)
                  && not (isCabal $ dpPackageIdent dp)
                  && dpIsExposed dp
    globals1 = filter goodGlobal1 globals0

    -- Create a Map of unique package names in the global database,
    -- keeping only the newest version of each name (see chooseBest)
    globals2 = Map.fromListWith chooseBest
             $ map (packageIdentifierName . dpPackageIdent &&& id) globals1

    -- Final result: add in globals that have their dependencies met
    res = loop id (Map.elems globals2) $ Set.fromList res0

    ----------------------------------
    -- Some auxiliary helper functions
    ----------------------------------

    -- Is the given package identifier for any version of Cabal
    isCabal (PackageIdentifier name _) = name == $(mkPackageName "Cabal")

    -- Is the given package name provided by the package dependencies?
    isDep dp = packageIdentifierName (dpPackageIdent dp) `Set.member` depNames
    depNames = Set.map packageIdentifierName $ Map.keysSet deps

    -- Choose the best of two competing global packages (the newest version)
    chooseBest dp1 dp2
        | getVer dp1 < getVer dp2 = dp2
        | otherwise               = dp1
      where
        getVer = packageIdentifierVersion . dpPackageIdent

    -- Are all dependencies of the given package met by the given Set of
    -- installed packages
    depsMet dp gids = all (`Set.member` gids) (dpDepends dp)

    -- Find all globals that have all of their dependencies met.
    -- 'front' is a difference list holding the packages skipped so far;
    -- whenever a package is accepted, the skipped ones are retried from
    -- the beginning, so this reaches a fixed point.
    loop front (dp:dps) gids
        -- This package has its deps met. Add it to the list of dependencies
        -- and then traverse the list from the beginning (this package may have
        -- been a dependency of an earlier one).
        | depsMet dp gids = loop id (front dps) (Set.insert (dpGhcPkgId dp) gids)
        -- Deps are not met, keep going
        | otherwise = loop (front . (dp:)) dps gids
    -- None of the packages we checked can be added, therefore drop them all
    -- and return our results
    loop _ [] gids = gids
| lukexi/stack | src/Stack/Build/Execute.hs | bsd-3-clause | 60,986 | 344 | 28 | 24,588 | 11,448 | 6,103 | 5,345 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
{- | This module defines `Schema`s,
which can generate passwords. -}
module Scat.Schemas
(
-- * Type
Schema
-- ** Constructors
, withDefaultSize
, ignoreSize
-- ** Destructor
, getBuilder
-- * Built-in schemas
-- ** Passwords
, safe
, alphanumeric
, paranoiac
-- ** PIN
, pin
-- ** Pass phrases
, pokemons
, diceware
-- ** Pattern lock
, androidPatternLock
) where
import Data.Functor ((<$>))
import Data.Ratio ((%))
import Data.Monoid ((<>))
import Data.List (intercalate, (\\))
import Data.Vector (Vector)
import qualified Data.Vector as V
import Control.Monad (replicateM)
import System.IO
import Scat.Builder
import Paths_scat
-- | Password builder.
data Schema = Schema
    { defaultSize :: Int
      -- ^ Size used by 'getBuilder' when the caller supplies none.
    , builder :: Int -> Builder String
      -- ^ Builds a password of the requested size.
    }
-- | Returns a `Builder` given an optional size.
-- When no size is supplied, the schema's default size is used.
getBuilder :: Schema -> Maybe Int -> Builder String
getBuilder schema msize = builder schema chosenSize
  where
    chosenSize = maybe (defaultSize schema) id msize
-- | Specifies the Schema will not be sensible to any size parameter.
-- The default size is left 'undefined'; it is never forced, because the
-- wrapped builder discards its size argument via 'const'.
ignoreSize :: Builder String -> Schema
ignoreSize = Schema undefined . const

-- | Specifies the Schema accepts a size parameter, with the given default.
withDefaultSize :: Int -> (Int -> Builder String) -> Schema
withDefaultSize = Schema
-- | Paranoiac mode, entropy of 512 bits with the default size of 78.
paranoiac :: Schema
paranoiac = withDefaultSize 78 (flip replicateM ascii)
{- | Generates a password,
     containing upper case letters,
     lower case letters,
     digits and symbols.
     Entropy of about 115 bits for length 18. -}
safe :: Schema
safe = withDefaultSize 18 $ \ siz -> do
    -- Guarantee room for at least one of each character class.
    let total = max siz 4
        lo = max 1 $ floor $ total % 8
        hi = ceiling $ total % 4

    countUpper   <- inRange (lo, hi)
    countDigit   <- inRange (lo, hi)
    countSpecial <- inRange (lo, hi)
    let countLower = total - countUpper - countSpecial - countDigit

    us <- replicateM countUpper upper
    ds <- replicateM countDigit digit
    ss <- replicateM countSpecial special
    ls <- replicateM countLower lower

    -- Mix the classes so their positions are unpredictable.
    shuffle (us <> ds <> ss <> ls)
{- | Generates a password,
     containing upper case letters,
     lower case letters and
     digits, but no symbols.
     Entropy of about 104.2 bits for length 18. -}
alphanumeric :: Schema
alphanumeric = withDefaultSize 18 $ \ siz -> do
    -- Guarantee room for at least one of each character class.
    let total = max siz 4
        lo = max 1 $ floor $ total % 8
        hi = ceiling $ total % 4

    countUpper <- inRange (lo, hi)
    countDigit <- inRange (lo, hi)
    let countLower = total - countUpper - countDigit

    us <- replicateM countUpper upper
    ds <- replicateM countDigit digit
    ls <- replicateM countLower lower

    -- Mix the classes so their positions are unpredictable.
    shuffle (us <> ds <> ls)
{- | Generates a PIN number, of length `n`.
     Entropy of about @3.32 * n@ bits. -}
pin :: Schema
pin = withDefaultSize 6 (flip replicateM digit)
-- | Generates an Android lock pattern.
-- Picks up to 9 distinct points on a 3x3 grid, never "jumping over"
-- an unused point, and renders them as compass directions.
androidPatternLock :: Schema
androidPatternLock = withDefaultSize 9 $ \ s -> do
    xs <- loop (min s (height * width)) []
    return $ intercalate " - " $ map showPosition xs
  where
    -- Gets `n` points. Positions are accumulated in reverse order.
    loop :: Int -> [(Int, Int)] -> Builder [(Int, Int)]
    loop n xs | n <= 0 = return $ reverse xs
    loop n xs = do
        x <- oneOf $ possibilities xs
        loop (n - 1) (x : xs)

    -- Grid dimensions.
    height = 3
    width = 3

    -- Text representation for a position.
    showPosition (1, 1) = "center"
    showPosition (i, j) = vshow i ++ hshow j
      where
        vshow 0 = "north"
        vshow 1 = ""
        vshow _ = "south"

        hshow 0 = "west"
        hshow 1 = ""
        hshow _ = "east"

    -- All positions.
    allPositions = [(i, j) | i <- [0 .. height - 1], j <- [0 .. width - 1]]

    {- Possible positions given a list of already used ones.
       The head of the list is the last used position. -}
    possibilities [] = allPositions
    possibilities pps@(p : ps) = filter isPossible candidates
      where
        candidates = allPositions \\ pps
        -- A candidate is reachable only if every grid point lying on the
        -- straight segment from the last position has already been used.
        isPossible q = all (`elem` ps) $ interfere p q

    -- The list of positions that are on the way between two positions.
    -- Uses the gcd of the coordinate deltas to enumerate the intermediate
    -- lattice points on the segment.
    interfere (i, j) (k, l) = do
        r <- [1 .. steps - 1]
        return (i + r * vstep, j + r * hstep)
      where
        vdiff = k - i
        hdiff = l - j
        steps = gcd vdiff hdiff
        vstep = vdiff `div` steps
        hstep = hdiff `div` steps
{- | Generates a password with `s` of the original Pokemons and their level.
     Entropy of about 55.5 bits for 4 pokemons. -}
pokemons :: IO Schema
pokemons = fromFile "pokemons.txt" $ \ names ->
    withDefaultSize 4 $ \ count -> do
        chosen <- replicateM count $ oneOfV names
        levels <- replicateM count $ inRange (1, 100 :: Int)
        -- Pair each pokemon with its level, e.g. "Pikachu 42".
        let entries = zipWith (\ nm lvl -> nm ++ " " ++ show lvl) chosen levels
        return $ intercalate ", " entries
{- | Generates a password with `s` words
     from the Diceware list.
     Entropy of about 64.6 bits for 5 words. -}
diceware :: IO Schema
diceware = fromFile "diceware.txt" $ \ wordList ->
    withDefaultSize 5 $ \ count ->
        unwords <$> replicateM count (oneOfV wordList)
-- | Feeds all lines of a file to a function and gets the result.
-- The path is resolved through cabal's 'getDataFileName'.
fromFile :: FilePath -> (Vector String -> a) -> IO a
fromFile fp bs = do
    fp' <- getDataFileName fp
    withFile fp' ReadMode $ \ h -> do
        -- The bang forces the vector to WHNF, which materialises the list
        -- of lines while the handle is still open; without it the lazy
        -- 'hGetContents' result would be consumed after 'withFile' closes
        -- the handle.
        !vect <- (V.fromList . lines) <$> hGetContents h
        return $ bs vect
| rnhmjoj/scat | src/Scat/Schemas.hs | bsd-3-clause | 5,552 | 0 | 18 | 1,550 | 1,539 | 804 | 735 | 114 | 8 |
-- Copyright (c) 2014 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -Wall -Werror #-}
module Control.Monad.Frontend(
MonadCommentBuffer(..),
MonadGenpos(..),
MonadGensym(..),
MonadPositions(..),
MonadKeywords(..),
MonadMessages(..),
FrontendT,
Frontend,
FrontendNoCommentsT,
FrontendNoComments,
FrontendNoBufferT,
FrontendNoBuffer,
FrontendNoCommentsNoBufferT,
FrontendNoCommentsNoBuffer,
runFrontendT,
runFrontend,
runFrontendNoCommentsT,
runFrontendNoComments,
runFrontendNoBufferT,
runFrontendNoBuffer,
runFrontendNoCommentsNoBufferT,
runFrontendNoCommentsNoBuffer
) where
import Control.Monad.CommentBuffer
import Control.Monad.Genpos
import Control.Monad.Gensym
import Control.Monad.Keywords
import Control.Monad.Messages
import Control.Monad.SourceBuffer
import Control.Monad.SkipComments
import Control.Monad.Trans
import Data.ByteString
-- | Frontend monad transformer
type FrontendT pos tok m =
  (KeywordsT pos tok (CommentBufferT (SourceBufferT (GenposT (GensymT m)))))

-- | Frontend monad
type Frontend pos tok = FrontendT pos tok IO

-- | Frontend monad transformer without comment buffering
type FrontendNoCommentsT pos tok m a =
  (KeywordsT pos tok (SkipCommentsT (SourceBufferT (GenposT (GensymT m))))) a

-- | Frontend monad without comment buffering
type FrontendNoComments pos tok a = FrontendNoCommentsT pos tok IO a

-- | Frontend monad transformer without source buffering
type FrontendNoBufferT pos tok m =
  (KeywordsT pos tok (CommentBufferT (GenposT (GensymT m))))

-- | Frontend monad without source buffering
type FrontendNoBuffer pos tok = FrontendNoBufferT pos tok IO

-- | Frontend monad transformer without comment buffering or source buffering
type FrontendNoCommentsNoBufferT pos tok m =
  (KeywordsT pos tok (SkipCommentsT (GenposT (GensymT m))))

-- | Frontend monad without comment buffering or source buffering
type FrontendNoCommentsNoBuffer pos tok = FrontendNoCommentsNoBufferT pos tok IO
-- | Run a 'FrontendT' computation, given the lexer action and the
-- keyword table used to initialize the keywords layer.
runFrontendT :: MonadIO m => FrontendT pos tok m a
             -> [(ByteString, pos -> tok)]
             -> m a
runFrontendT lexer keywords =
  -- Unwrap the transformer stack from the outside in.
  startGensymT
    (startGenposT
       (runSourceBufferT
          (runCommentBufferT
             (runKeywordsT lexer keywords))))
-- | Run a 'Frontend' computation in 'IO'.
runFrontend :: Frontend pos tok a
            -> [(ByteString, pos -> tok)]
            -> IO a
runFrontend = runFrontendT
-- | Run a 'FrontendNoCommentsT' computation; comments are skipped
-- rather than buffered.
runFrontendNoCommentsT :: MonadIO m => FrontendNoCommentsT pos tok m a
                       -> [(ByteString, pos -> tok)]
                       -> m a
runFrontendNoCommentsT lexer keywords =
  -- Unwrap the transformer stack from the outside in.
  startGensymT
    (startGenposT
       (runSourceBufferT
          (runSkipCommentsT
             (runKeywordsT lexer keywords))))
-- | Run a 'FrontendNoComments' computation in 'IO'.
runFrontendNoComments :: FrontendNoComments pos tok a
                      -> [(ByteString, pos -> tok)]
                      -> IO a
runFrontendNoComments = runFrontendNoCommentsT
-- | Run a 'FrontendNoBufferT' computation; no source buffering layer.
runFrontendNoBufferT :: MonadIO m => FrontendNoBufferT pos tok m a
                     -> [(ByteString, pos -> tok)]
                     -> m a
runFrontendNoBufferT lexer keywords =
  -- Unwrap the transformer stack from the outside in.
  startGensymT
    (startGenposT
       (runCommentBufferT
          (runKeywordsT lexer keywords)))
-- | Run a 'FrontendNoBuffer' computation in 'IO'.
runFrontendNoBuffer :: FrontendNoBuffer pos tok a
                    -> [(ByteString, pos -> tok)]
                    -> IO a
runFrontendNoBuffer = runFrontendNoBufferT
-- | Run a 'FrontendNoCommentsNoBufferT' computation: comments are
-- skipped and there is no source buffering layer.
--
-- Fixes a copy-paste bug: the original duplicated 'runFrontendNoBufferT'
-- (wrong signature over 'FrontendNoBufferT' and 'runCommentBufferT' in
-- the body), so it could not actually run the NoComments stack, which is
-- built on 'SkipCommentsT' per the type alias.
runFrontendNoCommentsNoBufferT :: MonadIO m => FrontendNoCommentsNoBufferT pos tok m a
                               -> [(ByteString, pos -> tok)]
                               -> m a
runFrontendNoCommentsNoBufferT lexer keywords =
  let
    nocomments = runKeywordsT lexer keywords
    genpos = runSkipCommentsT nocomments
    gensym = startGenposT genpos
  in
    startGensymT gensym

-- | Run a 'FrontendNoCommentsNoBuffer' computation in 'IO'.
-- Delegates to the matching transformer runner (the original delegated
-- to 'runFrontendNoBufferT', which handles a different stack).
runFrontendNoCommentsNoBuffer :: FrontendNoCommentsNoBuffer pos tok a
                              -> [(ByteString, pos -> tok)]
                              -> IO a
runFrontendNoCommentsNoBuffer = runFrontendNoCommentsNoBufferT
| emc2/compiler-misc | src/Control/Monad/Frontend.hs | bsd-3-clause | 5,714 | 0 | 15 | 1,312 | 957 | 543 | 414 | 99 | 1 |
{-# LANGUAGE CPP, TypeFamilies #-}
-- Type definitions for the constraint solver
module TcSMonad (
-- The work list
WorkList(..), isEmptyWorkList, emptyWorkList,
extendWorkListNonEq, extendWorkListCt, extendWorkListDerived,
extendWorkListCts, appendWorkList,
selectNextWorkItem,
workListSize, workListWantedCount,
updWorkListTcS,
-- The TcS monad
TcS, runTcS, runTcSWithEvBinds,
failTcS, tryTcS, nestTcS, nestImplicTcS, recoverTcS,
runTcPluginTcS, addUsedRdrNamesTcS, deferTcSForAllEq,
-- Tracing etc
panicTcS, traceTcS,
traceFireTcS, bumpStepCountTcS, csTraceTcS,
wrapErrTcS, wrapWarnTcS,
-- Evidence creation and transformation
Freshness(..), freshGoals, isFresh,
newTcEvBinds, newWantedEvVar, newWantedEvVarNC,
unifyTyVar, unflattenFmv, reportUnifications,
setEvBind, setWantedEvBind, setEvBindIfWanted,
newEvVar, newGivenEvVar, newGivenEvVars,
emitNewDerived, emitNewDeriveds, emitNewDerivedEq,
checkReductionDepth,
getInstEnvs, getFamInstEnvs, -- Getting the environments
getTopEnv, getGblEnv, getLclEnv, getTcEvBinds, getTcLevel,
getTcEvBindsMap,
tcLookupClass,
-- Inerts
InertSet(..), InertCans(..),
updInertTcS, updInertCans, updInertDicts, updInertIrreds,
getNoGivenEqs, setInertCans,
getInertEqs, getInertCans, getInertModel, getInertGivens,
emptyInert, getTcSInerts, setTcSInerts, takeGivenInsolubles,
matchableGivens, prohibitedSuperClassSolve,
getUnsolvedInerts,
removeInertCts,
addInertCan, addInertEq, insertFunEq,
emitInsoluble, emitWorkNC, emitWorkCt,
-- The Model
InertModel, kickOutAfterUnification,
-- Inert Safe Haskell safe-overlap failures
addInertSafehask, insertSafeOverlapFailureTcS, updInertSafehask,
getSafeOverlapFailures,
-- Inert CDictCans
lookupInertDict, findDictsByClass, addDict, addDictsByClass,
delDict, partitionDicts, foldDicts, filterDicts,
-- Inert CTyEqCans
EqualCtList, findTyEqs, foldTyEqs, isInInertEqs,
-- Inert solved dictionaries
addSolvedDict, lookupSolvedDict,
-- Irreds
foldIrreds,
-- The flattening cache
lookupFlatCache, extendFlatCache, newFlattenSkolem, -- Flatten skolems
-- Inert CFunEqCans
updInertFunEqs, findFunEq, sizeFunEqMap, filterFunEqs,
findFunEqsByTyCon, findFunEqs, partitionFunEqs, foldFunEqs,
instDFunType, -- Instantiation
-- MetaTyVars
newFlexiTcSTy, instFlexiTcS, instFlexiTcSHelperTcS,
cloneMetaTyVar, demoteUnfilledFmv,
TcLevel, isTouchableMetaTyVarTcS,
isFilledMetaTyVar_maybe, isFilledMetaTyVar,
zonkTyVarsAndFV, zonkTcType, zonkTcTypes, zonkTcTyVar, zonkSimples, zonkWC,
-- References
newTcRef, readTcRef, updTcRef,
-- Misc
getDefaultInfo, getDynFlags, getGlobalRdrEnvTcS,
matchFam, matchFamTcM,
checkWellStagedDFun,
pprEq -- Smaller utils, re-exported from TcM
-- TODO (DV): these are only really used in the
-- instance matcher in TcSimplify. I am wondering
-- if the whole instance matcher simply belongs
-- here
) where
#include "HsVersions.h"
import HscTypes
import qualified Inst as TcM
import InstEnv
import FamInst
import FamInstEnv
import qualified TcRnMonad as TcM
import qualified TcMType as TcM
import qualified TcEnv as TcM
( checkWellStaged, topIdLvl, tcGetDefaultTys, tcLookupClass )
import Kind
import TcType
import DynFlags
import Type
import Unify
import TcEvidence
import Class
import TyCon
import TcErrors ( solverDepthErrorTcS )
import Name
import RdrName (RdrName, GlobalRdrEnv)
import RnEnv (addUsedRdrNames)
import Var
import VarEnv
import VarSet
import Outputable
import Bag
import UniqSupply
import FastString
import Util
import TcRnTypes
import Unique
import UniqFM
import Maybes ( orElse, firstJusts )
import TrieMap
import Control.Arrow ( first )
import Control.Monad( ap, when, unless, MonadPlus(..) )
import MonadUtils
import Data.IORef
import Data.List ( foldl', partition )
#ifdef DEBUG
import Digraph
#endif
{-
************************************************************************
* *
* Worklists *
* Canonical and non-canonical constraints that the simplifier has to *
* work on. Including their simplification depths. *
* *
* *
************************************************************************
Note [WorkList priorities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
A WorkList contains canonical and non-canonical items (of all flavors).
Notice that each Ct now has a simplification depth. We may
consider using this depth for prioritization as well in the future.
As a simple form of priority queue, our worklist separates out
equalities (wl_eqs) from the rest of the canonical constraints,
so that it's easier to deal with them first, but the separation
is not strictly necessary. Notice that non-canonical constraints
are also parts of the worklist.
Note [Process derived items last]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We can often solve all goals without processing *any* derived constraints.
The derived constraints are just there to help us if we get stuck. So
we keep them in a separate list.
-}
-- See Note [WorkList priorities]
-- | The solver's work list: constraints still to be processed, kept in
-- separate buckets so equalities can be handled first.
data WorkList
  = WL { wl_eqs     :: [Ct]
       , wl_funeqs  :: [Ct] -- LIFO stack of goals
       , wl_rest    :: [Ct]
       , wl_deriv   :: [CtEvidence]  -- Implicitly non-canonical
                                     -- See Note [Process derived items last]
       , wl_implics :: Bag Implication  -- See Note [Residual implications]
  }
-- | Concatenate two work lists, bucket by bucket.
appendWorkList :: WorkList -> WorkList -> WorkList
appendWorkList
    (WL { wl_eqs = as1, wl_funeqs = fs1, wl_rest = rs1
        , wl_deriv = ds1, wl_implics = is1 })
    (WL { wl_eqs = as2, wl_funeqs = fs2, wl_rest = rs2
        , wl_deriv = ds2, wl_implics = is2 })
  = WL { wl_eqs     = as1 ++ as2
       , wl_funeqs  = fs1 ++ fs2
       , wl_rest    = rs1 ++ rs2
       , wl_deriv   = ds1 ++ ds2
       , wl_implics = is1 `unionBags` is2 }
-- | Number of constraints in the work list (implications not counted).
workListSize :: WorkList -> Int
workListSize (WL { wl_eqs = eqs, wl_funeqs = funeqs, wl_deriv = ders, wl_rest = rest })
  = sum [length eqs, length funeqs, length rest, length ders]

-- | Number of Wanted constraints among the equalities and the rest.
workListWantedCount :: WorkList -> Int
workListWantedCount (WL { wl_eqs = eqs, wl_rest = rest })
  = count isWantedCt eqs + count isWantedCt rest
-- | Push one equality onto the equality bucket.
extendWorkListEq :: Ct -> WorkList -> WorkList
extendWorkListEq ct wl = wl { wl_eqs = ct : wl_eqs wl }

-- | Push several equalities onto the equality bucket.
extendWorkListEqs :: [Ct] -> WorkList -> WorkList
extendWorkListEqs cts wl = wl { wl_eqs = cts ++ wl_eqs wl }

-- | Push one type-family equation onto the fun-eq stack.
extendWorkListFunEq :: Ct -> WorkList -> WorkList
extendWorkListFunEq ct wl = wl { wl_funeqs = ct : wl_funeqs wl }

extendWorkListNonEq :: Ct -> WorkList -> WorkList
-- Extension by non equality
extendWorkListNonEq ct wl = wl { wl_rest = ct : wl_rest wl }

-- | Add a Derived; droppable ones go in the low-priority wl_deriv bucket,
-- the others are treated as (non-canonical) equalities.
extendWorkListDerived :: CtLoc -> CtEvidence -> WorkList -> WorkList
extendWorkListDerived loc ev wl
  | isDroppableDerivedLoc loc = wl { wl_deriv = ev : wl_deriv wl }
  | otherwise                 = extendWorkListEq (mkNonCanonical ev) wl

-- | As 'extendWorkListDerived', for several Deriveds at once.
extendWorkListDeriveds :: CtLoc -> [CtEvidence] -> WorkList -> WorkList
extendWorkListDeriveds loc evs wl
  | isDroppableDerivedLoc loc = wl { wl_deriv = evs ++ wl_deriv wl }
  | otherwise                 = extendWorkListEqs (map mkNonCanonical evs) wl

-- | Add a residual implication.
extendWorkListImplic :: Implication -> WorkList -> WorkList
extendWorkListImplic implic wl = wl { wl_implics = implic `consBag` wl_implics wl }

extendWorkListCt :: Ct -> WorkList -> WorkList
-- Agnostic
-- Classifies the constraint's predicate to choose the right bucket:
-- nominal type-family equations, other equalities, or the rest.
extendWorkListCt ct wl
 = case classifyPredType (ctPred ct) of
     EqPred NomEq ty1 _
       | Just (tc,_) <- tcSplitTyConApp_maybe ty1
       , isTypeFamilyTyCon tc
       -> extendWorkListFunEq ct wl
     EqPred {}
       -> extendWorkListEq ct wl
     _ -> extendWorkListNonEq ct wl

extendWorkListCts :: [Ct] -> WorkList -> WorkList
-- Agnostic
extendWorkListCts cts wl = foldr extendWorkListCt wl cts
-- | True when every bucket of the work list is empty.
isEmptyWorkList :: WorkList -> Bool
isEmptyWorkList (WL { wl_eqs = eqs, wl_funeqs = funeqs
                    , wl_rest = rest, wl_deriv = ders, wl_implics = implics })
  = and [null eqs, null rest, null funeqs, isEmptyBag implics, null ders]

-- | A work list with nothing in it.
emptyWorkList :: WorkList
emptyWorkList = WL { wl_eqs = [], wl_rest = []
                   , wl_funeqs = [], wl_deriv = [], wl_implics = emptyBag }
-- | Pop the highest-priority non-Derived item: equalities first, then
-- type-family equations, then everything else.
selectWorkItem :: WorkList -> Maybe (Ct, WorkList)
selectWorkItem wl@(WL { wl_eqs = eqs, wl_funeqs = feqs
                      , wl_rest = rest })
  | ct:cts <- eqs  = Just (ct, wl { wl_eqs    = cts })
  | ct:fes <- feqs = Just (ct, wl { wl_funeqs = fes })
  | ct:cts <- rest = Just (ct, wl { wl_rest   = cts })
  | otherwise      = Nothing
-- | Pop the next Derived item (wrapped as a non-canonical Ct), if any.
selectDerivedWorkItem :: WorkList -> Maybe (Ct, WorkList)
selectDerivedWorkItem wl@(WL { wl_deriv = ders })
  | ev:evs <- ders = Just (mkNonCanonical ev, wl { wl_deriv = evs })
  | otherwise      = Nothing
-- | Pick the next work item and commit the shrunken work list back to
-- the TcS state. Derived items are only attempted when the inert set
-- still holds unsolved Wanteds (inert_count > 0); see
-- Note [Process derived items last].
selectNextWorkItem :: TcS (Maybe Ct)
selectNextWorkItem
  = do { wl_var <- getTcSWorkListRef
       ; wl <- wrapTcS (TcM.readTcRef wl_var)

         -- 'try' commits the popped item (checking the reduction depth
         -- first) or falls through to the supplied alternative.
       ; let try :: Maybe (Ct,WorkList) -> TcS (Maybe Ct) -> TcS (Maybe Ct)
             try mb_work do_this_if_fail
                | Just (ct, new_wl) <- mb_work
                = do { checkReductionDepth (ctLoc ct) (ctPred ct)
                     ; wrapTcS (TcM.writeTcRef wl_var new_wl)
                     ; return (Just ct) }
                | otherwise
                = do_this_if_fail

       ; try (selectWorkItem wl) $
         do { ics <- getInertCans
            ; if inert_count ics == 0
              then return Nothing
              else try (selectDerivedWorkItem wl) (return Nothing) } }
-- Pretty printing
-- Shows each non-empty bucket of the work list under its own label.
instance Outputable WorkList where
  ppr (WL { wl_eqs = eqs, wl_funeqs = feqs
          , wl_rest = rest, wl_implics = implics, wl_deriv = ders })
   = text "WL" <+> (braces $
     vcat [ ppUnless (null eqs) $
            ptext (sLit "Eqs =") <+> vcat (map ppr eqs)
          , ppUnless (null feqs) $
            ptext (sLit "Funeqs =") <+> vcat (map ppr feqs)
          , ppUnless (null rest) $
            ptext (sLit "Non-eqs =") <+> vcat (map ppr rest)
          , ppUnless (null ders) $
            ptext (sLit "Derived =") <+> vcat (map ppr ders)
          , ppUnless (isEmptyBag implics) $
            ptext (sLit "Implics =") <+> vcat (map ppr (bagToList implics))
     ])
{- *********************************************************************
* *
InertSet: the inert set
* *
* *
********************************************************************* -}
-- | The full inert set: the canonical inerts plus the flattening cache
-- and the cache of solved dictionaries.
data InertSet
  = IS { inert_cans :: InertCans
              -- Canonical Given, Wanted, Derived (no Solved)
              -- Sometimes called "the inert set"

       , inert_flat_cache :: FunEqMap (TcCoercion, TcType, CtFlavour)
              -- See Note [Type family equations]
              -- If    F tys :-> (co, ty, ev),
              -- then  co :: F tys ~ ty
              --
              -- Just a hash-cons cache for use when flattening only
              -- These include entirely un-processed goals, so don't use
              -- them to solve a top-level goal, else you may end up solving
              -- (w:F ty ~ a) by setting w:=w!  We just use the flat-cache
              -- when allocating a new flatten-skolem.
              -- Not necessarily inert wrt top-level equations (or inert_cans)

       , inert_solved_dicts :: DictMap CtEvidence
              -- Of form ev :: C t1 .. tn
              -- See Note [Solved dictionaries]
              -- and Note [Do not add superclasses of solved dictionaries]
       }
-- Shows the canonical inerts followed by the solved-dictionary cache.
instance Outputable InertSet where
  ppr is = vcat [ ppr $ inert_cans is
                , text "Solved dicts" <+> vcat (map ppr (bagToList (dictsToBag (inert_solved_dicts is)))) ]
-- | An inert set with no constraints, an empty model, an empty flat
-- cache, and no solved dictionaries.
emptyInert :: InertSet
emptyInert
  = IS { inert_cans = IC { inert_count    = 0
                         , inert_eqs      = emptyVarEnv
                         , inert_dicts    = emptyDicts
                         , inert_safehask = emptyDicts
                         , inert_funeqs   = emptyFunEqs
                         , inert_irreds   = emptyCts
                         , inert_insols   = emptyCts
                         , inert_model    = emptyVarEnv }
       , inert_flat_cache    = emptyFunEqs
       , inert_solved_dicts  = emptyDictMap }
{- Note [Solved dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we apply a top-level instance declaration, we add the "solved"
dictionary to the inert_solved_dicts. In general, we use it to avoid
creating a new EvVar when we have a new goal that we have solved in
the past.
But in particular, we can use it to create *recursive* dictionaries.
The simplest, degenerate case is
instance C [a] => C [a] where ...
If we have
[W] d1 :: C [x]
then we can apply the instance to get
d1 = $dfCList d
[W] d2 :: C [x]
Now 'd1' goes in inert_solved_dicts, and we can solve d2 directly from d1.
d1 = $dfCList d
d2 = d1
See Note [Example of recursive dictionaries]
Other notes about solved dictionaries
* See also Note [Do not add superclasses of solved dictionaries]
* The inert_solved_dicts field is not rewritten by equalities, so it may
get out of date.
Note [Do not add superclasses of solved dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Every member of inert_solved_dicts is the result of applying a dictionary
function, NOT of applying superclass selection to anything.
Consider
class Ord a => C a where
instance Ord [a] => C [a] where ...
Suppose we are trying to solve
[G] d1 : Ord a
[W] d2 : C [a]
Then we'll use the instance decl to give
[G] d1 : Ord a Solved: d2 : C [a] = $dfCList d3
[W] d3 : Ord [a]
We must not add d4 : Ord [a] to the 'solved' set (by taking the
superclass of d2), otherwise we'll use it to solve d3, without ever
using d1, which would be a catastrophe.
Solution: when extending the solved dictionaries, do not add superclasses.
That's why each element of the inert_solved_dicts is the result of applying
a dictionary function.
Note [Example of recursive dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--- Example 1
data D r = ZeroD | SuccD (r (D r));
instance (Eq (r (D r))) => Eq (D r) where
ZeroD == ZeroD = True
(SuccD a) == (SuccD b) = a == b
_ == _ = False;
equalDC :: D [] -> D [] -> Bool;
equalDC = (==);
We need to prove (Eq (D [])). Here's how we go:
[W] d1 : Eq (D [])
By instance decl of Eq (D r):
[W] d2 : Eq [D []] where d1 = dfEqD d2
By instance decl of Eq [a]:
[W] d3 : Eq (D []) where d2 = dfEqList d3
d1 = dfEqD d2
Now this wanted can interact with our "solved" d1 to get:
d3 = d1
-- Example 2:
This code arises in the context of "Scrap Your Boilerplate with Class"
class Sat a
class Data ctx a
instance Sat (ctx Char) => Data ctx Char -- dfunData1
instance (Sat (ctx [a]), Data ctx a) => Data ctx [a] -- dfunData2
class Data Maybe a => Foo a
instance Foo t => Sat (Maybe t) -- dfunSat
instance Data Maybe a => Foo a -- dfunFoo1
instance Foo a => Foo [a] -- dfunFoo2
instance Foo [Char] -- dfunFoo3
Consider generating the superclasses of the instance declaration
instance Foo a => Foo [a]
So our problem is this
[G] d0 : Foo t
[W] d1 : Data Maybe [t] -- Desired superclass
We may add the given in the inert set, along with its superclasses
Inert:
[G] d0 : Foo t
[G] d01 : Data Maybe t -- Superclass of d0
WorkList
[W] d1 : Data Maybe [t]
Solve d1 using instance dfunData2; d1 := dfunData2 d2 d3
Inert:
[G] d0 : Foo t
[G] d01 : Data Maybe t -- Superclass of d0
Solved:
d1 : Data Maybe [t]
WorkList:
[W] d2 : Sat (Maybe [t])
[W] d3 : Data Maybe t
Now, we may simplify d2 using dfunSat; d2 := dfunSat d4
Inert:
[G] d0 : Foo t
[G] d01 : Data Maybe t -- Superclass of d0
Solved:
d1 : Data Maybe [t]
d2 : Sat (Maybe [t])
WorkList:
[W] d3 : Data Maybe t
[W] d4 : Foo [t]
Now, we can just solve d3 from d01; d3 := d01
Inert
[G] d0 : Foo t
[G] d01 : Data Maybe t -- Superclass of d0
Solved:
d1 : Data Maybe [t]
d2 : Sat (Maybe [t])
WorkList
[W] d4 : Foo [t]
Now, solve d4 using dfunFoo2; d4 := dfunFoo2 d5
Inert
[G] d0 : Foo t
[G] d01 : Data Maybe t -- Superclass of d0
Solved:
d1 : Data Maybe [t]
d2 : Sat (Maybe [t])
d4 : Foo [t]
WorkList:
[W] d5 : Foo t
Now, d5 can be solved! d5 := d0
Result
d1 := dfunData2 d2 d3
d2 := dfunSat d4
d3 := d01
d4 := dfunFoo2 d5
d5 := d0
-}
{- *********************************************************************
* *
InertCans: the canonical inerts
* *
* *
********************************************************************* -}
-- | The canonical inert constraints, indexed for fast lookup.
data InertCans   -- See Note [Detailed InertCans Invariants] for more
  = IC { inert_model :: InertModel

       , inert_eqs :: TyVarEnv EqualCtList
              -- All Given/Wanted CTyEqCans; index is the LHS tyvar

       , inert_funeqs :: FunEqMap Ct
              -- All CFunEqCans; index is the whole family head type.
              -- Hence (by CFunEqCan invariants),
              -- all Nominal, and all Given/Wanted (no Derived)

       , inert_dicts :: DictMap Ct
              -- Dictionaries only, index is the class
              -- NB: index is /not/ the whole type because FD reactions
              -- need to match the class but not necessarily the whole type.

       , inert_safehask :: DictMap Ct
              -- Failed dictionary resolution due to Safe Haskell overlapping
              -- instances restriction. We keep this separate from inert_dicts
              -- as it doesn't cause compilation failure, just safe inference
              -- failure.
              --
              -- ^ See Note [Safe Haskell Overlapping Instances Implementation]
              -- in TcSimplify

       , inert_irreds :: Cts
              -- Irreducible predicates

       , inert_insols :: Cts
              -- Frozen errors (as non-canonicals)

       , inert_count :: Int
              -- Number of Wanted goals in
              --     inert_eqs, inert_dicts, inert_safehask, inert_irreds
              -- Does not include insolubles
              -- When non-zero, keep trying to solved
       }

type InertModel  = TyVarEnv Ct
     -- If a -> ct, then ct is a
     --    nominal, Derived, canonical CTyEqCan for [D] (a ~N rhs)
     -- The index of the TyVarEnv is the 'a'
     -- All saturated info for Given, Wanted, Derived is here
{- Note [Detailed InertCans Invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The InertCans represents a collection of constraints with the following properties:
* All canonical
* No two dictionaries with the same head
* No two CIrreds with the same type
* Family equations inert wrt top-level family axioms
* Dictionaries have no matching top-level instance
* Given family or dictionary constraints don't mention touchable
unification variables
* Non-CTyEqCan constraints are fully rewritten with respect
to the CTyEqCan equalities (modulo canRewrite of course;
eg a wanted cannot rewrite a given)
* CTyEqCan equalities: see Note [Applying the inert substitution]
in TcFlatten
Note [Type family equations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Type-family equations, of form (ev : F tys ~ ty), live in three places
* The work-list, of course
* The inert_flat_cache. This is used when flattening, to get maximal
sharing. It contains lots of things that are still in the work-list.
E.g Suppose we have (w1: F (G a) ~ Int), and (w2: H (G a) ~ Int) in the
work list. Then we flatten w1, dumping (w3: G a ~ f1) in the work
list. Now if we flatten w2 before we get to w3, we still want to
share that (G a).
Because it contains work-list things, DO NOT use the flat cache to solve
a top-level goal. Eg in the above example we don't want to solve w3
using w3 itself!
* The inert_funeqs are un-solved but fully processed and in the InertCans.
Note [inert_model: the inert model]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Part of the inert set is the “model”
* The “Model” is a non-idempotent but no-occurs-check
substitution, reflecting *all* *Nominal* equalities (a ~N ty)
that are not immediately soluble by unification.
* The principal reason for maintaining the model is to generate equalities
that tell us how to unify a variable: that is, what Mark Jones calls
"improvement". The same idea is sometimes also called "saturation";
find all the equalities that must hold in any solution.
* There are two sources of constraints in the model:
- Derived constraints arising from functional dependencies, or
decomposing injective arguments of type functions, and suchlike.
- A "shadow copy" for every Given or Wanted (a ~N ty) in
inert_eqs. We imagine that every G/W immediately generates its shadow
constraint, but we refrain from actually generating the constraint itself
until necessary. See (DShadow) and (GWShadow) in
Note [Adding an inert canonical constraint the InertCans]
* If (a -> ty) is in the model, then it is
as if we had an inert constraint [D] a ~N ty.
* Domain of the model = skolems + untouchables
* The inert_eqs are all Given/Wanted. The Derived ones are in the
inert_model only.
* However inert_dicts, inert_irreds may well contain derived constraints.
Note [inert_eqs: the inert equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Definition [Can-rewrite relation]
A "can-rewrite" relation between flavours, written f1 >= f2, is a
binary relation with the following properties
R1. >= is transitive
R2. If f1 >= f, and f2 >= f,
then either f1 >= f2 or f2 >= f1
Lemma. If f1 >= f then f1 >= f1
Proof. By property (R2), with f1=f2
Definition [Generalised substitution]
A "generalised substitution" S is a set of triples (a -f-> t), where
a is a type variable
t is a type
f is a flavour
such that
(WF1) if (a -f1-> t1) in S
(a -f2-> t2) in S
then neither (f1 >= f2) nor (f2 >= f1) hold
(WF2) if (a -f-> t) is in S, then t /= a
Definition [Applying a generalised substitution]
If S is a generalised substitution
S(f,a) = t, if (a -fs-> t) in S, and fs >= f
= a, otherwise
Application extends naturally to types S(f,t), modulo roles.
See Note [Flavours with roles].
Theorem: S(f,a) is well defined as a function.
Proof: Suppose (a -f1-> t1) and (a -f2-> t2) are both in S,
and f1 >= f and f2 >= f
Then by (R2) f1 >= f2 or f2 >= f1, which contradicts (WF)
Notation: repeated application.
S^0(f,t) = t
S^(n+1)(f,t) = S(f, S^n(t))
Definition: inert generalised substitution
A generalised substitution S is "inert" iff
(IG1) there is an n such that
for every f,t, S^n(f,t) = S^(n+1)(f,t)
(IG2) if (b -f-> t) in S, and f >= f, then S(f,t) = t
that is, each individual binding is "self-stable"
----------------------------------------------------------------
Our main invariant:
the inert CTyEqCans should be an inert generalised substitution
----------------------------------------------------------------
Note that inertness is not the same as idempotence. To apply S to a
type, you may have to apply it recursively. But inertness does
guarantee that this recursive use will terminate.
---------- The main theorem --------------
Suppose we have a "work item"
a -fw-> t
and an inert generalised substitution S,
such that
(T1) S(fw,a) = a -- LHS of work-item is a fixpoint of S(fw,_)
(T2) S(fw,t) = t -- RHS of work-item is a fixpoint of S(fw,_)
(T3) a not in t -- No occurs check in the work item
(K1) for every (a -fs-> s) in S, then not (fw >= fs)
(K2) for every (b -fs-> s) in S, where b /= a, then
(K2a) not (fs >= fs)
or (K2b) fs >= fw
or (K2c) not (fw >= fs)
or (K2d) a not in s
(K3) If (b -fs-> s) is in S with (fw >= fs), then
(K3a) If the role of fs is nominal: s /= a
(K3b) If the role of fs is representational: EITHER
a not in s, OR
the path from the top of s to a includes at least one non-newtype
then the extended substitution T = S+(a -fw-> t)
is an inert generalised substitution.
The idea is that
* (T1-2) are guaranteed by exhaustively rewriting the work-item
with S(fw,_).
* T3 is guaranteed by a simple occurs-check on the work item.
* (K1-3) are the "kick-out" criteria. (As stated, they are really the
"keep" criteria.) If the current inert S contains a triple that does
not satisfy (K1-3), then we remove it from S by "kicking it out",
and re-processing it.
* Note that kicking out is a Bad Thing, because it means we have to
re-process a constraint. The less we kick out, the better.
TODO: Make sure that kicking out really *is* a Bad Thing. We've assumed
this but haven't done the empirical study to check.
* Assume we have G>=G, G>=W, D>=D, and that's all. Then, when performing
a unification we add a new given a -G-> ty. But doing so does NOT require
us to kick out an inert wanted that mentions a, because of (K2a). This
is a common case, hence good not to kick out.
* Lemma (L1): The conditions of the Main Theorem imply that there is no
(a -fs-> t) in S, s.t. (fs >= fw).
Proof. Suppose the contrary (fs >= fw). Then because of (T1),
S(fw,a)=a. But since fs>=fw, S(fw,a) = s, hence s=a. But now we
have (a -fs-> a) in S, which contradicts (WF2).
* The extended substitution satisfies (WF1) and (WF2)
- (K1) plus (L1) guarantee that the extended substitution satisfies (WF1).
- (T3) guarantees (WF2).
* (K2) is about inertness. Intuitively, any infinite chain T^0(f,t),
T^1(f,t), T^2(f,t).... must pass through the new work item infinitely
often, since the substitution without the work item is inert; and must
pass through at least one of the triples in S infinitely often.
- (K2a): if not(fs>=fs) then there is no f that fs can rewrite (fs>=f),
and hence this triple never plays a role in application S(f,a).
It is always safe to extend S with such a triple.
(NB: we could strengthen (K1) in this way too, but see K3.)
- (K2b): If this holds then, by (T2), b is not in t. So applying the
work item does not generate any new opportunities for applying S
- (K2c): If this holds, we can't pass through this triple infinitely
often, because if we did then fs>=f, fw>=f, hence by (R2)
* either fw>=fs, contradicting K2c
* or fs>=fw; so by the argument in K2b we can't have a loop
- (K2d): if a not in s, we have no further opportunity to apply the
work item, similar to (K2b)
NB: Dimitrios has a PDF that does this in more detail
Key lemma to make it watertight.
Under the conditions of the Main Theorem,
forall f st fw >= f, a is not in S^k(f,t), for any k
Also, consider roles more carefully. See Note [Flavours with roles].
Completeness
~~~~~~~~~~~~~
K3: completeness. (K3) is not necessary for the extended substitution
to be inert. In fact K1 could be made stronger by saying
... then (not (fw >= fs) or not (fs >= fs))
But it's not enough for S to be inert; we also want completeness.
That is, we want to be able to solve all soluble wanted equalities.
Suppose we have
work-item b -G-> a
inert-item a -W-> b
Assuming (G >= W) but not (W >= W), this fulfills all the conditions,
so we could extend the inerts, thus:
inert-items b -G-> a
a -W-> b
But if we kicked-out the inert item, we'd get
work-item a -W-> b
inert-item b -G-> a
Then rewrite the work-item gives us (a -W-> a), which is soluble via Refl.
So we add one more clause to the kick-out criteria
Another way to understand (K3) is that we treat an inert item
a -f-> b
in the same way as
b -f-> a
So if we kick out one, we should kick out the other. The orientation
is somewhat accidental.
When considering roles, we also need the second clause (K3b). Consider
inert-item a -W/R-> b c
work-item c -G/N-> a
The work-item doesn't get rewritten by the inert, because (>=) doesn't hold.
We've satisfied conditions (T1)-(T3) and (K1) and (K2). If all we had were
condition (K3a), then we would keep the inert around and add the work item.
But then, consider if we hit the following:
work-item2 b -G/N-> Id
where
newtype Id x = Id x
For similar reasons, if we only had (K3a), we wouldn't kick the
representational inert out. And then, we'd miss solving the inert, which
now reduced to reflexivity. The solution here is to kick out representational
inerts whenever the tyvar of a work item is "exposed", where exposed means
not under some proper data-type constructor, like [] or Maybe. See
isTyVarExposed in TcType. This is encoded in (K3b).
Note [Stability of flattening]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The inert_eqs and inert_model, *considered separately* are each stable;
that is, substituting using them will terminate. Considered *together*
they are not. E.g.
Add: [G] a~[b] to inert set with model [D] b~[a]
We add [G] a~[b] to inert_eqs, and emit [D] a~[b]. At this point
the combination of inert_eqs and inert_model is not stable.
Then we canonicalise [D] a~[b] to [D] a~[[a]], and add that to
insolubles as an occurs check.
* When canonicalizing, the flattener respects flavours. In particular,
when flattening a type variable 'a':
* Derived: look up 'a' in the inert_model
* Given/Wanted: look up 'a' in the inert_eqs
Note [Flavours with roles]
~~~~~~~~~~~~~~~~~~~~~~~~~~
The system described in Note [The inert equalities] discusses an abstract
set of flavours. In GHC, flavours have two components: the flavour proper,
taken from {Wanted, Derived, Given}; and the equality relation (often called
role), taken from {NomEq, ReprEq}. When substituting w.r.t. the inert set,
as described in Note [The inert equalities], we must be careful to respect
roles. For example, if we have
inert set: a -G/R-> Int
b -G/R-> Bool
type role T nominal representational
and we wish to compute S(W/R, T a b), the correct answer is T a Bool, NOT
T Int Bool. The reason is that T's first parameter has a nominal role, and
thus rewriting a to Int in T a b is wrong. Indeed, this non-congruence of
substitution means that the proof in Note [The inert equalities] may need
to be revisited, but we don't think that the end conclusion is wrong.
Note [Examples of how the inert_model helps completeness]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
----------- Example 2 (indexed-types/should_fail/T4093a)
Ambiguity check for f: (Foo e ~ Maybe e) => Foo e
We get [G] Foo e ~ Maybe e
[W] Foo e ~ Foo ee -- ee is a unification variable
[W] Foo ee ~ Maybe ee
Flatten: [G] Foo e ~ fsk
[G] fsk ~ Maybe e -- (A)
[W] Foo ee ~ fmv
[W] fmv ~ fsk -- (B) From Foo e ~ Foo ee
[W] fmv ~ Maybe ee
--> rewrite (B) with (A)
[W] Foo ee ~ fmv
[W] fmv ~ Maybe e
[W] fmv ~ Maybe ee
But now we appear to be stuck, since we don't rewrite Wanteds with
Wanteds. But inert_model to the rescue. In the model we first added
fmv -> Maybe e
Then when adding [W] fmv -> Maybe ee to the inert set, we noticed
that the model can rewrite the constraint, and so emit [D] fmv ~ Maybe ee.
That canonicalises to
[D] Maybe e ~ Maybe ee
and that soon yields ee := e, and all is well
----------- Example 3 (typecheck/should_compile/Improvement.hs)
type instance F Int = Bool
instance (b~Int) => C Bool b
[W] w1 : C (F alpha) alpha, [W] w2 : F alpha ~ Bool
If we rewrote wanteds with wanteds, we could rewrite w1 to
C Bool alpha, use the instance to get alpha ~ Int, and solve
the whole thing.
And that is exactly what happens, in the *Derived* constraints.
In effect we get
[D] F alpha ~ fmv
[D] C fmv alpha
[D] fmv ~ Bool
and now we can rewrite (C fmv alpha) with (fmv ~ Bool), and
we are off to the races.
----------- Example 4 (Trac #10009, a nasty example):
f :: (UnF (F b) ~ b) => F b -> ()
g :: forall a. (UnF (F a) ~ a) => a -> ()
g _ = f (undefined :: F a)
For g we get [G] UnF (F a) ~ a
[W] UnF (F beta) ~ beta
[W] F a ~ F beta
Flatten:
[G] g1: F a ~ fsk1 fsk1 := F a
[G] g2: UnF fsk1 ~ fsk2 fsk2 := UnF fsk1
[G] g3: fsk2 ~ a
[W] w1: F beta ~ fmv1
[W] w2: UnF fmv1 ~ fmv2
[W] w3: beta ~ fmv2
[W] w5: fmv1 ~ fsk1 -- From F a ~ F beta using flat-cache
-- and re-orient to put meta-var on left
Unify beta := fmv2
[W] w1: F fmv2 ~ fmv1
[W] w2: UnF fmv1 ~ fmv2
[W] w5: fmv1 ~ fsk1
In the model, we have the shadow Deriveds of w1 and w2
(I name them for convenience even though they are anonymous)
[D] d1: F fmv2 ~ fmv1d
[D] d2: fmv1d ~ fmv1
[D] d3: UnF fmv1 ~ fmv2d
[D] d4: fmv2d ~ fmv2
Now we can rewrite d3 with w5, and match with g2, to get
fmv2d := fsk2
[D] d1: F fmv2 ~ fmv1d
[D] d2: fmv1d ~ fmv1
[D] d4: fmv2 ~ fsk2
Use g2 to rewrite fsk2 to a.
[D] d1: F fmv2 ~ fmv1d
[D] d2: fmv1d ~ fmv1
[D] d4: fmv2 ~ a
Use d4 to rewrite d1, rewrite with g3,
match with g1, to get
fmv1d := fsk1
[D] d2: fmv1 ~ fsk1
[D] d4: fmv2 ~ a
At this point we are stuck so we unflatten this set:
See Note [Orientation of equalities with fmvs] in TcFlatten
[W] w1: F fmv2 ~ fmv1
[W] w2: UnF fmv1 ~ fmv2
[W] w5: fmv1 ~ fsk1
[D] d4: fmv2 ~ a
Unflattening will discharge w1: fmv1 := F fmv2
It can't discharge w2, so it is kept. But we can
unify fmv2 := fsk2, and that is "progress". Result
[W] w2: UnF (F a) ~ a
[W] w5: F a ~ fsk1
And now both of these are easily proved in the next iteration. Phew!
-}
-- | Pretty-print the inert canonical constraints: one labelled section
-- per field, suppressing empty sections with 'ppUnless'.
instance Outputable InertCans where
  ppr (IC { inert_model = model, inert_eqs = eqs
          , inert_funeqs = funeqs, inert_dicts = dicts
          , inert_safehask = safehask, inert_irreds = irreds
          , inert_insols = insols, inert_count = count })
    = braces $ vcat
      [ ppUnless (isEmptyVarEnv eqs) $
        ptext (sLit "Equalities:")
          -- Flatten the per-tyvar EqualCtLists into one bag for printing
          <+> pprCts (foldVarEnv (\eqs rest -> listToBag eqs `andCts` rest) emptyCts eqs)
      , ppUnless (isEmptyTcAppMap funeqs) $
        ptext (sLit "Type-function equalities =") <+> pprCts (funEqsToBag funeqs)
      , ppUnless (isEmptyTcAppMap dicts) $
        ptext (sLit "Dictionaries =") <+> pprCts (dictsToBag dicts)
      , ppUnless (isEmptyTcAppMap safehask) $
        ptext (sLit "Safe Haskell unsafe overlap =") <+> pprCts (dictsToBag safehask)
      , ppUnless (isEmptyCts irreds) $
        ptext (sLit "Irreds =") <+> pprCts irreds
      , ppUnless (isEmptyCts insols) $
        text "Insolubles =" <+> pprCts insols
      , ppUnless (isEmptyVarEnv model) $
        text "Model =" <+> pprCts (foldVarEnv consCts emptyCts model)
      , text "Unsolved goals =" <+> int count
      ]
{- *********************************************************************
* *
Adding an inert
* *
************************************************************************
Note [Adding an inert canonical constraint the InertCans]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Adding any constraint c *other* than a CTyEqCan (TcSMonad.addInertCan):
* If c can be rewritten by model, emit the shadow constraint [D] c
as NonCanonical. See Note [Emitting shadow constraints]
* Reason for non-canonical: a CFunEqCan has a unique fmv on the RHS,
so we must not duplicate it.
* Adding a *nominal* CTyEqCan (a ~N ty) to the inert set (TcSMonad.addInertEq).
* We always (G/W/D) kick out constraints that can be rewritten
(respecting flavours) by the new constraint.
- This is done by kickOutRewritable;
see Note [inert_eqs: the inert equalities].
- We do not need to kick anything out from the model; we only
add [D] constraints to the model (in effect) and they are
fully rewritten by the model, so (K2b) holds
- A Derived equality can kick out [D] constraints in inert_dicts,
inert_irreds etc. Nothing in inert_eqs because there are no
Derived constraints in inert_eqs (they are in the model)
Then, when adding:
* [Derived] a ~N ty
1. Add (a~ty) to the model
NB: 'a' cannot be in fv(ty), because the constraint is canonical.
2. (DShadow) Emit shadow-copies (emitDerivedShadows):
For every inert G/W constraint c, st
(a) (a~ty) can rewrite c (see Note [Emitting shadow constraints]),
and
(b) the model cannot rewrite c
kick out a Derived *copy*, leaving the original unchanged.
Reason for (b) if the model can rewrite c, then we have already
generated a shadow copy
* [Given/Wanted] a ~N ty
1. Add it to inert_eqs
2. If the model can rewrite (a~ty)
then (GWShadow) emit [D] a~ty
else (GWModel) Use emitDerivedShadows just like (DShadow)
and add a~ty to the model
(Reason:[D] a~ty is inert wrt model, and (K2b) holds)
* [Given/Wanted] a ~R ty: just add it to inert_eqs
* Unifying a:=ty, is like adding [G] a~ty, but we can't make a [D] a~ty, as in
step (1) of the [G/W] case above. So instead, do kickOutAfterUnification:
- Kick out from the model any equality (b~ty2) that mentions 'a'
(i.e. a=b or a in ty2). Example:
[G] a ~ [b], model [D] b ~ [a]
Note [Emitting shadow constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Given a new model element [D] a ~ ty, we want to emit shadow
[D] constraints for any inert constraints 'c' that can be
rewritten [D] a-> ty
* And similarly given a new Given/Wanted 'c', we want to emit a
shadow 'c' if the model can rewrite [D] c
See modelCanRewrite.
NB the use of rewritableTyVars. You might wonder whether, given the new
constraint [D] fmv ~ ty and the inert [W] F alpha ~ fmv, do we want to
emit a shadow constraint [D] F alpha ~ fmv? No, we don't, because
it'll literally be a duplicate (since we do not rewrite the RHS of a
CFunEqCan) and hence immediately eliminated again. Insetad we simply
want to *kick-out* the [W] F alpha ~ fmv, so that it is reconsidered
from a fudep point of view. See Note [Kicking out CFunEqCan for
fundeps]
Note [Kicking out CFunEqCan for fundeps]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider:
New: [D] fmv1 ~ fmv2
Inert: [W] F alpha ~ fmv1
[W] F beta ~ fmv2
The new (derived) equality certainly can't rewrite the inerts. But we
*must* kick out the first one, to get:
New: [W] F alpha ~ fmv1
Inert: [W] F beta ~ fmv2
Model: [D] fmv1 ~ fmv2
and now improvement will discover [D] alpha ~ beta. This is important;
eg in Trac #9587.
-}
addInertEq :: Ct -> TcS ()
-- | Add a canonical equality to the inert set, first kicking out any
-- inert constraint the new equality can rewrite, and pushing the
-- kicked-out constraints back onto the work list for re-processing.
-- This is a key function, because of the kick-out stuff
-- Precondition: item /is/ canonical (a CTyEqCan); anything else panics
addInertEq ct@(CTyEqCan { cc_ev = ev, cc_eq_rel = eq_rel, cc_tyvar = tv })
  = do { traceTcS "addInertEq {" $
         text "Adding new inert equality:" <+> ppr ct
       ; ics <- getInertCans
         -- Kick-out implements criteria (K1)-(K3);
         -- see Note [inert_eqs: the inert equalities]
       ; let (kicked_out, ics1) = kickOutRewritable (ctEvFlavour ev, eq_rel) tv ics
       ; ics2 <- add_inert_eq ics1 ct
       ; setInertCans ics2
         -- Re-process the kicked-out constraints against the new inerts
       ; unless (isEmptyWorkList kicked_out) $
         do { updWorkListTcS (appendWorkList kicked_out)
            ; csTraceTcS $
              hang (ptext (sLit "Kick out, tv =") <+> ppr tv)
                 2 (vcat [ text "n-kicked =" <+> int (workListSize kicked_out)
                         , ppr kicked_out ]) }
       ; traceTcS "addInertEq }" $ empty }
addInertEq ct = pprPanic "addInertEq" (ppr ct)
add_inert_eq :: InertCans -> Ct -> TcS InertCans
-- | Insert a canonical CTyEqCan into the InertCans, keeping the model
-- in step.  The four guards mirror the four cases of
-- Note [Adding an inert canonical constraint the InertCans].
add_inert_eq ics@(IC { inert_count = n
                     , inert_eqs = old_eqs
                     , inert_model = old_model })
             ct@(CTyEqCan { cc_ev = ev, cc_eq_rel = eq_rel, cc_tyvar = tv })
  | isDerived ev
    -- Derived equalities go only in the model, never in inert_eqs;
    -- emit shadow copies of inerts that the new entry can rewrite
  = do { emitDerivedShadows ics tv
       ; return (ics { inert_model = extendVarEnv old_model tv ct }) }

  | ReprEq <- eq_rel
    -- Representational equalities are not reflected in the model
  = return new_ics

  -- Nominal equality (tv ~N ty), Given/Wanted
  -- See Note [Emitting shadow constraints]
  | modelCanRewrite old_model rw_tvs  -- Shadow of new constraint is
  = do { emitNewDerivedEq loc pred    -- not inert, so emit it
       ; return new_ics }

  | otherwise -- Shadow of new constraint is inert wrt model
              -- so extend model, and create shadows it can now rewrite
  = do { emitDerivedShadows ics tv
       ; return (new_ics { inert_model = new_model }) }

  where
    loc        = ctEvLoc ev
    pred       = ctEvPred ev
    rw_tvs     = tyVarsOfType pred
    new_ics    = ics { inert_eqs   = addTyEq old_eqs tv ct
                     , inert_count = bumpUnsolvedCount ev n }
    new_model  = extendVarEnv old_model tv derived_ct
    -- derived_ct: the Derived shadow of the new Given/Wanted,
    -- which is what actually lives in the model
    derived_ct = ct { cc_ev = CtDerived { ctev_loc = loc, ctev_pred = pred } }

add_inert_eq _ ct = pprPanic "addInertEq" (ppr ct)
emitDerivedShadows :: InertCans -> TcTyVar -> TcS ()
-- | Emit a Derived shadow copy of every inert Given/Wanted constraint
-- that the new model entry for 'new_tv' can rewrite (and that the old
-- model could not).  See Note [Emitting shadow constraints].
emitDerivedShadows IC { inert_eqs = tv_eqs
                      , inert_dicts = dicts
                      , inert_safehask = safehask
                      , inert_funeqs = funeqs
                      , inert_irreds = irreds
                      , inert_model = model } new_tv
  = mapM_ emit_shadow shadows
  where
    emit_shadow ct = emitNewDerived loc pred
      where
        ev   = ctEvidence ct
        pred = ctEvPred ev
        loc  = ctEvLoc ev

    -- Gather candidates from every inert field except insolubles
    shadows = foldDicts  get_ct dicts $
              foldDicts  get_ct safehask $
              foldFunEqs get_ct funeqs $
              foldIrreds get_ct irreds $
              foldTyEqs  get_ct tv_eqs []
      -- Ignore insolubles

    get_ct ct cts | want_shadow ct = ct:cts
                  | otherwise      = cts

    want_shadow ct
      =  not (isDerivedCt ct)               -- No need for a shadow of a Derived!
      && (new_tv `elemVarSet` rw_tvs)       -- New tv can rewrite ct, yielding a
                                            -- different ct
      && not (modelCanRewrite model rw_tvs) -- We have not already created a
                                            -- shadow
      where
        rw_tvs = rewritableTyVars ct
modelCanRewrite :: InertModel -> TcTyVarSet -> Bool
-- See Note [Emitting shadow constraints]
-- True if there is any intersection between dom(model) and tvs
modelCanRewrite model tvs = not (disjointUFM model tvs)
     -- The low-level use of disjointUFM might be surprising.
     -- InertModel = TyVarEnv Ct, and we want to see if its domain
     -- is disjoint from that of a TcTyVarSet.  So we drop down
     -- to the underlying UniqFM.  A bit yukky, but efficient.
rewritableTyVars :: Ct -> TcTyVarSet
-- | The type variables of a constraint that are eligible for rewriting.
-- For a CFunEqCan only the argument types count (its RHS fsk/fmv is
-- never rewritten); every other constraint exposes all of its tyvars.
rewritableTyVars ct
  = case ct of
      CFunEqCan { cc_tyargs = tys } -> tyVarsOfTypes tys
      _                             -> tyVarsOfType (ctPred ct)
--------------
addInertCan :: Ct -> TcS ()  -- Constraints *other than* equalities
-- | Add a canonical non-equality constraint (dictionary, funeq, irred)
-- to the inert set; emit a Derived shadow when the model can rewrite it.
addInertCan ct
  = do { traceTcS "insertInertCan {" $
         text "Trying to insert new inert item:" <+> ppr ct

       ; ics <- getInertCans
       ; setInertCans (add_item ics ct)

       -- Emit shadow derived if necessary
       -- See Note [Emitting shadow constraints]
       ; let ev     = ctEvidence ct
             pred   = ctEvPred ev
             rw_tvs = rewritableTyVars ct
         -- Deriveds never need a shadow of themselves
       ; when (not (isDerived ev) && modelCanRewrite (inert_model ics) rw_tvs)
              (emitNewDerived (ctEvLoc ev) pred)

       ; traceTcS "addInertCan }" $ empty }
add_item :: InertCans -> Ct -> InertCans
-- | Workhorse for 'addInertCan': route the constraint to the right
-- field of the InertCans, bumping the unsolved count for Wanteds.
add_item ics item@(CFunEqCan { cc_fun = tc, cc_tyargs = tys })
  = ics { inert_funeqs = insertFunEq (inert_funeqs ics) tc tys item }

add_item ics item@(CIrredEvCan { cc_ev = ev })
  = ics { inert_irreds = inert_irreds ics `Bag.snocBag` item
        , inert_count  = bumpUnsolvedCount ev (inert_count ics) }
       -- The 'False' is because the irreducible constraint might later instantiate
       -- to an equality.
       -- But since we try to simplify first, if there's a constraint function FC with
       --    type instance FC Int = Show
       -- we'll reduce a constraint (FC Int a) to Show a, and never add an inert irreducible

add_item ics item@(CDictCan { cc_ev = ev, cc_class = cls, cc_tyargs = tys })
  = ics { inert_dicts = addDict (inert_dicts ics) cls tys item
        , inert_count = bumpUnsolvedCount ev (inert_count ics) }

add_item _ item
  = pprPanic "upd_inert set: can't happen! Inserting " $
    ppr item   -- CTyEqCan is dealt with by addInertEq
               -- Can't be CNonCanonical, CHoleCan,
               -- because they only land in inert_insols
-- | Increment the unsolved-goal counter for Wanted evidence; Given and
-- Derived constraints do not count as unsolved goals.
bumpUnsolvedCount :: CtEvidence -> Int -> Int
bumpUnsolvedCount ev n = if isWanted ev then n + 1 else n
-----------------------------------------
kickOutRewritable :: CtFlavourRole  -- Flavour and role of the equality that is
                                    -- being added to the inert set
                  -> TcTyVar        -- The new equality is tv ~ ty
                  -> InertCans
                  -> (WorkList, InertCans)
-- | Extract into the WorkList every inert constraint that the new
-- equality can rewrite, implementing criteria (K1)-(K3) of
-- Note [inert_eqs: the inert equalities].
-- NB: Notice that don't kick out constraints from
-- inert_solved_dicts, and inert_solved_funeqs
-- optimistically. But when we lookup we have to
-- take the substitution into account
kickOutRewritable new_fr new_tv ics@(IC { inert_funeqs = funeqmap })
  | not (new_fr `eqCanRewriteFR` new_fr)
  = if isFlattenTyVar new_tv
    then (emptyWorkList { wl_funeqs = feqs_out }, ics { inert_funeqs = feqs_in })
    else (emptyWorkList, ics)
        -- If new_fr can't rewrite itself, it can't rewrite
        -- anything else, so no need to kick out anything.
        -- (This is a common case: wanteds can't rewrite wanteds)
        --
        -- EXCEPT (tiresomely) that we should kick out any CFunEqCans
        -- that we should re-examine for their fundeps, even though
        -- they can't be *rewritten*.
        -- See Note [Kicking out CFunEqCan for fundeps]
  where
    (feqs_out, feqs_in) = partitionFunEqs kick_out_fe funeqmap

    kick_out_fe :: Ct -> Bool
    kick_out_fe (CFunEqCan { cc_fsk = fsk }) = fsk == new_tv
    kick_out_fe _ = False  -- Can't happen

kickOutRewritable new_fr new_tv (IC { inert_eqs      = tv_eqs
                                    , inert_dicts    = dictmap
                                    , inert_safehask = safehask
                                    , inert_funeqs   = funeqmap
                                    , inert_irreds   = irreds
                                    , inert_insols   = insols
                                    , inert_count    = n
                                    , inert_model    = model })
  = (kicked_out, inert_cans_in)
  where
    inert_cans_in = IC { inert_eqs      = tv_eqs_in
                       , inert_dicts    = dicts_in
                       , inert_safehask = safehask   -- ??
                       , inert_funeqs   = feqs_in
                       , inert_irreds   = irs_in
                       , inert_insols   = insols_in
                       , inert_count    = n - workListWantedCount kicked_out
                       , inert_model    = model }
                       -- Leave the model unchanged

    kicked_out = WL { wl_eqs     = tv_eqs_out
                    , wl_funeqs  = feqs_out
                    , wl_deriv   = []
                    , wl_rest    = bagToList (dicts_out `andCts` irs_out
                                              `andCts` insols_out)
                    , wl_implics = emptyBag }

    (tv_eqs_out, tv_eqs_in) = foldVarEnv kick_out_eqs ([], emptyVarEnv) tv_eqs
    (feqs_out,   feqs_in)   = partitionFunEqs kick_out_fe funeqmap
    (dicts_out,  dicts_in)  = partitionDicts kick_out_ct dictmap
    (irs_out,    irs_in)    = partitionBag kick_out_irred irreds
    (insols_out, insols_in) = partitionBag kick_out_ct insols
      -- Kick out even insolubles; see Note [Kick out insolubles]

    -- Can the incoming equality (new_fr) rewrite this evidence?
    can_rewrite :: CtEvidence -> Bool
    can_rewrite = (new_fr `eqCanRewriteFR`) . ctEvFlavourRole

    kick_out_ct :: Ct -> Bool
    kick_out_ct ct = kick_out_ctev (ctEvidence ct)

    -- CFunEqCans are also kicked out when their fsk IS the new tyvar,
    -- so their fundeps get re-examined
    kick_out_fe :: Ct -> Bool
    kick_out_fe (CFunEqCan { cc_ev = ev, cc_fsk = fsk })
      = kick_out_ctev ev || fsk == new_tv
    kick_out_fe _ = False  -- Can't happen

    kick_out_ctev :: CtEvidence -> Bool
    kick_out_ctev ev =  can_rewrite ev
                     && new_tv `elemVarSet` tyVarsOfType (ctEvPred ev)
         -- See Note [Kicking out inert constraints]

    kick_out_irred :: Ct -> Bool
    kick_out_irred ct =  can_rewrite (cc_ev ct)
                      && new_tv `elemVarSet` closeOverKinds (TcM.tyVarsOfCt ct)
         -- See Note [Kicking out Irreds]

    kick_out_eqs :: EqualCtList -> ([Ct], TyVarEnv EqualCtList)
                 -> ([Ct], TyVarEnv EqualCtList)
    kick_out_eqs eqs (acc_out, acc_in)
      = (eqs_out ++ acc_out, case eqs_in of
           []        -> acc_in
           (eq1:_)   -> extendVarEnv acc_in (cc_tyvar eq1) eqs_in)
      where
        (eqs_in, eqs_out) = partition keep_eq eqs

    -- implements criteria K1-K3 in Note [The inert equalities] in TcFlatten
    keep_eq (CTyEqCan { cc_tyvar = tv, cc_rhs = rhs_ty, cc_ev = ev
                      , cc_eq_rel = eq_rel })
      | tv == new_tv
      = not (can_rewrite ev)  -- (K1)

      | otherwise
      = check_k2 && check_k3
      where
        ev_fr = ctEvFlavourRole ev
        check_k2 = not (ev_fr  `eqCanRewriteFR` ev_fr)              -- (K2a)
                || not (new_fr `eqCanRewriteFR` ev_fr)              -- (K2c)
                || (ev_fr `eqCanRewriteFR` new_fr)                  -- (K2b)
                || not (new_tv `elemVarSet` tyVarsOfType rhs_ty)    -- (K2d)
        check_k3
          | new_fr `eqCanRewriteFR` ev_fr
          = case eq_rel of
              NomEq  -> not (rhs_ty `eqType` mkTyVarTy new_tv)      -- (K3a)
              ReprEq -> not (isTyVarExposed new_tv rhs_ty)          -- (K3b)
          | otherwise
          = True
    keep_eq ct = pprPanic "keep_eq" (ppr ct)
kickOutAfterUnification :: TcTyVar -> TcS Int
-- | After unifying @new_tv := xi@, kick out from both the model and the
-- inert set everything the unification can now rewrite.  Returns the
-- number of constraints kicked out.
kickOutAfterUnification new_tv
  = do { ics <- getInertCans
       ; let (kicked_out1, ics1) = kickOutModel new_tv ics
             (kicked_out2, ics2) = kickOutRewritable (Given,NomEq) new_tv ics1
                     -- Given because the tv := xi is given; NomEq because
                     -- only nominal equalities are solved by unification
             kicked_out = appendWorkList kicked_out1 kicked_out2
       ; setInertCans ics2
       ; updWorkListTcS (appendWorkList kicked_out)

       ; unless (isEmptyWorkList kicked_out) $
         csTraceTcS $
         hang (ptext (sLit "Kick out (unify), tv =") <+> ppr new_tv)
            2 (vcat [ text "n-kicked =" <+> int (workListSize kicked_out)
                    , text "kicked_out =" <+> ppr kicked_out
                    , text "Residual inerts =" <+> ppr ics2 ])
       ; return (workListSize kicked_out) }
kickOutModel :: TcTyVar -> InertCans -> (WorkList, InertCans)
-- | Remove from the model every Derived equality that mentions 'new_tv'
-- (on either side), turning each into a work-list item -- unless a
-- Given/Wanted with the same (tv, rhs) is already in inert_eqs.
kickOutModel new_tv ics@(IC { inert_model = model, inert_eqs = eqs })
  = (foldVarEnv add emptyWorkList der_out, ics { inert_model = new_model })
  where
    (der_out, new_model) = partitionVarEnv kick_out_der model

    kick_out_der :: Ct -> Bool
    kick_out_der (CTyEqCan { cc_tyvar = tv, cc_rhs = rhs })
      = new_tv == tv || new_tv `elemVarSet` tyVarsOfType rhs
    kick_out_der _ = False

    add :: Ct -> WorkList -> WorkList
    -- Don't kick out a Derived if there is a Given or Wanted with
    -- the same predicate.  The model is just a shadow copy, and the
    -- Given/Wanted will serve the purpose.
    add (CTyEqCan { cc_ev = ev, cc_tyvar = tv, cc_rhs = rhs }) wl
      | not (isInInertEqs eqs tv rhs) = extendWorkListDerived (ctEvLoc ev) ev wl
    add _ wl = wl
{- Note [Kicking out inert constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given a new (a -> ty) inert, we want to kick out an existing inert
constraint if
a) the new constraint can rewrite the inert one
b) 'a' is free in the inert constraint (so that it *will*
rewrite it if we kick it out).
For (b) we use tyVarsOfCt, which returns the type variables /and
the kind variables/ that are directly visible in the type. Hence we
will have exposed all the rewriting we care about to make the most
precise kinds visible for matching classes etc. No need to kick out
constraints that mention type variables whose kinds contain this
variable! (Except see Note [Kicking out Irreds].)
Note [Kicking out Irreds]
~~~~~~~~~~~~~~~~~~~~~~~~~
There is an awkward special case for Irreds. When we have a
kind-mis-matched equality constraint (a:k1) ~ (ty:k2), we turn it into
an Irred (see Note [Equalities with incompatible kinds] in
TcCanonical). So in this case the free kind variables of k1 and k2
are not visible. More precisely, the type looks like
(~) k1 (a:k1) (ty:k2)
because (~) has kind forall k. k -> k -> Constraint. So the constraint
itself is ill-kinded. We can "see" k1 but not k2. That's why we use
closeOverKinds to make sure we see k2.
This is not pretty. Maybe (~) should have kind
(~) :: forall k1 k2. k1 -> k2 -> Constraint
Note [Kick out insolubles]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have an insoluble alpha ~ [alpha], which is insoluble
because of an occurs check. And then we unify alpha := [Int].
Then we really want to rewrite the insoluble to [Int] ~ [[Int]].
Now it can be decomposed. Otherwise we end up with a "Can't match
[Int] ~ [[Int]]" which is true, but a bit confusing because the
outer type constructors match.
-}
--------------
addInertSafehask :: InertCans -> Ct -> InertCans
-- | Record a class constraint in the Safe Haskell overlap-failure set.
-- Only CDictCans make sense here; anything else is a panic.
--
-- Bug fix: the original extended @addDict (inert_dicts ics)@, i.e. it
-- built the new inert_safehask from the *dictionary* map.  That both
-- discarded every previously recorded overlap failure and seeded the
-- set with unrelated dictionaries.  The base map must be the existing
-- inert_safehask.
addInertSafehask ics item@(CDictCan { cc_class = cls, cc_tyargs = tys })
  = ics { inert_safehask = addDict (inert_safehask ics) cls tys item }

addInertSafehask _ item
  = pprPanic "addInertSafehask: can't happen! Inserting " $ ppr item
-- | Record a constraint that failed the Safe Haskell overlapping-instances
-- check by inserting it into the inert_safehask set.
insertSafeOverlapFailureTcS :: Ct -> TcS ()
insertSafeOverlapFailureTcS item = updInertCans (`addInertSafehask` item)
-- | Collect all recorded Safe Haskell overlap failures as a bag of Cts.
getSafeOverlapFailures :: TcS Cts
getSafeOverlapFailures
  = do { ics <- getInertCans
       ; return (foldDicts consCts (inert_safehask ics) emptyCts) }
--------------
addSolvedDict :: CtEvidence -> Class -> [Type] -> TcS ()
-- Add a new item in the solved set of the monad
-- See Note [Solved dictionaries]
addSolvedDict item cls tys
  | isIPPred (ctEvPred item)    -- Never cache "solved" implicit parameters (not sure why!)
  = return ()
  | otherwise
  = do { traceTcS "updSolvedSetTcs:" $ ppr item
         -- Cache the dictionary keyed by (cls, tys) in inert_solved_dicts
       ; updInertTcS $ \ ics ->
         ics { inert_solved_dicts = addDict (inert_solved_dicts ics) cls tys item } }
{- *********************************************************************
* *
Other inert-set operations
* *
********************************************************************* -}
-- | Apply a pure transformation to the inert set held in the solver's
-- mutable reference (a plain read-modify-write of the TcRef).
updInertTcS :: (InertSet -> InertSet) -> TcS ()
updInertTcS f
  = do { ref <- getTcSInertsRef
       ; wrapTcS $ do { old_inerts <- TcM.readTcRef ref
                      ; TcM.writeTcRef ref (f old_inerts) } }
-- | Fetch the canonical-constraint part of the inert set.
getInertCans :: TcS InertCans
getInertCans = do { all_inerts <- getTcSInerts
                  ; return (inert_cans all_inerts) }
-- | Overwrite the canonical-constraint part of the inert set.
setInertCans :: InertCans -> TcS ()
setInertCans new_cans = updInertTcS (\ is -> is { inert_cans = new_cans })
takeGivenInsolubles :: TcS Cts
-- | Remove and return the insolubles after solving Givens, and drop any
-- non-Given CFunEqCans from the inert funeqs at the same time.
-- See Note [The inert set after solving Givens]
takeGivenInsolubles
  = updRetInertCans $ \ cans ->
    ( inert_insols cans
    , cans { inert_insols = emptyBag
           , inert_funeqs = filterFunEqs isGivenCt (inert_funeqs cans) } )
{- Note [The inert set after solving Givens]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
After solving the Givens we take two things out of the inert set
a) The insolubles; we return these to report inaccessible code
We return these separately. We don't want to leave them in
the inert set, lest we confuse them with insolubles arising from
solving wanteds
b) Any Derived CFunEqCans. Derived CTyEqCans are in the
inert_model and do no harm. In contrast, Derived CFunEqCans
get mixed up with the Wanteds later and confuse the
post-solve-wanted unflattening (Trac #10507).
E.g. From [G] 1 <= m, [G] m <= n
We get [D] 1 <= n, and we must remove it!
         Otherwise we unflatten it more than once, and assign
         to its fmv more than once...disaster.
     It's ok to remove them because they turned out not to
     yield an insoluble, and hence have now done their work.
-}
updRetInertCans :: (InertCans -> (a, InertCans)) -> TcS a
-- Modify the inert set with the supplied function, returning the
-- function's auxiliary result as well
updRetInertCans upd_fn
  = do { is_var <- getTcSInertsRef
       ; wrapTcS (do { inerts <- TcM.readTcRef is_var
                     ; let (res, cans') = upd_fn (inert_cans inerts)
                     ; TcM.writeTcRef is_var (inerts { inert_cans = cans' })
                     ; return res }) }

updInertCans :: (InertCans -> InertCans) -> TcS ()
-- Modify the inert set with the supplied function
updInertCans upd_fn
  = updInertTcS $ \ inerts -> inerts { inert_cans = upd_fn (inert_cans inerts) }
-- The next four helpers each update one field of the canonical inert
-- constraints, going through updInertCans.

updInertDicts :: (DictMap Ct -> DictMap Ct) -> TcS ()
-- Modify the inert dictionary constraints with the supplied function
updInertDicts upd_fn
  = updInertCans $ \ ics -> ics { inert_dicts = upd_fn (inert_dicts ics) }

updInertSafehask :: (DictMap Ct -> DictMap Ct) -> TcS ()
-- Modify the inert Safe Haskell overlap failures with the supplied function
updInertSafehask upd_fn
  = updInertCans $ \ ics -> ics { inert_safehask = upd_fn (inert_safehask ics) }

updInertFunEqs :: (FunEqMap Ct -> FunEqMap Ct) -> TcS ()
-- Modify the inert function equalities with the supplied function
updInertFunEqs upd_fn
  = updInertCans $ \ ics -> ics { inert_funeqs = upd_fn (inert_funeqs ics) }

updInertIrreds :: (Cts -> Cts) -> TcS ()
-- Modify the inert irreducible constraints with the supplied function
updInertIrreds upd_fn
  = updInertCans $ \ ics -> ics { inert_irreds = upd_fn (inert_irreds ics) }
-- | Read the inert type-variable equalities.
getInertEqs :: TcS (TyVarEnv EqualCtList)
getInertEqs = do { inert <- getInertCans; return (inert_eqs inert) }

-- | Read the model (inert_model) from the inert set.
getInertModel :: TcS InertModel
getInertModel = do { inert <- getInertCans; return (inert_model inert) }
getInertGivens :: TcS [Ct]
-- Returns the Given constraints in the inert set,
-- with type functions *not* unflattened
getInertGivens
  = do { inerts <- getInertCans
         -- Gather dictionaries, function equalities and tyvar equalities
         -- into one list, then keep only the Givens
       ; let all_cts = foldDicts (:) (inert_dicts inerts)
                     $ foldFunEqs (:) (inert_funeqs inerts)
                     $ concat (varEnvElts (inert_eqs inerts))
       ; return (filter isGivenCt all_cts) }
getUnsolvedInerts :: TcS ( Bag Implication
                         , Cts     -- Tyvar eqs: a ~ ty
                         , Cts     -- Fun eqs:   F a ~ ty
                         , Cts     -- Insoluble
                         , Cts )   -- All others
-- Post-condition: the returned simple constraints are all fully zonked
--                     (because they come from the inert set)
--                 the unsolved implics may not be
getUnsolvedInerts
  = do { IC { inert_eqs    = tv_eqs
            , inert_funeqs = fun_eqs
            , inert_irreds = irreds
            , inert_dicts  = idicts
            , inert_insols = insols
            , inert_model  = model } <- getInertCans

         -- der_tv_eqs: Derived equalities from the model that are not
         -- already represented among the inert tyvar equalities
       ; let der_tv_eqs       = foldVarEnv (add_der tv_eqs) emptyCts model -- Want to float these
             unsolved_tv_eqs  = foldTyEqs add_if_unsolved tv_eqs der_tv_eqs
             unsolved_fun_eqs = foldFunEqs add_if_unsolved fun_eqs emptyCts
             unsolved_irreds  = Bag.filterBag is_unsolved irreds
             unsolved_dicts   = foldDicts add_if_unsolved idicts emptyCts
             others           = unsolved_irreds `unionBags` unsolved_dicts

       ; implics <- getWorkListImplics

       ; traceTcS "getUnsolvedInerts" $
         vcat [ text " tv eqs =" <+> ppr unsolved_tv_eqs
              , text "fun eqs =" <+> ppr unsolved_fun_eqs
              , text "insols =" <+> ppr insols
              , text "others =" <+> ppr others
              , text "implics =" <+> ppr implics ]

       ; return ( implics, unsolved_tv_eqs, unsolved_fun_eqs, insols, others) }
               -- Keep even the given insolubles
               -- so that we can report dead GADT pattern match branches
  where
    add_der tv_eqs ct cts
      | CTyEqCan { cc_tyvar = tv, cc_rhs = rhs } <- ct
      , not (isInInertEqs tv_eqs tv rhs) = ct `consBag` cts
      | otherwise                        = cts

    add_if_unsolved :: Ct -> Cts -> Cts
    add_if_unsolved ct cts | is_unsolved ct = ct `consCts` cts
                           | otherwise      = cts

    is_unsolved ct = not (isGivenCt ct)   -- Wanted or Derived
isInInertEqs :: TyVarEnv EqualCtList -> TcTyVar -> TcType -> Bool
-- True if (a ~N ty) is in the inert set, in either Given or Wanted
isInInertEqs eqs tv rhs
  = case lookupVarEnv eqs tv of
      Nothing  -> False
      Just cts -> any (same_pred rhs) cts
  where
    -- A nominal CTyEqCan whose RHS is eqType-equal counts as "the same"
    same_pred rhs ct
      | CTyEqCan { cc_rhs = rhs2, cc_eq_rel = eq_rel } <- ct
      , NomEq <- eq_rel
      , rhs `eqType` rhs2 = True
      | otherwise         = False
getNoGivenEqs :: TcLevel     -- TcLevel of this implication
              -> [TcTyVar]   -- Skolems of this implication
              -> TcS Bool    -- True <=> definitely no residual given equalities
-- See Note [When does an implication have given equalities?]
getNoGivenEqs tclvl skol_tvs
  = do { inerts@(IC { inert_eqs = ieqs, inert_irreds = iirreds, inert_funeqs = funeqs })
              <- getInertCans
       ; let local_fsks = foldFunEqs add_fsk funeqs emptyVarSet

             has_given_eqs = foldrBag ((||) . ev_given_here . ctEvidence)  False iirreds
                          || foldVarEnv ((||) . eqs_given_here local_fsks) False ieqs

       ; traceTcS "getNoGivenEqs" (vcat [ppr has_given_eqs, ppr inerts])
       ; return (not has_given_eqs) }
  where
    eqs_given_here :: VarSet -> EqualCtList -> Bool
    eqs_given_here local_fsks [CTyEqCan { cc_tyvar = tv, cc_ev = ev }]
                              -- Givens are always a singleton
      = not (skolem_bound_here local_fsks tv) && ev_given_here ev
    eqs_given_here _ _ = False

    ev_given_here :: CtEvidence -> Bool
    -- True for a Given bound by the current implication,
    -- i.e. the current level
    ev_given_here ev
      =  isGiven ev
      && tclvl == ctLocLevel (ctEvLoc ev)

    add_fsk :: Ct -> VarSet -> VarSet
    -- Collect the flatten-skolems bound by Given CFunEqCans
    add_fsk ct fsks | CFunEqCan { cc_fsk = tv, cc_ev = ev } <- ct
                    , isGiven ev = extendVarSet fsks tv
                    | otherwise  = fsks

    skol_tv_set = mkVarSet skol_tvs
    skolem_bound_here local_fsks tv -- See Note [Let-bound skolems]
      = case tcTyVarDetails tv of
          SkolemTv {} -> tv `elemVarSet` skol_tv_set
          FlatSkol {} -> not (tv `elemVarSet` local_fsks)
          _           -> False
-- | Returns Given constraints that might,
-- potentially, match the given pred. This is used when checking to see if a
-- Given might overlap with an instance. See Note [Instance and Given overlap]
-- in TcInteract.
matchableGivens :: CtLoc -> PredType -> InertSet -> Cts
matchableGivens loc_w pred (IS { inert_cans = inert_cans })
  = filterBag matchable_given all_relevant_givens
  where
    -- just look in class constraints and irreds. matchableGivens does get called
    -- for ~R constraints, but we don't need to look through equalities, because
    -- canonical equalities are used for rewriting. We'll only get caught by
    -- non-canonical -- that is, irreducible -- equalities.
    all_relevant_givens :: Cts
    all_relevant_givens
      | Just (clas, _) <- getClassPredTys_maybe pred
      = findDictsByClass (inert_dicts inert_cans) clas
        `unionBags` inert_irreds inert_cans
      | otherwise
      = inert_irreds inert_cans

    -- A Given matches if it unifies with pred (treating meta-tyvars as
    -- bindable) and solving with it is not a prohibited superclass loop
    matchable_given :: Ct -> Bool
    matchable_given ct
      | CtGiven { ctev_loc = loc_g } <- ctev
      , Just _ <- tcUnifyTys bind_meta_tv [ctEvPred ctev] [pred]
      , not (prohibitedSuperClassSolve loc_g loc_w)
      = True
      | otherwise
      = False
      where
        ctev = cc_ev ct

    bind_meta_tv :: TcTyVar -> BindFlag
    -- Any meta tyvar may be unified later, so we treat it as
    -- bindable when unifying with givens. That ensures that we
    -- conservatively assume that a meta tyvar might get unified with
    -- something that matches the 'given', until demonstrated
    -- otherwise.
    bind_meta_tv tv | isMetaTyVar tv = BindMe
                    | otherwise      = Skolem
prohibitedSuperClassSolve :: CtLoc -> CtLoc -> Bool
-- See Note [Solving superclass constraints] in TcInstDcls
-- True when using the Given at from_loc to solve the Wanted at
-- solve_loc would form a prohibited superclass cycle (compared by
-- the recorded superclass "sizes")
prohibitedSuperClassSolve from_loc solve_loc
  | GivenOrigin (InstSC given_size) <- ctLocOrigin from_loc
  , ScOrigin wanted_size <- ctLocOrigin solve_loc
  = given_size >= wanted_size
  | otherwise
  = False
{-
Note [When does an implication have given equalities?]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider an implication
beta => alpha ~ Int
where beta is a unification variable that has already been unified
to () in an outer scope. Then we can float the (alpha ~ Int) out
just fine. So when deciding whether the givens contain an equality,
we should canonicalise first, rather than just looking at the original
givens (Trac #8644).
So we simply look at the inert, canonical Givens and see if there are
any equalities among them, the calculation of has_given_eqs. There
are some wrinkles:
* We must know which ones are bound in *this* implication and which
are bound further out. We can find that out from the TcLevel
of the Given, which is itself recorded in the tcl_tclvl field
of the TcLclEnv stored in the Given (ev_given_here).
What about interactions between inner and outer givens?
- Outer given is rewritten by an inner given, then there must
have been an inner given equality, hence the “given-eq” flag
will be true anyway.
  - Inner given rewritten by outer, retains its level (i.e. the inner one)
* We must take account of *potential* equalities, like the one above:
beta => ...blah...
If we still don't know what beta is, we conservatively treat it as potentially
  becoming an equality. Hence including 'irreds' in the calculation of has_given_eqs.
* When flattening givens, we generate Given equalities like
<F [a]> : F [a] ~ f,
with Refl evidence, and we *don't* want those to count as an equality
in the givens! After all, the entire flattening business is just an
internal matter, and the evidence does not mention any of the 'givens'
of this implication. So we do not treat inert_funeqs as a 'given equality'.
* See Note [Let-bound skolems] for another wrinkle
* We do *not* need to worry about representational equalities, because
these do not affect the ability to float constraints.
Note [Let-bound skolems]
~~~~~~~~~~~~~~~~~~~~~~~~
If * the inert set contains a canonical Given CTyEqCan (a ~ ty)
and  * 'a' is a skolem bound in this very implication,
then:
a) The Given is pretty much a let-binding, like
f :: (a ~ b->c) => a -> a
Here the equality constraint is like saying
let a = b->c in ...
It is not adding any new, local equality information,
and hence can be ignored by has_given_eqs
b) 'a' will have been completely substituted out in the inert set,
so we can safely discard it. Notably, it doesn't need to be
returned as part of 'fsks'
For an example, see Trac #9211.
-}
removeInertCts :: [Ct] -> InertCans -> InertCans
-- ^ Remove inert constraints from the 'InertCans', for use when a
-- typechecker plugin wishes to discard a given.
removeInertCts to_remove initial_cans
  = foldl' removeInertCt initial_cans to_remove
removeInertCt :: InertCans -> Ct -> InertCans
-- Remove one canonical constraint from the matching field of the
-- InertCans; panics on constraint forms that are never stored there
-- as removable entries
removeInertCt is ct =
  case ct of

    CDictCan  { cc_class = cl, cc_tyargs = tys } ->
      is { inert_dicts = delDict (inert_dicts is) cl tys }

    CFunEqCan { cc_fun  = tf,  cc_tyargs = tys } ->
      is { inert_funeqs = delFunEq (inert_funeqs is) tf tys }

    CTyEqCan  { cc_tyvar = x,  cc_rhs = ty } ->
      is { inert_eqs = delTyEq (inert_eqs is) x ty }

    CIrredEvCan {}   -> panic "removeInertCt: CIrredEvCan"
    CNonCanonical {} -> panic "removeInertCt: CNonCanonical"
    CHoleCan {}      -> panic "removeInertCt: CHoleCan"
lookupFlatCache :: TyCon -> [Type] -> TcS (Maybe (TcCoercion, TcType, CtFlavour))
-- Look up (fam_tc tys): first among the inert CFunEqCans, then in the
-- flat cache; return the coercion, flattened type and flavour, if any
lookupFlatCache fam_tc tys
  = do { IS { inert_flat_cache = flat_cache
            , inert_cans = IC { inert_funeqs = inert_funeqs } } <- getTcSInerts
       ; return (firstJusts [lookup_inerts inert_funeqs,
                             lookup_flats flat_cache]) }
  where
    lookup_inerts inert_funeqs
      | Just (CFunEqCan { cc_ev = ctev, cc_fsk = fsk })
           <- findFunEqs inert_funeqs fam_tc tys
      = Just (ctEvCoercion ctev, mkTyVarTy fsk, ctEvFlavour ctev)
      | otherwise = Nothing

    lookup_flats flat_cache = findFunEq flat_cache fam_tc tys
lookupInInerts :: TcPredType -> TcS (Maybe CtEvidence)
-- Is this exact predicate type cached in the solved or canonicals of the InertSet?
lookupInInerts pty
  | ClassPred cls tys <- classifyPredType pty
  = do { inerts <- getTcSInerts
         -- Try the solved dictionaries first, then the inert canonicals
       ; return (lookupSolvedDict inerts cls tys `mplus`
                 lookupInertDict (inert_cans inerts) cls tys) }
  | otherwise -- NB: No caching for equalities, IPs, holes, or errors
  = return Nothing
-- | Look up the evidence for exactly this class constraint among the
-- inert canonical dictionaries.
lookupInertDict :: InertCans -> Class -> [Type] -> Maybe CtEvidence
lookupInertDict (IC { inert_dicts = dicts }) cls tys
  = fmap ctEvidence (findDict dicts cls tys)
lookupSolvedDict :: InertSet -> Class -> [Type] -> Maybe CtEvidence
-- Returns Just if exactly this predicate type exists in the solved set.
-- (The original case expression was the identity on the Maybe result.)
lookupSolvedDict (IS { inert_solved_dicts = solved }) cls tys
  = findDict solved cls tys
{- *********************************************************************
* *
Irreds
* *
********************************************************************* -}
-- | Fold a function over the bag of irreducible constraints.
foldIrreds :: (Ct -> b -> b) -> Cts -> b -> b
foldIrreds f irreds seed = foldrBag f seed irreds
{- *********************************************************************
* *
Type equalities
* *
********************************************************************* -}
type EqualCtList = [Ct]
{- Note [EqualCtList invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* All are equalities
* All these equalities have the same LHS
* The list is never empty
* No element of the list can rewrite any other
From the fourth invariant it follows that the list is
- A single Given, or
- Any number of Wanteds and/or Deriveds
-}
addTyEq :: TyVarEnv EqualCtList -> TcTyVar -> Ct -> TyVarEnv EqualCtList
-- Cons the new equality onto any existing list for this tyvar
addTyEq old_list tv it = extendVarEnv_C (\old_eqs _new_eqs -> it : old_eqs)
                                        old_list tv [it]

foldTyEqs :: (Ct -> b -> b) -> TyVarEnv EqualCtList -> b -> b
-- Fold over every equality constraint in the environment
foldTyEqs k eqs z
  = foldVarEnv (\cts z -> foldr k z cts) z eqs

findTyEqs :: InertCans -> TyVar -> EqualCtList
-- All inert equalities with this tyvar as LHS (empty list if none)
findTyEqs icans tv = lookupVarEnv (inert_eqs icans) tv `orElse` []

delTyEq :: TyVarEnv EqualCtList -> TcTyVar -> TcType -> TyVarEnv EqualCtList
-- Remove the equality (tv ~ t), comparing RHSs with eqType
delTyEq m tv t = modifyVarEnv (filter (not . isThisOne)) m tv
  where isThisOne (CTyEqCan { cc_rhs = t1 }) = eqType t t1
        isThisOne _                          = False
{- *********************************************************************
* *
TcAppMap
* *
********************************************************************* -}
type TcAppMap a = UniqFM (ListMap TypeMap a)
   -- Indexed by tycon then the arg types
   -- Used for types and classes; hence UniqFM

isEmptyTcAppMap :: TcAppMap a -> Bool
isEmptyTcAppMap m = isNullUFM m

emptyTcAppMap :: TcAppMap a
emptyTcAppMap = emptyUFM

findTcApp :: TcAppMap a -> Unique -> [Type] -> Maybe a
-- Look up the entry for this (tycon/class unique, arg types) key
findTcApp m u tys = do { tys_map <- lookupUFM m u
                       ; lookupTM tys tys_map }

delTcApp :: TcAppMap a -> Unique -> [Type] -> TcAppMap a
-- Delete the entry for this key; no-op if absent
delTcApp m cls tys = adjustUFM (deleteTM tys) m cls

insertTcApp :: TcAppMap a -> Unique -> [Type] -> a -> TcAppMap a
-- Insert (or overwrite) the entry for this key
insertTcApp m cls tys ct = alterUFM alter_tm m cls
  where
    alter_tm mb_tm = Just (insertTM tys ct (mb_tm `orElse` emptyTM))
-- mapTcApp :: (a->b) -> TcAppMap a -> TcAppMap b
-- mapTcApp f = mapUFM (mapTM f)
filterTcAppMap :: (Ct -> Bool) -> TcAppMap Ct -> TcAppMap Ct
-- Keep only the constraints satisfying f, rebuilding each inner TypeMap;
-- panics on constraints without cc_tyargs (only CFunEqCan/CDictCan are keyed)
filterTcAppMap f m
  = mapUFM do_tm m
  where
    do_tm tm = foldTM insert_mb tm emptyTM
    insert_mb ct tm
      | f ct      = insertTM tys ct tm
      | otherwise = tm
      where
        tys = case ct of
                CFunEqCan { cc_tyargs = tys } -> tys
                CDictCan  { cc_tyargs = tys } -> tys
                _ -> pprPanic "filterTcAppMap" (ppr ct)

tcAppMapToBag :: TcAppMap a -> Bag a
-- Flatten the two-level map into a bag of all entries
tcAppMapToBag m = foldTcAppMap consBag m emptyBag

foldTcAppMap :: (a -> b -> b) -> TcAppMap a -> b -> b
-- Fold over every entry of the two-level map
foldTcAppMap k m z = foldUFM (foldTM k) z m
{- *********************************************************************
* *
DictMap
* *
********************************************************************* -}
-- A DictMap is a TcAppMap keyed by class (via its Unique) and arg types
type DictMap a = TcAppMap a

emptyDictMap :: DictMap a
emptyDictMap = emptyTcAppMap

-- sizeDictMap :: DictMap a -> Int
-- sizeDictMap m = foldDicts (\ _ x -> x+1) m 0

findDict :: DictMap a -> Class -> [Type] -> Maybe a
findDict m cls tys = findTcApp m (getUnique cls) tys

findDictsByClass :: DictMap a -> Class -> Bag a
-- All entries for this class, whatever their argument types
findDictsByClass m cls
  | Just tm <- lookupUFM m cls = foldTM consBag tm emptyBag
  | otherwise                  = emptyBag

delDict :: DictMap a -> Class -> [Type] -> DictMap a
delDict m cls tys = delTcApp m (getUnique cls) tys

addDict :: DictMap a -> Class -> [Type] -> a -> DictMap a
addDict m cls tys item = insertTcApp m (getUnique cls) tys item

addDictsByClass :: DictMap Ct -> Class -> Bag Ct -> DictMap Ct
-- Replace this class's entry with the given bag of CDictCans;
-- panics on any non-CDictCan in the bag
addDictsByClass m cls items
  = addToUFM m cls (foldrBag add emptyTM items)
  where
    add ct@(CDictCan { cc_tyargs = tys }) tm = insertTM tys ct tm
    add ct _ = pprPanic "addDictsByClass" (ppr ct)

filterDicts :: (Ct -> Bool) -> DictMap Ct -> DictMap Ct
filterDicts f m = filterTcAppMap f m

partitionDicts :: (Ct -> Bool) -> DictMap Ct -> (Bag Ct, DictMap Ct)
-- Split into (entries satisfying f, the rest as a DictMap)
partitionDicts f m = foldTcAppMap k m (emptyBag, emptyDicts)
  where
    k ct (yeses, noes) | f ct      = (ct `consBag` yeses, noes)
                       | otherwise = (yeses, add ct noes)
    add ct@(CDictCan { cc_class = cls, cc_tyargs = tys }) m
      = addDict m cls tys ct
    add ct _ = pprPanic "partitionDicts" (ppr ct)

dictsToBag :: DictMap a -> Bag a
dictsToBag = tcAppMapToBag

foldDicts :: (a -> b -> b) -> DictMap a -> b -> b
foldDicts = foldTcAppMap

emptyDicts :: DictMap a
emptyDicts = emptyTcAppMap
{- *********************************************************************
* *
FunEqMap
* *
********************************************************************* -}
type FunEqMap a = TcAppMap a  -- A map whose key is a (TyCon, [Type]) pair

emptyFunEqs :: TcAppMap a
emptyFunEqs = emptyTcAppMap

sizeFunEqMap :: FunEqMap a -> Int
sizeFunEqMap m = foldFunEqs (\ _ x -> x+1) m 0

findFunEq :: FunEqMap a -> TyCon -> [Type] -> Maybe a
findFunEq m tc tys = findTcApp m (getUnique tc) tys

findFunEqs :: FunEqMap a -> TyCon -> [Type] -> Maybe a
-- NB: currently identical to findFunEq
findFunEqs m tc tys = findTcApp m (getUnique tc) tys

funEqsToBag :: FunEqMap a -> Bag a
funEqsToBag m = foldTcAppMap consBag m emptyBag

findFunEqsByTyCon :: FunEqMap a -> TyCon -> [a]
-- Get inert function equation constraints that have the given tycon
-- in their head. Note that the constraints remain in the inert set.
-- We use this to check for derived interactions with built-in type-function
-- constructors.
findFunEqsByTyCon m tc
  | Just tm <- lookupUFM m tc = foldTM (:) tm []
  | otherwise                 = []

foldFunEqs :: (a -> b -> b) -> FunEqMap a -> b -> b
foldFunEqs = foldTcAppMap

-- mapFunEqs :: (a -> b) -> FunEqMap a -> FunEqMap b
-- mapFunEqs = mapTcApp

filterFunEqs :: (Ct -> Bool) -> FunEqMap Ct -> FunEqMap Ct
filterFunEqs = filterTcAppMap

insertFunEq :: FunEqMap a -> TyCon -> [Type] -> a -> FunEqMap a
insertFunEq m tc tys val = insertTcApp m (getUnique tc) tys val

-- insertFunEqCt :: FunEqMap Ct -> Ct -> FunEqMap Ct
-- insertFunEqCt m ct@(CFunEqCan { cc_fun = tc, cc_tyargs = tys })
--   = insertFunEq m tc tys ct
-- insertFunEqCt _ ct = pprPanic "insertFunEqCt" (ppr ct)

partitionFunEqs :: (Ct -> Bool) -> FunEqMap Ct -> ([Ct], FunEqMap Ct)
-- Optimise for the case where the predicate is false
-- partitionFunEqs is called only from kick-out, and kick-out usually
-- kicks out very few equalities, so we want to optimise for that case
partitionFunEqs f m = (yeses, foldr del m yeses)
  where
    yeses = foldTcAppMap k m []
    k ct yeses | f ct      = ct : yeses
               | otherwise = yeses
    del (CFunEqCan { cc_fun = tc, cc_tyargs = tys }) m
      = delFunEq m tc tys
    del ct _ = pprPanic "partitionFunEqs" (ppr ct)

delFunEq :: FunEqMap a -> TyCon -> [Type] -> FunEqMap a
delFunEq m tc tys = delTcApp m (getUnique tc) tys
{-
************************************************************************
* *
* The TcS solver monad *
* *
************************************************************************
Note [The TcS monad]
~~~~~~~~~~~~~~~~~~~~
The TcS monad is a weak form of the main Tc monad
All you can do is
* fail
* allocate new variables
* fill in evidence variables
Filling in a dictionary evidence variable means to create a binding
for it, so TcS carries a mutable location where the binding can be
added. This is initialised from the innermost implication constraint.
-}
-- | The mutable environment threaded through the TcS monad.
-- See Note [The TcS monad]
data TcSEnv
  = TcSEnv {
      tcs_ev_binds    :: EvBindsVar,   -- Where evidence bindings accumulate

      tcs_unified     :: IORef Int,
         -- The number of unification variables we have filled
         -- The important thing is whether it is non-zero

      tcs_count       :: IORef Int,    -- Global step count

      tcs_inerts      :: IORef InertSet, -- Current inert set

      -- The main work-list and the flattening worklist
      -- See Note [Work list priorities] and
      tcs_worklist    :: IORef WorkList -- Current worklist
    }
---------------
-- | The TcS monad: a reader of 'TcSEnv' over TcM.
-- See Note [The TcS monad]
newtype TcS a = TcS { unTcS :: TcSEnv -> TcM a }

instance Functor TcS where
  fmap f m = TcS $ fmap f . unTcS m

-- NB: pre-AMP style: pure/return and (<*>)/ap are deliberately tied together
instance Applicative TcS where
  pure  = return
  (<*>) = ap

instance Monad TcS where
  return x  = TcS (\_ -> return x)
  fail err  = TcS (\_ -> fail err)
  m >>= k   = TcS (\ebs -> unTcS m ebs >>= \r -> unTcS (k r) ebs)

instance MonadUnique TcS where
   getUniqueSupplyM = wrapTcS getUniqueSupplyM
-- Basic functionality
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
wrapTcS :: TcM a -> TcS a
-- Do not export wrapTcS, because it promotes an arbitrary TcM to TcS,
-- and TcS is supposed to have limited functionality
wrapTcS = TcS . const -- a TcM action will not use the TcEvBinds

wrapErrTcS :: TcM a -> TcS a
-- The thing wrapped should just fail
-- There's no static check; it's up to the user
-- Having a variant for each error message is too painful
wrapErrTcS = wrapTcS

wrapWarnTcS :: TcM a -> TcS a
-- The thing wrapped should just add a warning, or no-op
-- There's no static check; it's up to the user
wrapWarnTcS = wrapTcS

failTcS, panicTcS :: SDoc -> TcS a
-- failTcS aborts via ordinary TcM failure; panicTcS is a compiler panic
failTcS      = wrapTcS . TcM.failWith
panicTcS doc = pprPanic "TcCanonical" doc
traceTcS :: String -> SDoc -> TcS ()
-- Debug tracing, via TcM.traceTc
traceTcS herald doc = wrapTcS (TcM.traceTc herald doc)

runTcPluginTcS :: TcPluginM a -> TcS a
-- Run a type-checker plugin action inside TcS, handing it the
-- current evidence-bindings variable
runTcPluginTcS m = wrapTcS . runTcPluginM m . Just =<< getTcEvBinds

instance HasDynFlags TcS where
    getDynFlags = wrapTcS getDynFlags

getGlobalRdrEnvTcS :: TcS GlobalRdrEnv
getGlobalRdrEnvTcS = wrapTcS TcM.getGlobalRdrEnv
bumpStepCountTcS :: TcS ()
-- Increment the global solver step counter (tcs_count)
bumpStepCountTcS = TcS $ \env -> do { let ref = tcs_count env
                                    ; n <- TcM.readTcRef ref
                                    ; TcM.writeTcRef ref (n+1) }

csTraceTcS :: SDoc -> TcS ()
-- Constraint-solver tracing at level 1
csTraceTcS doc
  = wrapTcS $ csTraceTcM 1 (return doc)

traceFireTcS :: CtEvidence -> SDoc -> TcS ()
-- Dump a rule-firing trace
traceFireTcS ev doc
  = TcS $ \env -> csTraceTcM 1 $
    do { n <- TcM.readTcRef (tcs_count env)
       ; tclvl <- TcM.getTcLevel
       ; return (hang (int n <> brackets (ptext (sLit "U:") <> ppr tclvl
                                          <> ppr (ctLocDepth (ctEvLoc ev)))
                       <+> doc <> colon)
                     4 (ppr ev)) }

csTraceTcM :: Int -> TcM SDoc -> TcM ()
-- Constraint-solver tracing, -ddump-cs-trace
-- The document is built (mk_doc) only when the dump flags are on and
-- trace_level is within the configured traceLevel
csTraceTcM trace_level mk_doc
  = do { dflags <- getDynFlags
       ; when (  (dopt Opt_D_dump_cs_trace dflags || dopt Opt_D_dump_tc_trace dflags)
              && trace_level <= traceLevel dflags ) $
         do { msg <- mk_doc
            ; TcM.traceTcRn Opt_D_dump_cs_trace msg } }
runTcS :: TcS a                -- What to run
       -> TcM (a, Bag EvBind)
-- Run a TcS computation with a fresh evidence-bindings variable,
-- returning the result together with the evidence bindings produced
runTcS tcs
  = do { ev_binds_var <- TcM.newTcEvBinds
       ; res <- runTcSWithEvBinds ev_binds_var tcs
       ; ev_binds <- TcM.getTcEvBinds ev_binds_var
       ; return (res, ev_binds) }
runTcSWithEvBinds :: EvBindsVar
                  -> TcS a
                  -> TcM a
-- Run a TcS computation against the given evidence-bindings variable,
-- starting from an empty inert set and an empty work list
runTcSWithEvBinds ev_binds_var tcs
  = do { unified_var <- TcM.newTcRef 0
       ; step_count <- TcM.newTcRef 0
       ; inert_var <- TcM.newTcRef is
       ; wl_var <- TcM.newTcRef emptyWorkList

       ; let env = TcSEnv { tcs_ev_binds = ev_binds_var
                          , tcs_unified  = unified_var
                          , tcs_count    = step_count
                          , tcs_inerts   = inert_var
                          , tcs_worklist = wl_var }

             -- Run the computation
       ; res <- unTcS tcs env

       ; count <- TcM.readTcRef step_count
       ; when (count > 0) $
         csTraceTcM 0 $ return (ptext (sLit "Constraint solver steps =") <+> int count)

#ifdef DEBUG
       ; ev_binds <- TcM.getTcEvBinds ev_binds_var
       ; checkForCyclicBinds ev_binds
#endif

       ; return res }
  where
    is = emptyInert

#ifdef DEBUG
checkForCyclicBinds :: Bag EvBind -> TcM ()
-- DEBUG-only sanity check: a cycle among coercion bindings is a panic;
-- other evidence-binding cycles are merely traced
checkForCyclicBinds ev_binds
  | null cycles
  = return ()
  | null coercion_cycles
  = TcM.traceTc "Cycle in evidence binds" $ ppr cycles
  | otherwise
  = pprPanic "Cycle in coercion bindings" $ ppr coercion_cycles
  where
    cycles :: [[EvBind]]
    cycles = [c | CyclicSCC c <- stronglyConnCompFromEdgedVertices edges]

    coercion_cycles = [c | c <- cycles, any is_co_bind c]
    is_co_bind (EvBind { eb_lhs = b }) = isEqVar b

    edges :: [(EvBind, EvVar, [EvVar])]
    edges = [(bind, bndr, varSetElems (evVarsOfTerm rhs))
            | bind@(EvBind { eb_lhs = bndr, eb_rhs = rhs }) <- bagToList ev_binds]
#endif
nestImplicTcS :: EvBindsVar -> TcLevel -> TcS a -> TcS a
-- Run the inner computation at a deeper TcLevel with its own evidence
-- bindings, a copy of the inert set (minus the flat cache) and a fresh
-- work list; the unification and step counters are shared with the outer
-- computation
nestImplicTcS ref inner_tclvl (TcS thing_inside)
  = TcS $ \ TcSEnv { tcs_unified = unified_var
                   , tcs_inerts = old_inert_var
                   , tcs_count = count } ->
    do { inerts <- TcM.readTcRef old_inert_var
       ; let nest_inert = inerts { inert_flat_cache = emptyFunEqs }
                          -- See Note [Do not inherit the flat cache]
       ; new_inert_var <- TcM.newTcRef nest_inert
       ; new_wl_var <- TcM.newTcRef emptyWorkList
       ; let nest_env = TcSEnv { tcs_ev_binds = ref
                               , tcs_unified  = unified_var
                               , tcs_count    = count
                               , tcs_inerts   = new_inert_var
                               , tcs_worklist = new_wl_var }
       ; res <- TcM.setTcLevel inner_tclvl $
                thing_inside nest_env

#ifdef DEBUG
       -- Perform a check that the thing_inside did not cause cycles
       ; ev_binds <- TcM.getTcEvBinds ref
       ; checkForCyclicBinds ev_binds
#endif

       ; return res }
{- Note [Do not inherit the flat cache]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not want to inherit the flat cache when processing nested
implications. Consider
a ~ F b, forall c. b~Int => blah
If we have F b ~ fsk in the flat-cache, and we push that into the
nested implication, we might miss that F b can be rewritten to F Int,
and hence perhaps solve it. Moreover, the fsk from outside is
flattened out after solving the outer level, and we don't
do that flattening recursively.
-}
-- | Run the second computation; if it fails, run the first (the
-- recovery action) instead.  Both see the same TcS environment.
recoverTcS :: TcS a -> TcS a -> TcS a
recoverTcS (TcS recover) (TcS main_action)
  = TcS $ \ tcs_env ->
    TcM.recoverM (recover tcs_env) (main_action tcs_env)
nestTcS :: TcS a -> TcS a
-- Use the current untouchables, augmenting the current
-- evidence bindings, and solved dictionaries
-- But have no effect on the InertCans, or on the inert_flat_cache
-- (the latter because the thing inside a nestTcS does unflattening)
nestTcS (TcS thing_inside)
  = TcS $ \ env@(TcSEnv { tcs_inerts = inerts_var }) ->
    do { inerts <- TcM.readTcRef inerts_var
       ; new_inert_var <- TcM.newTcRef inerts
       ; new_wl_var <- TcM.newTcRef emptyWorkList
       ; let nest_env = env { tcs_inerts   = new_inert_var
                            , tcs_worklist = new_wl_var }

       ; res <- thing_inside nest_env

       ; new_inerts <- TcM.readTcRef new_inert_var

       -- we want to propagate the safe haskell failures
       ; let old_ic = inert_cans inerts
             new_ic = inert_cans new_inerts
             nxt_ic = old_ic { inert_safehask = inert_safehask new_ic }

       ; TcM.writeTcRef inerts_var  -- See Note [Propagate the solved dictionaries]
                        (inerts { inert_solved_dicts = inert_solved_dicts new_inerts
                                , inert_cans = nxt_ic })

       ; return res }
tryTcS :: TcS a -> TcS a
-- Like runTcS, but from within the TcS monad
-- Completely fresh inerts and worklist, be careful!
-- Moreover, we will simply throw away all the evidence generated.
tryTcS (TcS thing_inside)
  = TcS $ \env ->
    do { is_var <- TcM.newTcRef emptyInert
       ; unified_var <- TcM.newTcRef 0
       ; ev_binds_var <- TcM.newTcEvBinds   -- evidence goes here, then is discarded
       ; wl_var <- TcM.newTcRef emptyWorkList
       ; let nest_env = env { tcs_ev_binds = ev_binds_var
                            , tcs_unified  = unified_var
                            , tcs_inerts   = is_var
                            , tcs_worklist = wl_var }
       ; thing_inside nest_env }
{-
Note [Propagate the solved dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's really quite important that nestTcS does not discard the solved
dictionaries from the thing_inside.
Consider
Eq [a]
forall b. empty => Eq [a]
We solve the simple (Eq [a]), under nestTcS, and then turn our attention to
the implications. It's definitely fine to use the solved dictionaries on
the inner implications, and it can make a significant performance difference
if you do so.
-}
-- Getters and setters of TcEnv fields
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Getter of inerts and worklist
-- | The mutable reference holding the inert set.
getTcSInertsRef :: TcS (IORef InertSet)
getTcSInertsRef = TcS (return . tcs_inerts)

-- | The mutable reference holding the work list.
getTcSWorkListRef :: TcS (IORef WorkList)
getTcSWorkListRef = TcS (return . tcs_worklist)

-- | Read the current inert set.
getTcSInerts :: TcS InertSet
getTcSInerts = getTcSInertsRef >>= wrapTcS . (TcM.readTcRef)

-- | Overwrite the current inert set.
setTcSInerts :: InertSet -> TcS ()
setTcSInerts ics = do { r <- getTcSInertsRef; wrapTcS (TcM.writeTcRef r ics) }

getWorkListImplics :: TcS (Bag Implication)
-- The implications currently on the work list
getWorkListImplics
  = do { wl_var <- getTcSWorkListRef
       ; wl_curr <- wrapTcS (TcM.readTcRef wl_var)
       ; return (wl_implics wl_curr) }

updWorkListTcS :: (WorkList -> WorkList) -> TcS ()
-- Apply a pure transformation to the work list
updWorkListTcS f
  = do { wl_var <- getTcSWorkListRef
       ; wl_curr <- wrapTcS (TcM.readTcRef wl_var)
       ; let new_work = f wl_curr
       ; wrapTcS (TcM.writeTcRef wl_var new_work) }
emitWorkNC :: [CtEvidence] -> TcS ()
-- Put fresh non-canonical work items, built from the given evidence,
-- onto the work list; a no-op for the empty list
emitWorkNC evs
  | null evs
  = return ()
  | otherwise
  = do { traceTcS "Emitting fresh work" (vcat (map ppr evs))
       ; updWorkListTcS (extendWorkListCts (map mkNonCanonical evs)) }

emitWorkCt :: Ct -> TcS ()
-- Put a single (canonical) constraint onto the work list
emitWorkCt ct
  = do { traceTcS "Emitting fresh (canonical) work" (ppr ct)
       ; updWorkListTcS (extendWorkListCt ct) }
emitInsoluble :: Ct -> TcS ()
-- Emits a non-canonical constraint that will stand for a frozen error in the inerts.
emitInsoluble ct
  = do { traceTcS "Emit insoluble" (ppr ct $$ pprCtLoc (ctLoc ct))
       ; updInertTcS add_insol }
  where
    this_pred = ctPred ct
    add_insol is@(IS { inert_cans = ics@(IC { inert_insols = old_insols }) })
      | already_there = is
      | otherwise     = is { inert_cans = ics { inert_insols = old_insols `snocCts` ct } }
      where
        -- Wanteds are always added; Given/Derived insolubles are deduplicated
        -- by comparing the predicate type against those already present
        already_there = not (isWantedCt ct) && anyBag (tcEqType this_pred . ctPred) old_insols
        -- See Note [Do not add duplicate derived insolubles]
-- TcRef operations, lifted from TcM into TcS

newTcRef :: a -> TcS (TcRef a)
newTcRef x = wrapTcS (TcM.newTcRef x)

readTcRef :: TcRef a -> TcS a
readTcRef ref = wrapTcS (TcM.readTcRef ref)

updTcRef :: TcRef a -> (a->a) -> TcS ()
updTcRef ref upd_fn = wrapTcS (TcM.updTcRef ref upd_fn)

-- | The evidence-bindings variable for the current implication.
getTcEvBinds :: TcS EvBindsVar
getTcEvBinds = TcS (return . tcs_ev_binds)

getTcLevel :: TcS TcLevel
getTcLevel = wrapTcS TcM.getTcLevel

getTcEvBindsMap :: TcS EvBindMap
-- Read the accumulated evidence bindings
getTcEvBindsMap
  = do { EvBindsVar ev_ref _ <- getTcEvBinds
       ; wrapTcS $ TcM.readTcRef ev_ref }
unifyTyVar :: TcTyVar -> TcType -> TcS ()
-- Unify a meta-tyvar with a type
-- We keep track of how many unifications have happened in tcs_unified,
--
-- We should never unify the same variable twice!
unifyTyVar tv ty
  = ASSERT2( isMetaTyVar tv, ppr tv )
    TcS $ \ env ->
    do { TcM.traceTc "unifyTyVar" (ppr tv <+> text ":=" <+> ppr ty)
       ; TcM.writeMetaTyVar tv ty
       ; TcM.updTcRef (tcs_unified env) (+ 1) }

unflattenFmv :: TcTyVar -> TcType -> TcS ()
-- Fill a flatten-meta-var, simply by unifying it.
-- This does NOT count as a unification in tcs_unified.
unflattenFmv tv ty
  = ASSERT2( isMetaTyVar tv, ppr tv )
    TcS $ \ _ ->
    do { TcM.traceTc "unflattenFmv" (ppr tv <+> text ":=" <+> ppr ty)
       ; TcM.writeMetaTyVar tv ty }

reportUnifications :: TcS a -> TcS (Int, a)
-- Run the computation with a zeroed unification counter, returning the
-- number of unifications it performed alongside its result
reportUnifications (TcS thing_inside)
  = TcS $ \ env ->
    do { inner_unified <- TcM.newTcRef 0
       ; res <- thing_inside (env { tcs_unified = inner_unified })
       ; n_unifs <- TcM.readTcRef inner_unified
       ; TcM.updTcRef (tcs_unified env) (+ n_unifs)  -- Inner unifications affect
       ; return (n_unifs, res) }                     -- the outer scope too
getDefaultInfo ::  TcS ([Type], (Bool, Bool))
-- Defaulting information, via TcM.tcGetDefaultTys
getDefaultInfo = wrapTcS TcM.tcGetDefaultTys

-- Just get some environments needed for instance looking up and matching
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

getInstEnvs :: TcS InstEnvs
getInstEnvs = wrapTcS $ TcM.tcGetInstEnvs

getFamInstEnvs :: TcS (FamInstEnv, FamInstEnv)
getFamInstEnvs = wrapTcS $ FamInst.tcGetFamInstEnvs

getTopEnv :: TcS HscEnv
getTopEnv = wrapTcS $ TcM.getTopEnv

getGblEnv :: TcS TcGblEnv
getGblEnv = wrapTcS $ TcM.getGblEnv

getLclEnv :: TcS TcLclEnv
getLclEnv = wrapTcS $ TcM.getLclEnv

tcLookupClass :: Name -> TcS Class
tcLookupClass c = wrapTcS $ TcM.tcLookupClass c

-- Setting names as used (used in the deriving of Coercible evidence)
-- Too hackish to expose it to TcS? In that case somehow extract the used
-- constructors from the result of solveInteract
addUsedRdrNamesTcS :: [RdrName] -> TcS ()
addUsedRdrNamesTcS names = wrapTcS $ addUsedRdrNames names
-- Various smaller utilities [TODO, maybe will be absorbed in the instance matcher]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
checkWellStagedDFun :: PredType -> DFunId -> CtLoc -> TcS ()
-- Check Template Haskell staging for an instance (dfun) used at the
-- given constraint location
checkWellStagedDFun pred dfun_id loc
  = wrapTcS $ TcM.setCtLocM loc $
    do { use_stage <- TcM.getStage
       ; TcM.checkWellStaged pp_thing bind_lvl (thLevel use_stage) }
  where
    pp_thing = ptext (sLit "instance for") <+> quotes (ppr pred)
    bind_lvl = TcM.topIdLvl dfun_id

pprEq :: TcType -> TcType -> SDoc
-- Pretty-print "ty1 ~ ty2" with parenthesised operands
pprEq ty1 ty2 = pprParendType ty1 <+> char '~' <+> pprParendType ty2

isTouchableMetaTyVarTcS :: TcTyVar -> TcS Bool
-- Is this meta-tyvar touchable at the current TcLevel?
isTouchableMetaTyVarTcS tv
  = do { tclvl <- getTcLevel
       ; return $ isTouchableMetaTyVar tclvl tv }
isFilledMetaTyVar_maybe :: TcTyVar -> TcS (Maybe Type)
-- Just ty  if the meta-tyvar has been filled in with ty; Nothing otherwise
isFilledMetaTyVar_maybe tv
 = ASSERT2( isTcTyVar tv, ppr tv )
   case tcTyVarDetails tv of
     MetaTv { mtv_ref = ref }
        -> do { cts <- wrapTcS (TcM.readTcRef ref)
              ; case cts of
                  Indirect ty -> return (Just ty)
                  Flexi       -> return Nothing }
     _ -> return Nothing

isFilledMetaTyVar :: TcTyVar -> TcS Bool
isFilledMetaTyVar tv = wrapTcS (TcM.isFilledMetaTyVar tv)

-- Zonking wrappers: lift the TcM zonkers into TcS

zonkTyVarsAndFV :: TcTyVarSet -> TcS TcTyVarSet
zonkTyVarsAndFV tvs = wrapTcS (TcM.zonkTyVarsAndFV tvs)

zonkTcType :: TcType -> TcS TcType
zonkTcType ty = wrapTcS (TcM.zonkTcType ty)

zonkTcTypes :: [TcType] -> TcS [TcType]
zonkTcTypes tys = wrapTcS (TcM.zonkTcTypes tys)

zonkTcTyVar :: TcTyVar -> TcS TcType
zonkTcTyVar tv = wrapTcS (TcM.zonkTcTyVar tv)

zonkSimples :: Cts -> TcS Cts
zonkSimples cts = wrapTcS (TcM.zonkSimples cts)

zonkWC :: WantedConstraints -> TcS WantedConstraints
zonkWC wc = wrapTcS (TcM.zonkWC wc)
{-
Note [Do not add duplicate derived insolubles]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general we *must* add an insoluble (Int ~ Bool) even if there is
one such there already, because they may come from distinct call
sites. Not only do we want an error message for each, but with
-fdefer-type-errors we must generate evidence for each. But for
*derived* insolubles, we only want to report each one once. Why?
(a) A constraint (C r s t) where r -> s, say, may generate the same fundep
    equality many times, as the original constraint is successively rewritten.
(b) Ditto the successive iterations of the main solver itself, as it traverses
the constraint tree. See example below.
Also for *given* insolubles we may get repeated errors, as we
repeatedly traverse the constraint tree. These are relatively rare
anyway, so removing duplicates seems ok. (Alternatively we could take
the SrcLoc into account.)
Note that the test does not need to be particularly efficient because
it is only used if the program has a type error anyway.
Example of (b): assume a top-level class and instance declaration:
class D a b | a -> b
instance D [a] [a]
Assume we have started with an implication:
forall c. Eq c => { wc_simple = D [c] c [W] }
which we have simplified to:
forall c. Eq c => { wc_simple = D [c] c [W]
, wc_insols = (c ~ [c]) [D] }
For some reason, e.g. because we floated an equality somewhere else,
we might try to re-solve this implication. If we do not do a
dropDerivedWC, then we will end up trying to solve the following
constraints the second time:
(D [c] c) [W]
(c ~ [c]) [D]
which will result in two Deriveds to end up in the insoluble set:
wc_simple = D [c] c [W]
wc_insols = (c ~ [c]) [D], (c ~ [c]) [D]
-}
-- Flatten skolems
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- | Make a fresh flatten skolem (Given) or flatten meta-variable
-- (Wanted/Derived) standing for the type-family application @fam_ty@,
-- together with evidence for the equality @fam_ty ~ fsk@.
newFlattenSkolem :: CtFlavour -> CtLoc
                 -> TcType                      -- F xis
                 -> TcS (CtEvidence, TcTyVar)   -- [W] x:: F xis ~ fsk
newFlattenSkolem Given loc fam_ty
  = do { fsk <- newFsk fam_ty
         -- Given evidence is just reflexivity at fam_ty
       ; ev  <- newGivenEvVar loc (mkTcEqPred fam_ty (mkTyVarTy fsk),
                                   EvCoercion (mkTcNomReflCo fam_ty))
       ; return (ev, fsk) }
newFlattenSkolem Wanted loc fam_ty
  = do { fmv <- newFmv fam_ty
       ; ev  <- newWantedEvVarNC loc (mkTcEqPred fam_ty (mkTyVarTy fmv))
       ; return (ev, fmv) }
newFlattenSkolem Derived loc fam_ty
  = do { fmv <- newFmv fam_ty
       ; ev  <- newDerivedNC loc (mkTcEqPred fam_ty (mkTyVarTy fmv))
       ; return (ev, fmv) }
newFsk, newFmv :: TcType -> TcS TcTyVar
-- 'newFsk' makes a rigid flatten-skolem (for Givens);
-- 'newFmv' makes a flatten meta-variable, i.e. a unification variable
-- (for Wanteds/Deriveds).  Both carry the kind of the family application.
newFsk fam_ty
  = wrapTcS $ do { uniq <- TcM.newUnique
                 ; let name = TcM.mkTcTyVarName uniq (fsLit "fsk")
                 ; return (mkTcTyVar name (typeKind fam_ty) (FlatSkol fam_ty)) }
newFmv fam_ty
  = wrapTcS $ do { uniq <- TcM.newUnique
                 ; ref  <- TcM.newMutVar Flexi          -- starts unfilled
                 ; cur_lvl <- TcM.getTcLevel
                 ; let details = MetaTv { mtv_info  = FlatMetaTv
                                        , mtv_ref   = ref
                                        , mtv_tclvl = fmvTcLevel cur_lvl }
                       name = TcM.mkTcTyVarName uniq (fsLit "s")
                 ; return (mkTcTyVar name (typeKind fam_ty) details) }
-- | Record in the inert set's flat-cache that the family application
-- @tc xi_args@ flattens to the given (coercion, type, flavour) triple.
-- A no-op unless the Opt_FlatCache flag is on.
extendFlatCache :: TyCon -> [Type] -> (TcCoercion, TcType, CtFlavour) -> TcS ()
extendFlatCache tc xi_args stuff
  = do { dflags <- getDynFlags
       ; when (gopt Opt_FlatCache dflags) $
         updInertTcS $ \ is@(IS { inert_flat_cache = fc }) ->
            is { inert_flat_cache = insertFunEq fc tc xi_args stuff } }
-- Instantiations
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- | Instantiate a dictionary function at the given (partial)
-- instantiation, lifted into TcS.
instDFunType :: DFunId -> [DFunInstType] -> TcS ([TcType], TcThetaType)
instDFunType dfun_id = wrapTcS . TcM.instDFunType dfun_id

-- | A fresh unification variable of the given kind, as a type.
newFlexiTcSTy :: Kind -> TcS TcType
newFlexiTcSTy = wrapTcS . TcM.newFlexiTyVarTy

-- | Make a fresh copy of a meta type variable.
cloneMetaTyVar :: TcTyVar -> TcS TcTyVar
cloneMetaTyVar = wrapTcS . TcM.cloneMetaTyVar
demoteUnfilledFmv :: TcTyVar -> TcS ()
-- If a flatten-meta-var is still un-filled,
-- turn it into an ordinary meta-var
demoteUnfilledFmv fmv
  = wrapTcS $ do { is_filled <- TcM.isFilledMetaTyVar fmv
                 ; unless is_filled $
                   do { tv_ty <- TcM.newFlexiTyVarTy (tyVarKind fmv)
                        -- fill the fmv with a fresh ordinary unification var
                      ; TcM.writeMetaTyVar fmv tv_ty } }
-- | Instantiate the given variables with fresh flexi meta-variables,
-- accumulating a substitution so that each later variable's kind sees
-- the earlier instantiations.
instFlexiTcS :: [TKVar] -> TcS (TvSubst, [TcType])
instFlexiTcS tvs = wrapTcS (mapAccumLM inst_one emptyTvSubst tvs)
  where
    inst_one subst tv
      = do { ty' <- instFlexiTcSHelper (tyVarName tv)
                                       (substTy subst (tyVarKind tv))
           ; return (extendTvSubst subst tv ty', ty') }

-- | A fresh TauTv meta-variable reusing the given name (with a fresh
-- unique) at the given kind, returned as a type.
instFlexiTcSHelper :: Name -> Kind -> TcM TcType
instFlexiTcSHelper tvname kind
  = do { uniq <- TcM.newUnique
       ; details <- TcM.newMetaDetails TauTv
       ; let name = setNameUnique tvname uniq
       ; return (mkTyVarTy (mkTcTyVar name kind details)) }

-- | 'instFlexiTcSHelper' lifted into the TcS monad.
instFlexiTcSHelperTcS :: Name -> Kind -> TcS TcType
instFlexiTcSHelperTcS n k = wrapTcS (instFlexiTcSHelper n k)
-- Creating and setting evidence variables and CtFlavors
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-- | Did an evidence lookup create a brand-new variable, or hit the cache?
data Freshness = Fresh | Cached

-- | True exactly for 'Fresh'.
isFresh :: Freshness -> Bool
isFresh f = case f of
              Fresh  -> True
              Cached -> False

-- | Keep only the evidence that was freshly created.
freshGoals :: [(CtEvidence, Freshness)] -> [CtEvidence]
freshGoals = map fst . filter (isFresh . snd)
-- | Add an evidence binding to the current evidence-bindings variable.
setEvBind :: EvBind -> TcS ()
setEvBind ev_bind
  = do { tc_evbinds <- getTcEvBinds
       ; wrapTcS $ TcM.addTcEvBind tc_evbinds ev_bind }

-- | Bind a Wanted evidence variable to the given evidence term.
setWantedEvBind :: EvVar -> EvTerm -> TcS ()
setWantedEvBind ev_id tm = setEvBind (mkWantedEvBind ev_id tm)

-- | Like 'setWantedEvBind', but a no-op for Given/Derived evidence.
setEvBindIfWanted :: CtEvidence -> EvTerm -> TcS ()
setEvBindIfWanted ev tm
  = case ev of
      CtWanted { ctev_evar = ev_id } -> setWantedEvBind ev_id tm
      _                              -> return ()
-- | Allocate a fresh evidence-bindings variable.
newTcEvBinds :: TcS EvBindsVar
newTcEvBinds = wrapTcS TcM.newTcEvBinds

-- | Allocate a fresh evidence variable of the given predicate type.
newEvVar :: TcPredType -> TcS EvVar
newEvVar = wrapTcS . TcM.newEvVar
newGivenEvVar :: CtLoc -> (TcPredType, EvTerm) -> TcS CtEvidence
-- Make a new variable of the given PredType,
-- immediately bind it to the given term
-- and return its CtEvidence
-- See Note [Bind new Givens immediately] in TcRnTypes
-- Precondition: this is not a kind equality
--               See Note [Do not create Given kind equalities]
newGivenEvVar loc (pred, rhs)
  = ASSERT2( not (isKindEquality pred), ppr pred $$ pprCtOrigin (ctLocOrigin loc) )
    do { -- checkReductionDepth loc pred
       ; new_ev <- newEvVar pred
       ; setEvBind (mkGivenEvBind new_ev rhs)   -- bind it straight away
       ; return (CtGiven { ctev_pred = pred, ctev_evar = new_ev, ctev_loc = loc }) }

newGivenEvVars :: CtLoc -> [(TcPredType, EvTerm)] -> TcS [CtEvidence]
-- Like newGivenEvVar, but automatically discard kind equalities
-- See Note [Do not create Given kind equalities]
newGivenEvVars loc pts = mapM (newGivenEvVar loc) (filterOut (isKindEquality . fst) pts)

isKindEquality :: TcPredType -> Bool
-- See Note [Do not create Given kind equalities]
-- True if the predicate is an equality whose left type is a kind
-- (hence both sides are kinds).
isKindEquality pred = case classifyPredType pred of
                        EqPred _ t1 _ -> isKind t1
                        _             -> False
{- Note [Do not create Given kind equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not want to create a Given kind equality like
[G] kv ~ k -- kv is a skolem kind variable
-- Reason we don't yet support non-Refl kind equalities
This showed up in Trac #8566, where we had a data type
data I (u :: U *) (r :: [*]) :: * where
A :: I (AA t as) r -- Existential k
so A has type
A :: forall (u:U *) (r:[*]) Universal
(k:BOX) (t:k) (as:[U *]). Existential
(u ~ AA * k t as) => I u r
There is no direct kind equality, but in a pattern match where 'u' is
instantiated to, say, (AA * kk (t1:kk) as1), we'd decompose to get
k ~ kk, t ~ t1, as ~ as1
This is bad. We "fix" this by simply ignoring the Given kind equality
But the Right Thing is to add kind equalities!
But note (Trac #8705) that we *do* create Given (non-canonical) equalities
with un-equal kinds, e.g.
[G] t1::k1 ~ t2::k2 -- k1 and k2 are un-equal kinds
Reason: k1 or k2 might be unification variables that have already been
unified (at this point we have not canonicalised the types), so we want
to emit this t1~t2 as a (non-canonical) Given in the work-list. If k1/k2
have been unified, we'll find that when we canonicalise it, and the
t1~t2 information may be crucial (Trac #8705 is an example).
If it turns out that k1 and k2 are really un-equal, then it'll end up
as an Irreducible (see Note [Equalities with incompatible kinds] in
TcCanonical), and will do no harm.
-}
newWantedEvVarNC :: CtLoc -> TcPredType -> TcS CtEvidence
-- Don't look up in the solved/inerts; we know it's not there
newWantedEvVarNC loc pty
  = do { -- checkReductionDepth loc pty
       ; new_ev <- newEvVar pty
       ; traceTcS "Emitting new wanted" (ppr new_ev <+> dcolon <+> ppr pty $$
                                         pprCtLoc loc)
       ; return (CtWanted { ctev_pred = pty, ctev_evar = new_ev, ctev_loc = loc })}

newWantedEvVar :: CtLoc -> TcPredType -> TcS (CtEvidence, Freshness)
-- For anything except ClassPred, this is the same as newWantedEvVarNC
-- First consult the inert/solved cache; only allocate a fresh evidence
-- variable on a miss (or when the cached constraint is merely Derived,
-- since Deriveds carry no evidence).
newWantedEvVar loc pty
  = do { mb_ct <- lookupInInerts pty
       ; case mb_ct of
            Just ctev | not (isDerived ctev)
                      -> do { traceTcS "newWantedEvVar/cache hit" $ ppr ctev
                            ; return (ctev, Cached) }
            _ -> do { ctev <- newWantedEvVarNC loc pty
                    ; return (ctev, Fresh) } }
-- | Create a new Derived constraint and put it on the work list.
emitNewDerived :: CtLoc -> TcPredType -> TcS ()
emitNewDerived loc pred
  = do { ev <- newDerivedNC loc pred
       ; traceTcS "Emitting new derived" (ppr ev)
       ; updWorkListTcS (extendWorkListDerived loc ev) }

-- | Like 'emitNewDerived' but for several predicates at once;
-- a no-op for the empty list.
emitNewDeriveds :: CtLoc -> [TcPredType] -> TcS ()
emitNewDeriveds loc preds
  | null preds
  = return ()
  | otherwise
  = do { evs <- mapM (newDerivedNC loc) preds
       ; traceTcS "Emitting new deriveds" (ppr evs)
       ; updWorkListTcS (extendWorkListDeriveds loc evs) }

emitNewDerivedEq :: CtLoc -> TcPredType -> TcS ()
-- Create new equality Derived and put it in the work list
-- There's no caching, no lookupInInerts
emitNewDerivedEq loc pred
  = do { ev <- newDerivedNC loc pred
       ; traceTcS "Emitting new derived equality" (ppr ev $$ pprCtLoc loc)
       ; updWorkListTcS (extendWorkListDerived loc ev) }

-- | Make a Derived CtEvidence; Deriveds carry no evidence term.
newDerivedNC :: CtLoc -> TcPredType -> TcS CtEvidence
newDerivedNC loc pred
  = do { -- checkReductionDepth loc pred
       ; return (CtDerived { ctev_pred = pred, ctev_loc = loc }) }
-- --------- Check done in TcInteract.selectNewWorkItem???? ---------
-- | Checks if the depth of the given location is too much. Fails if
-- it's too big, with an appropriate error message.
checkReductionDepth :: CtLoc -> TcType   -- ^ type being reduced
                    -> TcS ()
checkReductionDepth loc ty
  = do { dflags <- getDynFlags
       ; when (subGoalDepthExceeded dflags (ctLocDepth loc)) $
         wrapErrTcS $
         solverDepthErrorTcS loc ty }   -- reports the error; does not return
-- | Try a one-step reduction of a type-family application, in TcS.
matchFam :: TyCon -> [Type] -> TcS (Maybe (TcCoercion, TcType))
matchFam tycon args = wrapTcS $ matchFamTcM tycon args

matchFamTcM :: TyCon -> [Type] -> TcM (Maybe (TcCoercion, TcType))
-- Given (F tys) return (ty, co), where co :: F tys ~ ty
matchFamTcM tycon args
  = do { fam_envs <- FamInst.tcGetFamInstEnvs
       ; return $ fmap (first TcCoercion) $
         reduceTyFamApp_maybe fam_envs Nominal tycon args }
{-
Note [Residual implications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The wl_implics in the WorkList are the residual implication
constraints that are generated while solving or canonicalising the
current worklist. Specifically, when canonicalising
(forall a. t1 ~ forall a. t2)
from which we get the implication
(forall a. t1 ~ t2)
See TcSMonad.deferTcSForAllEq
-}
-- Deferring forall equalities as implications
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
deferTcSForAllEq :: Role               -- Nominal or Representational
                 -> CtLoc              -- Original wanted equality flavor
                 -> ([TyVar],TcType)   -- ForAll tvs1 body1
                 -> ([TyVar],TcType)   -- ForAll tvs2 body2
                 -> TcS EvTerm
-- Some of this functionality is repeated from TcUnify,
-- consider having a single place where we create fresh implications.
-- Skolemises tvs1, instantiates tvs2 at the same types, emits a Wanted
-- equality between the two bodies (wrapped in a fresh implication when
-- not already cached), and returns a forall-coercion built over it.
deferTcSForAllEq role loc (tvs1,body1) (tvs2,body2)
 = do { (subst1, skol_tvs) <- wrapTcS $ TcM.tcInstSkolTyVars tvs1
      ; let tys  = mkTyVarTys skol_tvs
            phi1 = Type.substTy subst1 body1
            phi2 = Type.substTy (zipTopTvSubst tvs2 tys) body2
            skol_info = UnifyForAllSkol skol_tvs phi1
            eq_pred   = case role of
                          Nominal          -> mkTcEqPred phi1 phi2
                          Representational -> mkCoerciblePred phi1 phi2
                          Phantom          -> panic "deferTcSForAllEq Phantom"
      ; (ctev, freshness) <- newWantedEvVar loc eq_pred
      ; coe_inside <- case freshness of
          Cached -> return (ctEvCoercion ctev)   -- already have evidence
          Fresh  -> do { ev_binds_var <- newTcEvBinds
                       ; env <- getLclEnv
                       ; let ev_binds = TcEvBinds ev_binds_var
                             new_ct = mkNonCanonical ctev
                             new_co = ctEvCoercion ctev
                             -- bump the level for the new implication
                             new_tclvl = pushTcLevel (tcl_tclvl env)
                       ; let wc = WC { wc_simple = singleCt new_ct
                                     , wc_impl   = emptyBag
                                     , wc_insol  = emptyCts }
                             imp = Implic { ic_tclvl  = new_tclvl
                                          , ic_skols  = skol_tvs
                                          , ic_no_eqs = True
                                          , ic_given  = []
                                          , ic_wanted = wc
                                          , ic_status = IC_Unsolved
                                          , ic_binds  = ev_binds_var
                                          , ic_env    = env
                                          , ic_info   = skol_info }
                       ; updWorkListTcS (extendWorkListImplic imp)
                       ; return (TcLetCo ev_binds new_co) }
      ; return $ EvCoercion (foldr mkTcForAllCo coe_inside skol_tvs) }
| anton-dessiatov/ghc | compiler/typecheck/TcSMonad.hs | bsd-3-clause | 110,988 | 42 | 24 | 31,351 | 17,791 | 9,401 | 8,390 | -1 | -1 |
module Model where
import Prelude
import Yesod
import Data.Text (Text)
import Database.Persist.Quasi
import Data.Typeable (Typeable)
import Data.Time
import Yesod.Auth.HashDB (HashDBUser(..))
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
share [mkPersist sqlOnlySettings, mkMigrate "migrateAll"]
$(persistFileWith lowerCaseSettings "config/models")
-- | Tell yesod-auth-hashdb where the hashed password and per-user salt
-- live on the User entity.
instance HashDBUser User where
    userPasswordHash = userPassword
    userPasswordSalt = Just . userSalt
setSaltAndPasswordHash s h p = p { userSalt = s,
userPassword = Just h
} | ikeda-yuko/nomnichi-haskell | Model.hs | bsd-2-clause | 742 | 0 | 8 | 157 | 140 | 81 | 59 | -1 | -1 |
-- GSoC 2015 - Haskell bindings for OpenCog.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
-- | This Module defines some useful data types for proper interaction
-- with the AtomSpace C wrapper library.
-- Intended for internal use only.
module OpenCog.AtomSpace.Internal (
Handle(..)
, HandleSeq(..)
, TVRaw(..)
, fromTVRaw
, toTVRaw
, tvMAX_PARAMS
) where
import Foreign (Ptr)
import Data.Functor ((<$>))
import Data.Typeable (cast,Typeable)
import OpenCog.AtomSpace.Sugar (noTv)
import OpenCog.AtomSpace.Types (AtomName(..),AtomType(..)
,Atom(..),TruthVal(..))
type Handle = Ptr ()
type HandleSeq = Ptr Handle
-- Constant with the maximum number of parameters in any type of TV.
tvMAX_PARAMS :: Int
tvMAX_PARAMS = 5

-- | Raw, C-friendly representation of a truth value: its type name
-- plus its (at most 'tvMAX_PARAMS') numeric parameters.
data TVRaw = TVRaw String [Double] deriving (Eq,Show)

-- | Flatten a 'TruthVal' into its raw representation.
toTVRaw :: TruthVal -> TVRaw
toTVRaw (SimpleTV a b     ) = TVRaw "SimpleTruthValue" [a,b]
toTVRaw (CountTV a b c    ) = TVRaw "CountTruthValue" [a,b,c]
toTVRaw (IndefTV a b c d e) = TVRaw "IndefiniteTruthValue" [a,b,c,d,e]
toTVRaw (FuzzyTV a b      ) = TVRaw "FuzzyTruthValue" [a,b]
toTVRaw (ProbTV a b c     ) = TVRaw "ProbabilisticTruthValue" [a,b,c]
-- | Rebuild a 'TruthVal' from its raw representation; the inverse of
-- 'toTVRaw'.  Calls 'error' for an unrecognised type name or too few
-- parameters (the final catch-all equation).
fromTVRaw :: TVRaw -> TruthVal
fromTVRaw (TVRaw "SimpleTruthValue" (a:b:_) ) = SimpleTV a b
fromTVRaw (TVRaw "CountTruthValue" (a:b:c:_) ) = CountTV a b c
fromTVRaw (TVRaw "IndefiniteTruthValue" (a:b:c:d:e:_)) = IndefTV a b c d e
fromTVRaw (TVRaw "FuzzyTruthValue" (a:b:_) ) = FuzzyTV a b
fromTVRaw (TVRaw "ProbabilisticTruthValue" (a:b:c:_) ) = ProbTV a b c
-- Fixed error message: was "Don't know hot to handel TV of type:"
fromTVRaw tv = error $ "Don't know how to handle TV of type: " ++ show tv
| inflector/atomspace | opencog/haskell/OpenCog/AtomSpace/Internal.hs | agpl-3.0 | 1,812 | 0 | 13 | 471 | 572 | 320 | 252 | 34 | 1 |
{-# LANGUAGE DatatypeContexts #-}
import Data.Array
-- | A DFA given indirectly: a start-state index into a list of states.
type IndirectDfa a = (Int, [IndirectState a])

-- | One indirect state: an accepting flag plus labelled transitions to
-- state indices.
data IndirectState a =
    IndirectState Bool [(a, Int)]
-- | A DFA as a directly-linked (possibly cyclic) structure: each state
-- knows whether it accepts and carries its outgoing transitions.
data DirectDfa a
  = DirectState Bool [(a, DirectDfa a)]

-- | Run the DFA over the input, following the first matching transition
-- for each symbol; a symbol with no matching transition rejects.
runDfa :: (Eq a) => DirectDfa a -> [a] -> Bool
runDfa (DirectState accepting _) [] = accepting
runDfa (DirectState _ outgoing) (sym:rest) =
  case lookup sym outgoing of
    Just next -> runDfa next rest
    Nothing   -> False
indirectToDirect :: IndirectDfa a -> DirectDfa a
indirectToDirect (start, states)
= tieArray ! start
where
tieArray = array (0,length states - 1)
[ (i,direct s) | (i,s) <- zip [0..] states ]
direct (IndirectState final trans)
= DirectState final [ (x, tieArray ! s) | (x,s) <- trans ]
-- | Automaton state: a label, an accepting flag, and transition lists
-- for input symbol 0 ('trans0') and input symbol 1 ('trans1').
-- Note: uses a datatype context, which needs the (deprecated)
-- DatatypeContexts extension enabled at the top of this file.
data (Ord l,Show l) => FaState l =
    FaState {label :: l, acceptQ :: Bool,
             trans0:: [FaState l],
             trans1:: [FaState l]}

-- | A finite automaton, given as its list of start states.
type FinAu l = [FaState l]

-- Missing pattern for SelectorClosure
-- Two mutually recursive (cyclic) states.
dom18 = [one]
    where one = FaState 1 True [one,two] []
          two = FaState 2 True [two,one] [one,two]
| bitemyapp/ghc-vis | docs/dfa.hs | bsd-3-clause | 1,133 | 0 | 12 | 330 | 472 | 263 | 209 | 29 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="it-IT">
<title>Windows WebDrivers</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/webdrivers/webdriverwindows/src/main/javahelp/org/zaproxy/zap/extension/webdriverwindows/resources/help_it_IT/helpset_it_IT.hs | apache-2.0 | 963 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1998
\section[ConLike]{@ConLike@: Constructor-like things}
-}
{-# LANGUAGE CPP #-}
module ConLike (
ConLike(..)
, conLikeArity
, conLikeFieldLabels
, conLikeInstOrigArgTys
, conLikeExTyVars
, conLikeName
, conLikeStupidTheta
, conLikeWrapId_maybe
, conLikeImplBangs
, conLikeFullSig
, conLikeResTy
, conLikeFieldType
, conLikesWithFields
, conLikeIsInfix
) where
#include "HsVersions.h"
import GhcPrelude
import DataCon
import PatSyn
import Outputable
import Unique
import Util
import Name
import BasicTypes
import TyCoRep (Type, ThetaType)
import Var
import Type (mkTyConApp)
import qualified Data.Data as Data
{-
************************************************************************
* *
\subsection{Constructor-like things}
* *
************************************************************************
-}
-- | A constructor-like thing
data ConLike = RealDataCon DataCon   -- ^ An ordinary data constructor
             | PatSynCon PatSyn      -- ^ A pattern synonym
{-
************************************************************************
* *
\subsection{Instances}
* *
************************************************************************
-}
instance Eq ConLike where
    (==) = eqConLike

-- | Equality by unique; sound because uniques identify things globally.
eqConLike :: ConLike -> ConLike -> Bool
eqConLike x y = getUnique x == getUnique y

-- There used to be an Ord ConLike instance here that used Unique for ordering.
-- It was intentionally removed to prevent determinism problems.
-- See Note [Unique Determinism] in Unique.

instance Uniquable ConLike where
    getUnique (RealDataCon dc) = getUnique dc
    getUnique (PatSynCon ps)   = getUnique ps

instance NamedThing ConLike where
    getName (RealDataCon dc) = getName dc
    getName (PatSynCon ps)   = getName ps

instance Outputable ConLike where
    ppr (RealDataCon dc) = ppr dc
    ppr (PatSynCon ps)   = ppr ps

instance OutputableBndr ConLike where
    pprInfixOcc (RealDataCon dc) = pprInfixOcc dc
    pprInfixOcc (PatSynCon ps)   = pprInfixOcc ps
    pprPrefixOcc (RealDataCon dc) = pprPrefixOcc dc
    pprPrefixOcc (PatSynCon ps)   = pprPrefixOcc ps

-- Deliberately abstract: generic traversal of a ConLike is not wanted.
instance Data.Data ConLike where
    -- don't traverse?
    toConstr _   = abstractConstr "ConLike"
    gunfold _ _  = error "gunfold"
    dataTypeOf _ = mkNoRepType "ConLike"
-- | Number of source-syntax arguments.
conLikeArity :: ConLike -> Arity
conLikeArity cl = case cl of
  RealDataCon data_con -> dataConSourceArity data_con
  PatSynCon   pat_syn  -> patSynArity pat_syn

-- | Names of fields used for selectors
conLikeFieldLabels :: ConLike -> [FieldLabel]
conLikeFieldLabels cl = case cl of
  RealDataCon data_con -> dataConFieldLabels data_con
  PatSynCon   pat_syn  -> patSynFieldLabels pat_syn

-- | Returns just the instantiated /value/ argument types of a 'ConLike',
-- (excluding dictionary args)
conLikeInstOrigArgTys :: ConLike -> [Type] -> [Type]
conLikeInstOrigArgTys cl tys = case cl of
  RealDataCon data_con -> dataConInstOrigArgTys data_con tys
  PatSynCon   pat_syn  -> patSynInstArgTys pat_syn tys

-- | Existentially quantified type variables
conLikeExTyVars :: ConLike -> [TyVar]
conLikeExTyVars cl = case cl of
  RealDataCon dcon -> dataConExTyVars dcon
  PatSynCon   psyn -> patSynExTyVars psyn

-- | The 'Name' of the underlying data constructor or pattern synonym.
conLikeName :: ConLike -> Name
conLikeName cl = case cl of
  RealDataCon data_con -> dataConName data_con
  PatSynCon   pat_syn  -> patSynName pat_syn
-- | The \"stupid theta\" of the 'ConLike', such as @data Eq a@ in:
--
-- > data Eq a => T a = ...
--
-- It is empty for `PatSynCon` as they do not allow such contexts.
conLikeStupidTheta :: ConLike -> ThetaType
conLikeStupidTheta cl = case cl of
  RealDataCon data_con -> dataConStupidTheta data_con
  PatSynCon {}         -> []

-- | Returns the `Id` of the wrapper. This is also known as the builder in
-- some contexts. The value is Nothing only in the case of unidirectional
-- pattern synonyms.
conLikeWrapId_maybe :: ConLike -> Maybe Id
conLikeWrapId_maybe cl = case cl of
  RealDataCon data_con -> Just (dataConWrapId data_con)
  PatSynCon   pat_syn  -> fst <$> patSynBuilder pat_syn

-- | Returns the strictness information for each constructor argument;
-- pattern-synonym arguments are all lazy.
conLikeImplBangs :: ConLike -> [HsImplBang]
conLikeImplBangs cl = case cl of
  RealDataCon data_con -> dataConImplBangs data_con
  PatSynCon   pat_syn  -> replicate (patSynArity pat_syn) HsLazy

-- | Returns the type of the whole pattern
conLikeResTy :: ConLike -> [Type] -> Type
conLikeResTy cl tys = case cl of
  RealDataCon con -> mkTyConApp (dataConTyCon con) tys
  PatSynCon   ps  -> patSynInstResTy ps tys
-- | The \"full signature\" of the 'ConLike' returns, in order:
--
-- 1) The universally quantified type variables
--
-- 2) The existentially quantified type variables
--
-- 3) The equality specification
--
-- 4) The provided theta (the constraints provided by a match)
--
-- 5) The required theta (the constraints required for a match)
--
-- 6) The original argument types (i.e. before
--    any change of the representation of the type)
--
-- 7) The original result type
conLikeFullSig :: ConLike
               -> ([TyVar], [TyVar], [EqSpec]
                  , ThetaType, ThetaType, [Type], Type)
conLikeFullSig (RealDataCon con) =
  let (univ_tvs, ex_tvs, eq_spec, theta, arg_tys, res_ty) = dataConFullSig con
  -- Required theta is empty as normal data cons require no additional
  -- constraints for a match
  in (univ_tvs, ex_tvs, eq_spec, theta, [], arg_tys, res_ty)
conLikeFullSig (PatSynCon pat_syn) =
  let (univ_tvs, req, ex_tvs, prov, arg_tys, res_ty) = patSynSig pat_syn
  -- eqSpec is empty
  in (univ_tvs, ex_tvs, [], prov, req, arg_tys, res_ty)
-- | Extract the type for any given labelled field of the 'ConLike'
conLikeFieldType :: ConLike -> FieldLabelString -> Type
conLikeFieldType cl label = case cl of
  PatSynCon   ps -> patSynFieldType ps label
  RealDataCon dc -> dataConFieldType dc label

-- | The ConLikes that have *all* the given fields
conLikesWithFields :: [ConLike] -> [FieldLabelString] -> [ConLike]
conLikesWithFields con_likes lbls
  = [ cl | cl <- con_likes, all (has_fld cl) lbls ]
  where
    has_fld cl lbl = any (\ fl -> flLabel fl == lbl) (conLikeFieldLabels cl)

-- | Is the 'ConLike' used infix in source syntax?
conLikeIsInfix :: ConLike -> Bool
conLikeIsInfix cl = case cl of
  RealDataCon dc -> dataConIsInfix dc
  PatSynCon   ps -> patSynIsInfix ps
| ezyang/ghc | compiler/basicTypes/ConLike.hs | bsd-3-clause | 6,688 | 0 | 11 | 1,430 | 1,288 | 695 | 593 | 101 | 1 |
module T13585 where
import T13585b (extractZonedTime)
-- | Regression check for #13585: just print the imported value.
main :: IO ()
main = print extractZonedTime
| ezyang/ghc | testsuite/tests/typecheck/should_compile/T13585.hs | bsd-3-clause | 99 | 0 | 6 | 15 | 30 | 17 | 13 | 4 | 1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[StgLint]{A ``lint'' pass to check for Stg correctness}
-}
{-# LANGUAGE CPP #-}
module StgLint ( lintStgBindings ) where
import StgSyn
import Bag ( Bag, emptyBag, isEmptyBag, snocBag, bagToList )
import Id ( Id, idType, isLocalId )
import VarSet
import DataCon
import CoreSyn ( AltCon(..) )
import PrimOp ( primOpType )
import Literal ( literalType )
import Maybes
import Name ( getSrcLoc )
import ErrUtils ( MsgDoc, Severity(..), mkLocMessage )
import TypeRep
import Type
import TyCon
import Util
import SrcLoc
import Outputable
import FastString
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ( Applicative(..) )
#endif
import Control.Monad
import Data.Function
#include "HsVersions.h"
{-
Checks for
(a) *some* type errors
(b) locally-defined variables used but not defined
Note: unless -dverbose-stg is on, display of lint errors will result
in "panic: bOGUS_LVs".
WARNING:
~~~~~~~~
This module has suffered bit-rot; it is likely to yield lint errors
for Stg code that is currently perfectly acceptable for code
generation. Solution: don't use it! (KSW 2000-05).
************************************************************************
* *
\subsection{``lint'' for various constructs}
* *
************************************************************************
@lintStgBindings@ is the top-level interface function.
-}
lintStgBindings :: String -> [StgBinding] -> [StgBinding]
-- ^ Lint a whole STG program; @whodunnit@ names the phase that produced
-- it, for the panic banner.  Returns the bindings unchanged on success;
-- panics with the accumulated lint messages otherwise.
lintStgBindings whodunnit binds
  = {-# SCC "StgLint" #-}
    case (initL (lint_binds binds)) of
      Nothing  -> binds
      Just msg -> pprPanic "" (vcat [
                        ptext (sLit "*** Stg Lint ErrMsgs: in") <+>
                              text whodunnit <+> ptext (sLit "***"),
                        msg,
                        ptext (sLit "*** Offending Program ***"),
                        pprStgBindings binds,
                        ptext (sLit "*** End of Offense ***")])
  where
    lint_binds :: [StgBinding] -> LintM ()
    -- Lint each binding group in turn, bringing its binders into
    -- scope for the remaining groups.
    lint_binds [] = return ()
    lint_binds (bind:binds) = do
        binders <- lintStgBinds bind
        addInScopeVars binders $
            lint_binds binds
-- | Type of an STG argument: literals carry their own type; variable
-- occurrences are additionally checked for being in scope.
lintStgArg :: StgArg -> LintM (Maybe Type)
lintStgArg (StgLitArg lit) = return (Just (literalType lit))
lintStgArg (StgVarArg v)   = lintStgVar v

-- | Check a variable occurrence is in scope and return its type.
lintStgVar :: Id -> LintM (Maybe Kind)
lintStgVar v = do checkInScope v
                  return (Just (idType v))
lintStgBinds :: StgBinding -> LintM [Id] -- Returns the binders
-- ^ For recursive groups the binders are brought into scope before
-- linting the right-hand sides.
lintStgBinds (StgNonRec binder rhs) = do
    lint_binds_help (binder,rhs)
    return [binder]
lintStgBinds (StgRec pairs)
  = addInScopeVars binders $ do
        mapM_ lint_binds_help pairs
        return binders
  where
    binders = [b | (b,_) <- pairs]

-- | Lint a single (binder, rhs) pair, at the RhsOf location.
lint_binds_help :: (Id, StgRhs) -> LintM ()
lint_binds_help (binder, rhs)
  = addLoc (RhsOf binder) $ do
        -- Check the rhs
        _maybe_rhs_ty <- lintStgRhs rhs

        -- Check binder doesn't have unlifted type
        checkL (not (isUnLiftedType binder_ty))
               (mkUnLiftedTyMsg binder rhs)

        -- Check match to RHS type
        -- Actually we *can't* check the RHS type, because
        -- unsafeCoerce means it really might not match at all
        -- notably;  eg x::Int = (error @Bool "urk") |> unsafeCoerce...
        -- case maybe_rhs_ty of
        --   Nothing     -> return ()
        --   Just rhs_ty -> checkTys binder_ty
        --                           rhs_ty
        ---                          (mkRhsMsg binder rhs_ty)

        return ()
  where
    binder_ty = idType binder
lintStgRhs :: StgRhs -> LintM (Maybe Type)   -- Just ty => type is exact
-- ^ Lint a right-hand side; closures with binders lint their body with
-- the binders in scope, constructor RHSs are checked as an application.
lintStgRhs (StgRhsClosure _ _ _ _ _ [] expr)
  = lintStgExpr expr

lintStgRhs (StgRhsClosure _ _ _ _ _ binders expr)
  = addLoc (LambdaBodyOf binders) $
    addInScopeVars binders $ runMaybeT $ do
        body_ty <- MaybeT $ lintStgExpr expr
        return (mkFunTys (map idType binders) body_ty)

lintStgRhs (StgRhsCon _ con args) = runMaybeT $ do
    arg_tys <- mapM (MaybeT . lintStgArg) args
    MaybeT $ checkFunApp con_ty arg_tys (mkRhsConMsg con_ty arg_tys)
  where
    con_ty = dataConRepType con
lintStgExpr :: StgExpr -> LintM (Maybe Type) -- Just ty => type is exact
-- ^ Lint an expression; Nothing means the type could not be computed
-- accurately, in which case downstream checks are skipped.
lintStgExpr (StgLit l) = return (Just (literalType l))

lintStgExpr e@(StgApp fun args) = runMaybeT $ do
    fun_ty <- MaybeT $ lintStgVar fun
    arg_tys <- mapM (MaybeT . lintStgArg) args
    MaybeT $ checkFunApp fun_ty arg_tys (mkFunAppMsg fun_ty arg_tys e)

lintStgExpr e@(StgConApp con args) = runMaybeT $ do
    arg_tys <- mapM (MaybeT . lintStgArg) args
    MaybeT $ checkFunApp con_ty arg_tys (mkFunAppMsg con_ty arg_tys e)
  where
    con_ty = dataConRepType con

lintStgExpr e@(StgOpApp (StgPrimOp op) args _) = runMaybeT $ do
    arg_tys <- mapM (MaybeT . lintStgArg) args
    MaybeT $ checkFunApp op_ty arg_tys (mkFunAppMsg op_ty arg_tys e)
  where
    op_ty = primOpType op

lintStgExpr (StgOpApp _ args res_ty) = runMaybeT $ do
    -- We don't have enough type information to check
    -- the application for StgFCallOp and StgPrimCallOp; ToDo
    _maybe_arg_tys <- mapM (MaybeT . lintStgArg) args
    return res_ty

-- Lambdas should have been eliminated before STG; report them.
lintStgExpr (StgLam bndrs _) = do
    addErrL (ptext (sLit "Unexpected StgLam") <+> ppr bndrs)
    return Nothing

lintStgExpr (StgLet binds body) = do
    binders <- lintStgBinds binds
    addLoc (BodyOfLetRec binders) $
        addInScopeVars binders $
            lintStgExpr body

lintStgExpr (StgLetNoEscape _ _ binds body) = do
    binders <- lintStgBinds binds
    addLoc (BodyOfLetRec binders) $
        addInScopeVars binders $
            lintStgExpr body

lintStgExpr (StgTick _ expr) = lintStgExpr expr

lintStgExpr (StgCase scrut _ _ bndr _ alts_type alts) = runMaybeT $ do
    _ <- MaybeT $ lintStgExpr scrut
    -- Decide whether the case binder should be in scope in the
    -- alternatives, checking its tycon against the alt type on the way.
    in_scope <- MaybeT $ liftM Just $
     case alts_type of
        AlgAlt tc    -> check_bndr tc >> return True
        PrimAlt tc   -> check_bndr tc >> return True
        UbxTupAlt _  -> return False -- Binder is always dead in this case
        PolyAlt      -> return True
    MaybeT $ addInScopeVars [bndr | in_scope] $
             lintStgAlts alts scrut_ty
  where
    scrut_ty = idType bndr
    UnaryRep scrut_rep = repType scrut_ty -- Not used if scrutinee is unboxed tuple
    check_bndr tc = case tyConAppTyCon_maybe scrut_rep of
                      Just bndr_tc -> checkL (tc == bndr_tc) bad_bndr
                      Nothing      -> addErrL bad_bndr
                    where
                      bad_bndr = mkDefltMsg bndr tc
lintStgAlts :: [StgAlt]
            -> Type               -- Type of scrutinee
            -> LintM (Maybe Type) -- Just ty => type is accurate
-- ^ Lint each alternative; the result type is that of the first
-- alternative with a computable type (if any).
lintStgAlts alts scrut_ty = do
    maybe_result_tys <- mapM (lintAlt scrut_ty) alts

    -- Check the result types
    case catMaybes (maybe_result_tys) of
      []              -> return Nothing
      (first_ty:_tys) -> do -- mapM_ check tys
                            return (Just first_ty)
  where
    -- check ty = checkTys first_ty ty (mkCaseAltMsg alts)
    -- We can't check that the alternatives have the
    -- same type, because they don't, with unsafeCoerce#
-- | Lint a single case alternative against the scrutinee type, then
-- lint its right-hand side with the pattern binders in scope.
lintAlt :: Type -> (AltCon, [Id], [Bool], StgExpr) -> LintM (Maybe Type)
lintAlt _ (DEFAULT, _, _, rhs)
  = lintStgExpr rhs

lintAlt scrut_ty (LitAlt lit, _, _, rhs) = do
    checkTys (literalType lit) scrut_ty (mkAltMsg1 scrut_ty)
    lintStgExpr rhs

lintAlt scrut_ty (DataAlt con, args, _, rhs) = do
    case splitTyConApp_maybe scrut_ty of
      Just (tycon, tys_applied) | isAlgTyCon tycon &&
                                  not (isNewTyCon tycon) -> do
         let
           cons    = tyConDataCons tycon
           arg_tys = dataConInstArgTys con tys_applied
                -- This does not work for existential constructors

         -- The constructor must belong to the scrutinee's tycon and be
         -- applied to the right number of binders.
         checkL (con `elem` cons) (mkAlgAltMsg2 scrut_ty con)
         checkL (length args == dataConRepArity con) (mkAlgAltMsg3 con args)
         when (isVanillaDataCon con) $
           mapM_ check (zipEqual "lintAlgAlt:stg" arg_tys args)
         return ()
      _ ->
         addErrL (mkAltMsg1 scrut_ty)
    addInScopeVars args $
         lintStgExpr rhs
  where
    check (ty, arg) = checkTys ty (idType arg) (mkAlgAltMsg4 ty arg)

    -- elem: yes, the elem-list here can sometimes be long-ish,
    -- but as it's use-once, probably not worth doing anything different
    -- We give it its own copy, so it isn't overloaded.
    elem _ []     = False
    elem x (y:ys) = x==y || elem x ys
{-
************************************************************************
* *
\subsection[lint-monad]{The Lint monad}
* *
************************************************************************
-}
-- | The lint monad: a reader over the current locations and in-scope
-- variables, plus a writer-like accumulator of error messages.
newtype LintM a = LintM
    { unLintM :: [LintLocInfo]      -- Locations
              -> IdSet              -- Local vars in scope
              -> Bag MsgDoc         -- Error messages so far
              -> (a, Bag MsgDoc)    -- Result and error messages (if any)
    }

-- | Where we are in the program, for error messages.
data LintLocInfo
  = RhsOf Id            -- The variable bound
  | LambdaBodyOf [Id]   -- The lambda-binder
  | BodyOfLetRec [Id]   -- One of the binders

-- | Render a location as (source span, description).
dumpLoc :: LintLocInfo -> (SrcSpan, SDoc)
dumpLoc (RhsOf v) =
  (srcLocSpan (getSrcLoc v), ptext (sLit " [RHS of ") <> pp_binders [v] <> char ']' )
dumpLoc (LambdaBodyOf bs) =
  (srcLocSpan (getSrcLoc (head bs)), ptext (sLit " [in body of lambda with binders ") <> pp_binders bs <> char ']' )
dumpLoc (BodyOfLetRec bs) =
  (srcLocSpan (getSrcLoc (head bs)), ptext (sLit " [in body of letrec with binders ") <> pp_binders bs <> char ']' )

-- | Comma-separated "binder :: type" listing.
pp_binders :: [Id] -> SDoc
pp_binders bs
  = sep (punctuate comma (map pp_binder bs))
  where
    pp_binder b
      = hsep [ppr b, dcolon, ppr (idType b)]

-- | Run a lint computation: Nothing on success, otherwise the combined
-- error messages.
initL :: LintM a -> Maybe MsgDoc
initL (LintM m)
  = case (m [] emptyVarSet emptyBag) of { (_, errs) ->
    if isEmptyBag errs then
        Nothing
    else
        Just (vcat (punctuate blankLine (bagToList errs)))
    }
-- Functor/Applicative are defined via the Monad instance, for
-- compatibility with pre-AMP GHCs (see the CPP import above).
instance Functor LintM where
      fmap = liftM

instance Applicative LintM where
      pure = return
      (<*>) = ap

instance Monad LintM where
    return a = LintM $ \_loc _scope errs -> (a, errs)
    (>>=) = thenL
    (>>)  = thenL_
thenL :: LintM a -> (a -> LintM b) -> LintM b
-- ^ Monadic bind: thread location, scope and accumulated errors through.
thenL m k = LintM $ \loc scope errs
  -> case unLintM m loc scope errs of
       (r, errs') -> unLintM (k r) loc scope errs'

thenL_ :: LintM a -> LintM b -> LintM b
-- ^ Like 'thenL' but discards the first result.
thenL_ m k = LintM $ \loc scope errs
  -> case unLintM m loc scope errs of
       (_, errs') -> unLintM k loc scope errs'

checkL :: Bool -> MsgDoc -> LintM ()
-- ^ Record the message unless the condition holds.
checkL True  _   = return ()
checkL False msg = addErrL msg

addErrL :: MsgDoc -> LintM ()
-- ^ Record a lint error at the current location stack.
addErrL msg = LintM $ \loc _scope errs -> ((), addErr errs msg loc)

addErr :: Bag MsgDoc -> MsgDoc -> [LintLocInfo] -> Bag MsgDoc
-- ^ Append a message, prefixed with the innermost location (if any).
addErr errs_so_far msg locs
  = errs_so_far `snocBag` mk_msg locs
  where
    mk_msg (loc:_) = let (l,hdr) = dumpLoc loc
                     in  mkLocMessage SevWarning l (hdr $$ msg)
    mk_msg []      = msg

addLoc :: LintLocInfo -> LintM a -> LintM a
-- ^ Push a location for the scope of the enclosed computation.
addLoc extra_loc m = LintM $ \loc scope errs
  -> unLintM m (extra_loc:loc) scope errs

addInScopeVars :: [Id] -> LintM a -> LintM a
-- ^ Extend the in-scope set for the enclosed computation.
addInScopeVars ids m = LintM $ \loc scope errs
  -> -- We check if these "new" ids are already
     -- in scope, i.e., we have *shadowing* going on.
     -- For now, it's just a "trace"; we may make
     -- a real error out of it...
     let
       new_set = mkVarSet ids
     in
     -- After adding -fliberate-case, Simon decided he likes shadowed
     -- names after all.  WDP 94/07
     -- (if isEmptyVarSet shadowed
     --  then id
     --  else pprTrace "Shadowed vars:" (ppr (varSetElems shadowed))) $
     unLintM m loc (scope `unionVarSet` new_set) errs
{-
Checking function applications: we only check that the type has the
right *number* of arrows, we don't actually compare the types. This
is because we can't expect the types to be equal - the type
applications and type lambdas that we use to calculate accurate types
have long since disappeared.
-}
checkFunApp :: Type -- The function type
-> [Type] -- The arg type(s)
-> MsgDoc -- Error message
-> LintM (Maybe Type) -- Just ty => result type is accurate
checkFunApp fun_ty arg_tys msg
= do { case mb_msg of
Just msg -> addErrL msg
Nothing -> return ()
; return mb_ty }
where
(mb_ty, mb_msg) = cfa True fun_ty arg_tys
cfa :: Bool -> Type -> [Type] -> (Maybe Type -- Accurate result?
, Maybe MsgDoc) -- Errors?
cfa accurate fun_ty [] -- Args have run out; that's fine
= (if accurate then Just fun_ty else Nothing, Nothing)
cfa accurate fun_ty arg_tys@(arg_ty':arg_tys')
| Just (arg_ty, res_ty) <- splitFunTy_maybe fun_ty
= if accurate && not (arg_ty `stgEqType` arg_ty')
then (Nothing, Just msg) -- Arg type mismatch
else cfa accurate res_ty arg_tys'
| Just (_, fun_ty') <- splitForAllTy_maybe fun_ty
= cfa False fun_ty' arg_tys
| Just (tc,tc_args) <- splitTyConApp_maybe fun_ty
, isNewTyCon tc
= if length tc_args < tyConArity tc
then WARN( True, text "cfa: unsaturated newtype" <+> ppr fun_ty $$ msg )
(Nothing, Nothing) -- This is odd, but I've seen it
else cfa False (newTyConInstRhs tc tc_args) arg_tys
| Just tc <- tyConAppTyCon_maybe fun_ty
, not (isTypeFamilyTyCon tc) -- Definite error
= (Nothing, Just msg) -- Too many args
| otherwise
= (Nothing, Nothing)
stgEqType :: Type -> Type -> Bool
-- Compare types, but crudely because we have discarded
-- both casts and type applications, so types might look
-- different but be the same. So reply "True" if in doubt.
-- "False" means that the types are definitely different.
--
-- Fundamentally this is a losing battle because of unsafeCoerce
stgEqType orig_ty1 orig_ty2
= gos (repType orig_ty1) (repType orig_ty2)
where
gos :: RepType -> RepType -> Bool
gos (UbxTupleRep tys1) (UbxTupleRep tys2)
= equalLength tys1 tys2 && and (zipWith go tys1 tys2)
gos (UnaryRep ty1) (UnaryRep ty2) = go ty1 ty2
gos _ _ = False
go :: UnaryType -> UnaryType -> Bool
go ty1 ty2
| Just (tc1, tc_args1) <- splitTyConApp_maybe ty1
, Just (tc2, tc_args2) <- splitTyConApp_maybe ty2
, let res = if tc1 == tc2
then equalLength tc_args1 tc_args2 && and (zipWith (gos `on` repType) tc_args1 tc_args2)
else -- TyCons don't match; but don't bleat if either is a
-- family TyCon because a coercion might have made it
-- equal to something else
(isFamilyTyCon tc1 || isFamilyTyCon tc2)
= if res then True
else
pprTrace "stgEqType: unequal" (vcat [ppr ty1, ppr ty2])
False
| otherwise = True -- Conservatively say "fine".
-- Type variables in particular
checkInScope :: Id -> LintM ()
checkInScope id = LintM $ \loc scope errs
-> if isLocalId id && not (id `elemVarSet` scope) then
((), addErr errs (hsep [ppr id, ptext (sLit "is out of scope")]) loc)
else
((), errs)
checkTys :: Type -> Type -> MsgDoc -> LintM ()
checkTys ty1 ty2 msg = LintM $ \loc _scope errs
-> if (ty1 `stgEqType` ty2)
then ((), errs)
else ((), addErr errs msg loc)
_mkCaseAltMsg :: [StgAlt] -> MsgDoc
_mkCaseAltMsg _alts
= ($$) (text "In some case alternatives, type of alternatives not all same:")
(Outputable.empty) -- LATER: ppr alts
mkDefltMsg :: Id -> TyCon -> MsgDoc
mkDefltMsg bndr tc
= ($$) (ptext (sLit "Binder of a case expression doesn't match type of scrutinee:"))
(ppr bndr $$ ppr (idType bndr) $$ ppr tc)
mkFunAppMsg :: Type -> [Type] -> StgExpr -> MsgDoc
mkFunAppMsg fun_ty arg_tys expr
= vcat [text "In a function application, function type doesn't match arg types:",
hang (ptext (sLit "Function type:")) 4 (ppr fun_ty),
hang (ptext (sLit "Arg types:")) 4 (vcat (map (ppr) arg_tys)),
hang (ptext (sLit "Expression:")) 4 (ppr expr)]
mkRhsConMsg :: Type -> [Type] -> MsgDoc
mkRhsConMsg fun_ty arg_tys
= vcat [text "In a RHS constructor application, con type doesn't match arg types:",
hang (ptext (sLit "Constructor type:")) 4 (ppr fun_ty),
hang (ptext (sLit "Arg types:")) 4 (vcat (map (ppr) arg_tys))]
mkAltMsg1 :: Type -> MsgDoc
mkAltMsg1 ty
= ($$) (text "In a case expression, type of scrutinee does not match patterns")
(ppr ty)
mkAlgAltMsg2 :: Type -> DataCon -> MsgDoc
mkAlgAltMsg2 ty con
= vcat [
text "In some algebraic case alternative, constructor is not a constructor of scrutinee type:",
ppr ty,
ppr con
]
mkAlgAltMsg3 :: DataCon -> [Id] -> MsgDoc
mkAlgAltMsg3 con alts
= vcat [
text "In some algebraic case alternative, number of arguments doesn't match constructor:",
ppr con,
ppr alts
]
mkAlgAltMsg4 :: Type -> Id -> MsgDoc
mkAlgAltMsg4 ty arg
= vcat [
text "In some algebraic case alternative, type of argument doesn't match data constructor:",
ppr ty,
ppr arg
]
_mkRhsMsg :: Id -> Type -> MsgDoc
_mkRhsMsg binder ty
= vcat [hsep [ptext (sLit "The type of this binder doesn't match the type of its RHS:"),
ppr binder],
hsep [ptext (sLit "Binder's type:"), ppr (idType binder)],
hsep [ptext (sLit "Rhs type:"), ppr ty]
]
mkUnLiftedTyMsg :: Id -> StgRhs -> SDoc
mkUnLiftedTyMsg binder rhs
= (ptext (sLit "Let(rec) binder") <+> quotes (ppr binder) <+>
ptext (sLit "has unlifted type") <+> quotes (ppr (idType binder)))
$$
(ptext (sLit "RHS:") <+> ppr rhs)
| forked-upstream-packages-for-ghcjs/ghc | compiler/stgSyn/StgLint.hs | bsd-3-clause | 18,280 | 0 | 19 | 5,286 | 4,898 | 2,496 | 2,402 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
-- This program must be called with GHC's libdir as the single command line
-- argument.
module Main where
-- import Data.Generics
import Data.Data
import Data.List
import System.IO
import GHC
import DynFlags
import MonadUtils
import Outputable
import Bag (filterBag,isEmptyBag)
import System.Directory (removeFile)
import System.Environment( getArgs )
import qualified Data.Map as Map
import Data.Dynamic ( fromDynamic,Dynamic )
main::IO()
main = do
[libdir] <- getArgs
testOneFile libdir "LiteralsTest"
testOneFile libdir fileName = do
p <- runGhc (Just libdir) $ do
dflags <- getSessionDynFlags
setSessionDynFlags dflags
let mn =mkModuleName fileName
addTarget Target { targetId = TargetModule mn
, targetAllowObjCode = True
, targetContents = Nothing }
load LoadAllTargets
modSum <- getModSummary mn
p <- GHC.parseModule modSum
return p
let res = gq (pm_parsed_source p)
putStrLn (intercalate "\n" res)
where
gq ast = everything (++) ([] `mkQ` doHsLit `extQ` doOverLit) ast
doHsLit :: HsLit -> [String]
doHsLit (HsChar src c) = ["HsChar [" ++ src ++ "] " ++ show c]
doHsLit (HsCharPrim src c) = ["HsCharPrim [" ++ src ++ "] " ++ show c]
doHsLit (HsString src c) = ["HsString [" ++ src ++ "] " ++ show c]
doHsLit (HsStringPrim src c) = ["HsStringPrim [" ++ src ++ "] " ++ show c]
doHsLit (HsInt src c) = ["HsInt [" ++ src ++ "] " ++ show c]
doHsLit (HsIntPrim src c) = ["HsIntPrim [" ++ src ++ "] " ++ show c]
doHsLit (HsWordPrim src c) = ["HsWordPrim [" ++ src ++ "] " ++ show c]
doHsLit (HsInt64Prim src c) = ["HsInt64Prim [" ++ src ++ "] " ++ show c]
doHsLit (HsWord64Prim src c) = ["HsWord64Prim [" ++ src ++ "] " ++ show c]
doHsLit (HsInteger src c _) = ["HsInteger [" ++ src ++ "] " ++ show c]
doHsLit _ = []
doOverLit :: OverLitVal -> [String]
doOverLit (HsIntegral src c) = ["HsIntegral [" ++ src ++ "] " ++ show c]
doOverLit (HsIsString src c) = ["HsIsString [" ++ src ++ "] " ++ show c]
doOverLit _ = []
pp a = showPpr unsafeGlobalDynFlags a
-- ---------------------------------------------------------------------
-- Copied from syb for the test
-- | Generic queries of type \"r\",
-- i.e., take any \"a\" and return an \"r\"
--
type GenericQ r = forall a. Data a => a -> r
-- | Build a generic query: when the argument's runtime type matches
-- @b@, apply the type-specific function; otherwise return the default.
mkQ :: (Typeable a, Typeable b) => r -> (b -> r) -> a -> r
mkQ dflt specific a = maybe dflt specific (cast a)
-- | Extend a generic query with an extra type-specific case: use the
-- specific function when the argument has type @b@, otherwise fall
-- back to the generic one.
extQ :: (Typeable a, Typeable b) => (a -> q) -> (b -> q) -> a -> q
extQ generic specific a =
    case cast a of
      Just b  -> specific b
      Nothing -> generic a
-- | Summarise all nodes in top-down, left-to-right order
everything :: (r -> r -> r) -> GenericQ r -> GenericQ r
-- Apply f to x to summarise top-level node;
-- use gmapQ to recurse into immediate subterms;
-- use ordinary foldl to reduce list of intermediate results
-- NOTE(review): the lazy 'foldl' can build thunks on very deep terms;
-- this mirrors syb's own definition, so it is left unchanged here.
everything k f x = foldl k (f x) (gmapQ (everything k f) x)
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/ghc-api/annotations-literals/parsed.hs | bsd-3-clause | 3,322 | 1 | 14 | 943 | 1,041 | 540 | 501 | 69 | 13 |
module R where
import P
r = "R: " ++ p
| mydaum/cabal | cabal-testsuite/PackageTests/InternalLibraries/r/R.hs | bsd-3-clause | 39 | 0 | 5 | 11 | 16 | 10 | 6 | 3 | 1 |
module T11462_Plugin(plugin) where
import TcRnMonad ( TcPlugin(..), TcPluginResult(..) )
import Plugins ( defaultPlugin, Plugin(..), CommandLineOption )
plugin :: Plugin
plugin = defaultPlugin { tcPlugin = Just . thePlugin }
thePlugin :: [CommandLineOption] -> TcPlugin
thePlugin opts = TcPlugin
{ tcPluginInit = return ()
, tcPluginSolve = \_ _ _ _ -> return $ TcPluginOk [] []
, tcPluginStop = \_ -> return ()
}
| olsner/ghc | testsuite/tests/typecheck/should_compile/T11462_Plugin.hs | bsd-3-clause | 427 | 0 | 10 | 77 | 147 | 87 | 60 | 10 | 1 |
{-# LANGUAGE PatternGuards #-}
module Text.BraVal.Types
( Symbolic (..)
, Symbol (..)
, SymbolPrimitive (..)
, Cursor (..), startingCursor, advanceLine, advanceColumn
, table
) where
import Control.Arrow ((>>>))
import Data.List (groupBy)
import Data.Monoid ((<>))
data SymbolPrimitive = ORound | OSquare | OCurled | CRound | CSquare | CCurled | Blank String
deriving (Eq, Show, Read)
data Cursor = Cursor { line :: Integer, column :: Integer } deriving Show
startingCursor = Cursor { line = 1, column = 1 }
advanceLine p = p { line = (line p + 1), column = 0 }
advanceColumn p = p { column = (column p + 1) }
data Symbol = Symbol Cursor SymbolPrimitive deriving Show
class Symbolic s where
isOpen, isClose, isBlank :: s -> Bool
isMatching :: s -> s -> Bool
lexer :: String -> [s]
-- | Mapping from bracket characters to their symbol constructors.
-- The list order matters only for the exported value's identity; all
-- keys are distinct, so 'lookup' behaviour is unaffected.
table = [ ('(', ORound)
        , ('[', OSquare)
        , ('{', OCurled)
        , ('}', CCurled)
        , (']', CSquare)
        , (')', CRound)
        ]
-- | Classify one character: a known bracket maps to its symbol, any
-- other character becomes a one-character 'Blank'.
fromChar c = maybe (Blank [c]) id (lookup c table)
-- | Apply a function to the primitive inside a positioned 'Symbol',
-- discarding the cursor.
smap f (Symbol _ s) = f s
instance Symbolic SymbolPrimitive where
isOpen x = case x of
ORound -> True
OSquare -> True
OCurled -> True
_ -> False
isClose x = case x of
CRound -> True
CSquare -> True
CCurled -> True
_ -> False
isBlank x = case x of
Blank _ -> True
_ -> False
o `isMatching` c = case (o,c) of
(ORound, CRound) -> True
(OSquare, CSquare) -> True
(OCurled, CCurled) -> True
_ -> False
lexer [] = []
lexer (x:xs)
| (not . isBlank . fromChar) x = proceed $ fromChar x
| otherwise = case lexer xs of
(Blank string) : _ -> Blank (x:string) : lexer (drop (length string) xs) -- Lookahead!
_ -> proceed $ Blank (x:[])
where proceed = (: lexer xs)
instance Symbolic Symbol where
isOpen = smap isOpen
isClose = smap isClose
isBlank = smap isBlank
(Symbol p s) `isMatching` (Symbol q t) = s `isMatching` t
lexer = lines >>> (zipMatrix grid) >>> concat
>>> (fmap mkSymbol) >>> (groupBy eq) >>> (fmap glue)
where
eq x y
| (not . isBlank) x || (not . isBlank) y = False
| otherwise = True
zipMatrix = zipWith $ zipWith (,)
grid = fmap (\x -> fmap (\y -> (x,y)) [1..] ) [1..]
mkSymbol ((line,char),s) = Symbol (Cursor { line = line, column = char }) (fromChar s)
glue :: [Symbol] -> Symbol
glue [] = undefined -- Should never happen.
glue [x] = x
glue ((Symbol c (Blank s)) : xs)
= Symbol c (Blank (s ++ (extract . glue $ xs)))
glue xs = undefined -- Should never happen.
extract (Symbol _ (Blank x)) = x
| kindaro/BraVal | src/Text/BraVal/Types.hs | isc | 2,896 | 0 | 15 | 973 | 1,131 | 619 | 512 | 77 | 1 |
module GHCJS.DOM.StorageQuotaCallback (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/StorageQuotaCallback.hs | mit | 50 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
module TicTacToe (Board, Player, gameOver, winner, tie, move, initBoard, yourTurn) where

import Data.Maybe (isNothing, isJust)

-- | The two players; X moves first on an empty board.
data Player = Xer | Oer
    deriving (Eq, Show)

-- | Contents of one board cell: taken by X, taken by O, or empty.
data Cell = X | O | E
    deriving (Eq, Show)

-- | A board is a flat list of 9 cells, row-major (indices 0..8).
type Board = [Cell]

-- | The three rows of the board.
rows :: Board -> [[Cell]]
rows b = [[b!!0, b!!1, b!!2],
          [b!!3, b!!4, b!!5],
          [b!!6, b!!7, b!!8]]

-- | The three columns of the board.
-- Fixed: the middle column is cells 1,4,7 and the right column 2,5,8;
-- previously the middle column was listed as 1,4,5, so column wins
-- were mis-detected.
columns :: Board -> [[Cell]]
columns b = [[b!!0, b!!3, b!!6],
             [b!!1, b!!4, b!!7],
             [b!!2, b!!5, b!!8]]

-- | The two diagonals of the board.
diagonals :: Board -> [[Cell]]
diagonals b = [[b!!0, b!!4, b!!8],
               [b!!2, b!!4, b!!6]]

-- | The cell mark a player places.
playerToCell :: Player -> Cell
playerToCell Xer = X
playerToCell Oer = O

-- | Single-character rendering of a player.
playerToString :: Player -> Char
playerToString Xer = 'X'
playerToString Oer = 'O'

-- | True when the player owns every cell of some line produced by the
-- given line extractor (rows, columns or diagonals).
control :: (Board -> [[Cell]]) -> Board -> Player -> Bool
control f b p = any (all (== playerToCell p)) (f b)

rowControl :: Board -> Player -> Bool
rowControl = control rows

columnControl :: Board -> Player -> Bool
columnControl = control columns

diagonalControl :: Board -> Player -> Bool
diagonalControl = control diagonals

-- | True when the player owns a full row, column or diagonal.
win :: Board -> Player -> Bool
win b p = rowControl b p || columnControl b p || diagonalControl b p

-- | The winning player, if any.
winner :: Board -> Maybe Player
winner b | win b Xer = Just Xer
         | win b Oer = Just Oer
         | otherwise = Nothing

-- | True when no empty cells remain.
boardFull :: Board -> Bool
boardFull b = numEs b == 0

-- | The game ends on a tie or a win.
gameOver :: Board -> Bool
gameOver b = tie b || isJust (winner b)

-- | A tie: the board is full and nobody has won.
tie :: Board -> Bool
tie b = isNothing (winner b) && boardFull b

-- | Whose turn it is: X moves when the number of empties is odd
-- (X moves first on the 9-empty initial board).
whoseTurn :: Board -> Player
whoseTurn b | even $ numEs b = Oer
            | otherwise = Xer

-- | How many cells hold the given mark.
numCells :: Board -> Cell -> Int
numCells bs c = length (filter (== c) bs)

-- | Number of empty cells.
numEs :: Board -> Int
numEs b = numCells b E

-- | Place the player's mark at position n (0-based, assumed in range;
-- 'move' validates before calling).
replace :: Board -> Int -> Player -> Board
replace b n p = before ++ playerToCell p : drop 1 rest
  where (before, rest) = splitAt n b

-- | Attempt a move at position n. Returns Nothing when the index is
-- out of range, the game is already over, or the cell is occupied.
-- Fixed: the bounds check now runs before the cell is inspected, so an
-- out-of-range index returns Nothing instead of crashing on (b !! n).
move :: Board -> Int -> Maybe Board
move b n | n > 8 || n < 0 = Nothing
         | gameOver b = Nothing
         | b!!n /= E = Nothing
         | otherwise = Just (replace b n p)
    where p = whoseTurn b

-- | Prompt string naming the player to move.
yourTurn :: Board -> String
yourTurn board = "It is " ++ playerToString (whoseTurn board) : "'s turn"

-- | The empty starting board.
initBoard :: Board
initBoard = [E, E, E,
             E, E, E,
             E, E, E]

--randomBoard :: Board
--randomBoard = [O, X, O,
--               X, O, E,
--               X, E, E]
--oWin :: Board
--oWin = [X, X, O,
--        E, X, O,
--        E, E, O]
--xWin :: Board
--xWin = [X, O, X,
--        O, X, O,
--        E, E, X]
--tieBoard :: Board
--tieBoard = [X, O, X,
--            X, O, O,
--            O, X, X]
--xFullBoard :: Board
--xFullBoard = [X, O, O,
--              X, O, O,
--              X, X, X]
| CementTheBlock/TicTacToe | src/TicTacToe.hs | mit | 2,945 | 0 | 10 | 899 | 1,221 | 652 | 569 | 73 | 2 |
module Main where
import Control.Monad (forever)
import Reactive.Banana
import Reactive.Banana.Frameworks
import System.IO (BufferMode(..), hSetEcho, hSetBuffering, stdin)
import Render
import Bananasnake
main :: IO ()
main = do
turnOffInputBuffering
(addKeyEvent, fireKey) <- newAddHandler
network <- compile $ makeNetworkDescription addKeyEvent
actuate network
forever (getChar >>= fireKey)
makeNetworkDescription :: Frameworks t => AddHandler Char -> Moment t ()
makeNetworkDescription addKeyEvent = do
keyEvents <- fromAddHandler addKeyEvent
game <- changes $ snake keyEvents
reactimate' $ fmap (fmap render) game
-- | Put stdin into no-echo, unbuffered mode so individual keypresses
-- reach the game immediately.
turnOffInputBuffering :: IO ()
turnOffInputBuffering =
    hSetEcho stdin False >> hSetBuffering stdin NoBuffering
| CRogers/bananasnake | src/Main.hs | mit | 754 | 0 | 10 | 113 | 225 | 113 | 112 | 23 | 1 |
module Mozart.Composition (compose) where
import Control.Concurrent.Async
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Char8 as CL
import Data.List
import Data.String.Utils (replace)
import Mozart.Configuration
import Mozart.Envelope as E
import Network.HTTP
import Network.URI (parseURI)
compose :: BL.ByteString -> BL.ByteString -> IO String
compose sourceConfig template = do
case decodeConfiguration sourceConfig of
Left err ->
error $ "Invalid Configuration: " ++ err
Right config -> do
envelopes <- mapConcurrently fetchComponent (contents config)
return $ renderComponents template envelopes
-- | Substitute the placeholder variables in the page template with the
-- fragments collected from each component envelope. Head and body-last
-- fragments are de-duplicated via 'combineComponents'; inline body
-- fragments keep every occurrence.
--
-- Simplified: @concatMap (++ "")@ was just an obfuscated 'concat'.
renderComponents :: BL.ByteString -> [Envelope] -> String
renderComponents template envelopes = replaceVars $ CL.unpack template
    where
        heads = concat $ combineComponents E.head envelopes
        bodyInlines = concatMap bodyInline envelopes
        bodyLasts = concat $ combineComponents bodyLast envelopes
        replaceVars = replace "{{head}}" heads
                    . replace "{{bodyInline}}" bodyInlines
                    . replace "{{bodyLast}}" bodyLasts
-- | Collect a component field from every envelope, concatenated in
-- order with duplicates removed ('nub' keeps the first occurrence).
-- Simplified from @nub $ concat $ map f@; note 'nub' is O(n^2), which
-- is fine for the small per-page component lists used here.
combineComponents :: (Envelope -> [String]) -> [Envelope] -> [String]
combineComponents f = nub . concatMap f
fetchComponent :: Component -> IO Envelope
fetchComponent cmp =
let uri = endpoint cmp
in do
res <- simpleHTTP (makeLazyRequest uri)
body <- getResponseBody res
case decodeEnvelope body of
Left _ ->
error $ "Invalid response from " ++ uri
Right envelope ->
return envelope
makeLazyRequest :: String -> Request BL.ByteString
makeLazyRequest url =
case parseURI url of
Nothing -> error $ "Invalid component endpoint: " ++ url
Just uri -> mkRequest GET uri
| wildlyinaccurate/mozart | src/Mozart/Composition.hs | mit | 1,878 | 0 | 15 | 440 | 515 | 263 | 252 | 42 | 2 |
module Problem25 where

-- | Project Euler 25: print the index of the first Fibonacci number
-- with 1000 decimal digits.
main :: IO ()
main = print firstIndex
  where
    firstIndex =
        fst . head . filter ((>= 1000) . digitCount . snd) $ zip [1 ..] fib

-- | Number of decimal digits of a non-negative integer.
digitCount :: Integer -> Int
digitCount = length . show

-- | The Fibonacci sequence starting 1, 1, 2, 3, ...
fib :: [Integer]
fib = go 1 1
  where
    go a b = a : go b (a + b)
| DevJac/haskell-project-euler | src/Problem25.hs | mit | 237 | 0 | 11 | 60 | 111 | 61 | 50 | 7 | 1 |
module LSys.Sierpinski (sierpinski) where
import LSys.LSys
import Turtle
import Data.Set (fromList)
sierpinski :: LSys Char
sierpinski = LSys {
_alph = fromList "AB+-",
_state = "A",
_rule = sieRule,
_display = sieDraw
}
-- | L-system production rules for the Sierpinski arrowhead curve:
-- 'A' and 'B' expand to their rewrite strings, every other symbol is
-- returned unchanged as a singleton string.
sieRule :: Char -> String
sieRule c = case c of
    'A' -> "+B-A-B+"
    'B' -> "-A+B+A-"
    _   -> [c]
sieDraw :: [Char] -> IO ()
sieDraw = turtleDraw "Sierpinski" (Turtle (0,0) 0) . map toCommand
toCommand :: Char -> Command
toCommand 'A' = Draw baseLength
toCommand 'B' = Draw baseLength
toCommand '+' = Turn (pi/3)
toCommand '-' = Turn (-pi/3)
baseLength :: Double
baseLength = 2
| lesguillemets/lsyst.hs | src/LSys/Sierpinski.hs | mit | 617 | 0 | 9 | 119 | 234 | 126 | 108 | 23 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.Combinators as CC
import Control.Monad.Trans.Resource
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import Data.Maybe
import Data.Monoid
import qualified Data.Text as DT
import qualified Data.Text.Encoding as DTE
import qualified Data.Trie as BST
import qualified Data.Trie.Convenience as BSTC
type Str = BS.ByteString
type Index = BST.Trie Int
countWds :: FilePath -> IO Int
countWds p =
runResourceT $ CB.sourceFile
("/home/danl/p/l/melang/lang/de/base/" ++ p ++ "-wds.txt")
$$ CB.lines
=$ CC.foldl countFold 0
where
countFold !size !line = size + count
where
(_wd:countStr:_) = BSC.split '\t' line
Just (count, _) = BSC.readInt countStr
loadWds :: FilePath -> IO (Int, Index)
loadWds p =
runResourceT $ CB.sourceFile
("/home/danl/p/l/melang/lang/de/base/" ++ p ++ "-wds.txt")
$$ CB.lines
=$ CC.foldl loadFold (0, BST.empty)
where
loadFold (!size, !index) !line =
seq size2 $ seq index2
(size2, index2)
where
(wd:countStr:_) = BSC.split '\t' line
Just (count, _) = BSC.readInt countStr
size2 = size + count
wd2 = DTE.encodeUtf8 . DT.toLower $ DTE.decodeUtf8 wd
index2 = seq wd2 $ BSTC.insertWith (+) wd2 count index
main :: IO ()
main = do
gbSize <- countWds "gb"
putStrLn $ "Did gb: " ++ show gbSize
(wiktSize, wiktIndex) <- loadWds "wikt"
putStrLn $ "Did wikt: " ++ show wiktSize
(wikiSize, wikiIndex) <- loadWds "wiki"
putStrLn $ "Did wiki: " ++ show wikiSize
let myLookup :: Str -> Maybe (Str, Float)
myLookup line =
case (BST.lookup wd2 wiktIndex, BST.lookup wd2 wikiIndex) of
(Just wiktCount, Just wikiCount) ->
Just (wd, minimum [gbFrac, wiktFrac, wikiFrac])
where
wiktFrac = fromIntegral wiktCount / fromIntegral wiktSize
wikiFrac = fromIntegral wikiCount / fromIntegral wikiSize
_ -> Nothing
where
(wd:countStr:_) = BSC.split '\t' line
wd2 = DTE.encodeUtf8 . DT.toLower $ DTE.decodeUtf8 wd
Just (count, _) = BSC.readInt countStr
gbFrac = fromIntegral count / fromIntegral gbSize
myMap :: (Str, Float) -> Str
myMap (wd, count) = wd <> "\t" <>
BSC.pack (show $ log (1 / count) / log 10) <> "\n"
runResourceT $
CC.sourceFile "/home/danl/p/l/melang/lang/de/base/gb-wds.txt"
$$ CB.lines
=$ CC.map myLookup
=$ CC.filter isJust
=$ CC.map (myMap . fromJust)
=$ CC.stdout
| dancor/melang | src/Main/gww-word-count.hs | mit | 2,817 | 0 | 18 | 792 | 887 | 464 | 423 | 69 | 2 |
-- | A list is a palindrome when it equals its own reversal.
isPalindrome :: Eq a => [a] -> Bool
isPalindrome xs = xs == reverse xs
| tamasgal/haskell_exercises | 99questions/Problem06.hs | mit | 96 | 0 | 9 | 21 | 53 | 26 | 27 | 2 | 1 |
{-# LANGUAGE CPP #-}
{- arch-tag: Path utilities main file
Copyright (C) 2004-2011 John Goerzen <[email protected]>
All rights reserved.
For license and copyright information, see the file LICENSE
-}
{- |
Module : System.Path
Copyright : Copyright (C) 2004-2011 John Goerzen
License : BSD3
Maintainer : John Goerzen <[email protected]>
Stability : provisional
Portability: portable
This module provides various helpful utilities for dealing with path and
file names, directories, and related support.
Written by John Goerzen, jgoerzen\@complete.org
-}
module System.Path(-- * Name processing
splitExt, absNormPath, secureAbsNormPath,
-- * Directory Processing
recurseDir, recurseDirStat, recursiveRemove,
bracketCWD,
-- * Temporary Directories
mktmpdir, brackettmpdir, brackettmpdirCWD
)
where
import Data.List
import Data.List.Utils
#if !(defined(mingw32_HOST_OS) || defined(mingw32_TARGET_OS) || defined(__MINGW32__))
import System.Posix.Files
import System.Posix.Directory (createDirectory)
import System.Posix.Temp
import System.Directory hiding (createDirectory)
#else
import System.Directory
#endif
import System.FilePath ((</>), pathSeparator, isPathSeparator)
import Control.Exception
import System.IO
import System.Path.NameManip
import System.IO.HVFS.Utils
{- | Splits a pathname into a tuple representing the root of the name and
the extension. The extension is considered to be all characters from the last
dot after the last slash to the end. Either returned string may be empty. -}
-- FIXME: See 6.4 API when released.
splitExt :: String -> (String, String)
splitExt path =
    let dotindex = alwaysElemRIndex '.' path
        slashindex = alwaysElemRIndex pathSeparator path
        in
        -- 'alwaysElemRIndex' yields -1 when the element is absent, so a
        -- missing dot, or a dot at/before the last path separator (as
        -- in "a.b/c"), means the whole name is the root and the
        -- extension is empty.
        if dotindex <= slashindex
           then (path, "")
           else ((take dotindex path), (drop dotindex path))
{- | Make an absolute, normalized version of a path with all double slashes,
dot, and dotdot entries removed.
The first parameter is the base for the absolut calculation; in many cases,
it would correspond to the current working directory.
The second parameter is the pathname to transform. If it is already absolute,
the first parameter is ignored.
Nothing may be returned if there's an error; for instance, too many @..@ entries
for the given path.
-}
absNormPath :: String -- ^ Absolute path for use with starting directory
-> String -- ^ The path name to make absolute
-> Maybe String -- ^ Result
absNormPath base thepath =
let abs = absolute_path_by base thepath
in case guess_dotdot (normalise_path abs) of
Just "." -> Just [pathSeparator]
x -> x
{- | Like absNormPath, but returns Nothing if the generated result is not
the passed base path or a subdirectory thereof. -}
secureAbsNormPath :: String -- ^ Absolute path for use with starting directory
-> String -- ^ The path to make absolute
-> Maybe String
secureAbsNormPath base s = do p <- absNormPath base s
if startswith base p
then return p
else fail ""
{- | Creates a temporary directory for your use.
The passed string should be a template suitable for mkstemp; that is, end with
@\"XXXXXX\"@.
Your string should probably start with the value returned from
System.Directory.getTemporaryDirectory.
The name of the directory created will be returned.
-}
mktmpdir :: String -> IO String
#if !(defined(mingw32_HOST_OS) || defined(mingw32_TARGET_OS) || defined(__MINGW32__))
mktmpdir x =
do y <- mkstemp x
let (dirname, h) = y
hClose h
removeFile dirname
createDirectory dirname 0o700
return dirname
#else
#ifdef __GLASGOW_HASKELL__
mktmpdir x =
do (fp, h) <- openTempFile "" x
hClose h
removeFile fp
createDirectory fp
return fp
#else
mktmpdir _ = fail "mktmpdir not supported on Windows unless you have GHC"
#endif
#endif
{- | Creates a temporary directory for your use via 'mktmpdir',
runs the specified action (passing in the directory name), then
removes the directory and all its contents when the action completes (or raises
an exception. -}
brackettmpdir :: String -> (String -> IO a) -> IO a
brackettmpdir x action = do tmpdir <- mktmpdir x
finally (action tmpdir)
(recursiveRemove SystemFS tmpdir)
-- | Run an I\/O action with the current working directory temporarily
-- set to the given path; the original directory is restored afterwards,
-- even when the action throws an exception.
bracketCWD :: FilePath -> IO a -> IO a
bracketCWD fp action = do
    previous <- getCurrentDirectory
    setCurrentDirectory fp
    action `finally` setCurrentDirectory previous
-- | Create a temporary directory from the template, run the action
-- with the CWD set to it, then restore the CWD and delete the
-- directory and its contents -- even when the action throws.
brackettmpdirCWD :: String -> IO a -> IO a
brackettmpdirCWD template action =
    brackettmpdir template (`bracketCWD` action)
| haskellbr/missingh | missingh-all/src/System/Path.hs | mit | 5,378 | 0 | 11 | 1,387 | 618 | 330 | 288 | 59 | 2 |
-- | The user module, for user logic.
module ChatCore.ChatUser
( ChatUser (..)
, chatUserName
, bUserNetworks
, bUserClients
, chatUser
) where
import Control.Applicative
import Control.Lens
import Control.Monad
import Data.Acid
import qualified Data.IxSet as I
import qualified Data.Map as M
import Data.Monoid
import FRP.Sodium
import FRP.Sodium.IO
import ChatCore.Events
import ChatCore.ChatNetwork
import ChatCore.Protocol
import ChatCore.State
import ChatCore.Types
import ChatCore.Util.FRP
-- | The main state object for users.
data ChatUser = ChatUser
{ _chatUserName :: ChatUserName
, _bUserNetworks :: Behavior (I.IxSet ChatNetwork)
, _bUserClients :: Behavior [RemoteClientInfo]
, cleanupChatUser :: IO ()
}
$(makeLenses ''ChatUser)
--------------------------------------------------------------------------------
-- Networks
--------------------------------------------------------------------------------
behNetworks :: AcidState ChatCoreState
-> ChatUserName
-> Event ChatCoreNetwork
-> Event ClientCommand
-> Reactive (Behavior (I.IxSet ChatNetwork))
behNetworks acid uName eNewNetwork eClientCmd =
accum I.empty eDoAddNetwork
where
initNet netSt = chatNetwork uName netSt acid
$ filterE (isForNetwork (netSt ^. netStName)) eClientCmd
eDoAddNetwork = I.insert <$> execute (initNet <$> eNewNetwork)
-- | True if the given client command should be handled by the given network.
isForNetwork :: ChatNetworkName -> ClientCommand -> Bool
isForNetwork netName (SendMessage { sendMsgNetwork = netName' }) = netName == netName'
isForNetwork netName (JoinChannel netName' _) = netName == netName'
isForNetwork netName (PartChannel netName' _ _) = netName == netName'
-- NOTE(review): the catch-all below is commented out, so this match is
-- non-exhaustive -- any other 'ClientCommand' constructor reaching here
-- would crash at runtime. Confirm these three are the only commands
-- routed to networks, or restore the catch-all.
-- isForNetwork _ _ = False
--------------------------------------------------------------------------------
-- Clients
--------------------------------------------------------------------------------
behRemoteClients :: ChatUserName
-> Event RemoteClient
-> Behavior (I.IxSet ChatNetwork)
-> Event CoreEvent
-> Reactive (Behavior (M.Map Int RemoteClientInfo), IO ())
behRemoteClients uName eNewConn bNetworks eCoreEvent = do
rec
-- Fires when a client disconnects.
let eClientDisconnect :: Event (Int, RemoteClientInfo)
eClientDisconnect = switchMergeWith mkDCEvent (M.toList <$> bClientMap)
-- Fires the ID of a client when it disconnects.
let eClientDisconnectId :: Event Int
eClientDisconnectId = fst <$> eClientDisconnect
-- Event streams of functions to add and remove clients.
let eDoAddClient = uncurry M.insert <$> eNewClient
eDoRemoveClient = M.delete <$> eClientDisconnectId
-- Starts new clients and fires events for each client started.
eNewClient <- tagIds $ executeAsyncIO (($ clientCtx) <$> eNewConn)
-- The client list is a map that is modified by functions coming out of the
-- `eDo(Add|Remove)Client` events.
bClientMap <- accum M.empty (eDoAddClient <> eDoRemoveClient)
-- Cleanup clients when they disconnect.
clean <- listen eClientDisconnect cleanClient
return (bClientMap, clean)
where
clientCtx = RemoteClientCtx uName eCoreEvent bNetworks
-- | Fires an event when the given client disconnects.
mkDCEvent :: (Int, RemoteClientInfo) -> Event (Int, RemoteClientInfo)
mkDCEvent (cid, client) = const (cid, client) <$> rcDisconnect client
-- | Runs cleanup actions for the given client.
cleanClient :: (Int, RemoteClientInfo) -> IO ()
cleanClient (cid, client) = do
putStrLn ("Client " <> show cid <> " disconnected.")
-- FIXME: If the client's cleanup action calls sync, this will lock up.
cleanupRemoteClient client
--------------------------------------------------------------------------------
-- Main
--------------------------------------------------------------------------------
-- | Set up behaviors and events for the given user.
chatUser :: ChatCoreUser
-> AcidState ChatCoreState
-> Event RemoteClient -- ^ New clients for this user.
-> Reactive ChatUser
chatUser user acid eNewConn = do
(eNewNetwork, pushNewNetwork) <- newEvent
let uName = user ^. usrStName
rec
-- Networks.
bNetworks <- behNetworks acid uName eNewNetwork eClientCmd
let bNetworkList = I.toList <$> bNetworks
-- Receive core events from networks.
let eCoreEvent = switchMergeWith (view eNetworkCoreEvt) bNetworkList
-- Remote clients.
(bClients, cleanClients) <- behRemoteClients uName eNewConn bNetworks eCoreEvent
let bClientList = map snd <$> M.toList <$> bClients
-- Receive client commands from all clients.
let eClientCmd = switchMergeWith rcCommands bClientList
-- FIXME: Removing this line causes core events to not be received by clients.
_ <- listen eCoreEvent print
let bCleanupNetworks = map cleanupChatNetwork <$> bNetworkList
cleanup = do
cleanClients
-- Run cleanup actions for all the networks.
join (sequence_ <$> sync (sample bCleanupNetworks))
-- Push an add network event for all the networks.
mapM_ pushNewNetwork $ I.toList (user ^. usrStNetworks)
return $ ChatUser uName bNetworks bClientList cleanup
| Forkk/ChatCore | ChatCore/ChatUser.hs | mit | 5,458 | 8 | 15 | 1,179 | 1,039 | 552 | 487 | -1 | -1 |
module Experiments where
import InterBasics(join)
import Control.Monad(replicateM)
import System.IO(hSetBuffering,stdout,BufferMode(LineBuffering))
-- | Read exactly n lines from stdin and return their concatenation
-- (newline separators are not preserved). As noted originally, this
-- still ends up consuming the whole of the input when used with the
-- surrounding interactions.
getNlines :: Int -> IO String
getNlines 0 = return []
getNlines n = (++) <$> getLine <*> getNlines (n - 1)
{- using getNlines in an interaction -}
interactN :: Int -> (String -> String) -> IO ()
interactN n f =
do input <- replicateM n getLine
putStr (f (join input))
{- setting buffering before doing an interaction -}
interactL :: (String -> String) -> IO ()
interactL f =
do hSetBuffering stdout LineBuffering
interact f
{- "bare" interactions -}
necho :: String -> String
necho ~(x:xs) = "Prompt: " ++ [x] ++ "\n" ++ necho xs
| simonjohnthompson/Interaction | Experiments.hs | mit | 830 | 0 | 11 | 196 | 267 | 137 | 130 | 21 | 1 |
module AI.Lab1.Lab1 where
import Data.Maybe
import System.IO
import qualified Data.Vector.Unboxed as VU
-- | Sampling time for all samples in milliseconds
samplingTime :: Int
samplingTime = 300

-- | Sampling rate of a signal (samples per second), assuming the whole
-- signal spans 'samplingTime' milliseconds.
samplingRate :: [Int] -> Double
samplingRate samples =
    let sampleCount = fromIntegral (length samples)
        seconds     = fromIntegral samplingTime / 1000.0
    in sampleCount / seconds
-- Apply ideal operator delay to a signal (S[n]) for a delay of m ms:
-- shift the signal right by the corresponding number of samples,
-- padding the front with zeroes and dropping the tail.
idealOperatorDelay :: [Int] -> Int -> [Int]
-- Delaying by 0 ms gives back the same signal
idealOperatorDelay signal 0 = signal
idealOperatorDelay signal ms =
    replicate nShift 0 ++ take (length signal - nShift) signal
  where
    -- samples per millisecond, derived from the fixed sampling time
    nShift = (length signal `div` samplingTime) * ms
-- Apply moving average with k1, k2 ms
-- to a given signal
-- Each output y[n] averages the samples in a window reaching nk1
-- samples back and nk2 samples forward from n.
movingAverage :: [Int] -> Int -> Int -> [Double]
movingAverage signal 0 0 = map fromIntegral signal
movingAverage signal k1 k2 =
    map y [0,1..(length signal - 1)]
    where
        -- NOTE(review): for signals shorter than samplingTime samples this
        -- is 0, which makes the window empty (nk1 = nk2 = 0 with the
        -- 1-based clamp below) and y 0 evaluates 0/0 = NaN -- confirm
        -- callers always pass full-length signals.
        samplesPerMs = (length signal `div` samplingTime)
        nk1 = samplesPerMs * k1
        nk2 = samplesPerMs * k2
        y :: Int -> Double
        -- a/b are 1-based window bounds; 'drop (a - 1)' converts back to
        -- the 0-based list index.
        y n = let a = max 1 (n - nk1)
                  b = min (length signal - 1) (n + nk2)
              in fromIntegral (sum $ (take (b - a + 1)) $
                                     drop (a - 1) signal) /
                 fromIntegral (b - a + 1)
-- Rectangular Window function which takes the size of the
-- window "win_sz" and "n", returns 1 if n is inside the window
-- (0 <= n < win_sz), 0 otherwise.
rectWindow :: Int -> Int -> Int
rectWindow win_sz n =
    if n >= 0 && n < win_sz then 1 else 0
-- Apply convolution to a signal with a window
-- of given length in milliseconds
-- Computes y[n] = sum_k s[k] * w[n-k] where w is the rectangular
-- window; k is restricted to the indices where w[n-k] can be non-zero.
convolute :: VU.Vector Int -> Int -> VU.Vector Int
convolute signal win_sz =
    VU.map y $ VU.fromList [0..(numSamples - 1)]
    where y :: Int -> Int
          y n = VU.foldl' (+) 0 $      -- Sum the results
                -- s[k] * w[n - k]
                VU.map (\k -> (signal VU.! k) *
                              (rectWindow win_sz_samples (n - k))) $
                -- k values: clamp the window to the signal's bounds
                VU.fromList [(max (n - win_sz_samples + 1) 0) ..
                             (min n (numSamples - 1))] -- k values
          -- Number of samples in the Window of win_sz milliseconds
          win_sz_samples = (numSamples `div` samplingTime) * win_sz
          -- Total number of samples supplied to the convolution function
          numSamples = VU.length signal
-- Calculate energy for a given signal with a window
-- of given length in milliseconds
-- Short-time energy: e[n] = sum_k (s[k]^2 * w[n-k]) / N, where N is the
-- window length in samples (even when the window is clipped at the edges).
energy :: VU.Vector Int -> Int -> VU.Vector Double
energy signal win_sz = VU.map e $ VU.fromList [0..(numSamples - 1)]
    where e :: Int -> Double
          -- sum(s[k]^2 * w[n-k])/N
          e n = (fromIntegral $ sumRes n) / (fromIntegral win_sz_samples)
          sumRes :: Int -> Int
          sumRes n =
              VU.foldl'(+) 0 $     -- Sum results
              -- s[k]^2 * w[n-k]
              VU.map (\k -> ((signal VU.! k) ^ 2) *
                            (rectWindow win_sz_samples (n - k))) $
              -- k values, clamped to the signal's bounds
              VU.fromList [(max (n - win_sz_samples + 1) 0) ..
                           (min n (numSamples - 1))]
          -- Number of samples in the Window of win_sz milliseconds
          win_sz_samples = (numSamples `div` samplingTime) * win_sz
          -- Total number of samples supplied to the convolution function
          numSamples = VU.length signal
-- Calculate magnitude for a given signal with a window
-- of given length in milliseconds
-- Short-time magnitude: m[n] = sum_k (|s[k]| * w[n-k]) / N; same window
-- handling as 'energy' but with |s[k]| instead of s[k]^2.
magnitude :: VU.Vector Int -> Int -> VU.Vector Double
magnitude signal win_sz = VU.map m $ VU.fromList [0..(numSamples - 1)]
    where m :: Int -> Double
          -- sum(|s[k]| * w[n-k])/N
          m n = (fromIntegral $ sumRes n) / (fromIntegral win_sz_samples)
          sumRes n = VU.foldl'(+) 0 $     -- Sum results
                     -- |s[k]| * w[n-k]
                     VU.map (\k -> ((abs (signal VU.! k))) *
                                   (rectWindow win_sz_samples (n - k))) $
                     -- k values, clamped to the signal's bounds
                     VU.fromList [(max (n - win_sz_samples + 1) 0) ..
                                  (min n (numSamples - 1))]
          -- Number of samples in the Window of win_sz milliseconds
          win_sz_samples = (numSamples `div` samplingTime) * win_sz
          -- Total number of samples supplied to the convolution function
          numSamples = VU.length signal
-- Calculate zero crossing rate for a given signal with a window
-- of given length in milliseconds
-- z[n] = sum_k |sgn(s[k]) - sgn(s[k-1])| * w[n-k] / (2N); k starts at 1
-- so s[k-1] is always in range.
zeroCrossingRate :: VU.Vector Int -> Int -> VU.Vector Double
zeroCrossingRate signal win_sz =
    VU.map m $ VU.fromList [0..(numSamples - 1)]
    where m :: Int -> Double
          -- sum(|s[k]| * w[n-k])/N
          m n = (fromIntegral $ sumRes n) /
                (2 * (fromIntegral win_sz_samples))
          sumRes n = VU.foldl'(+) 0 $     -- Sum results
                     -- |sgn(s[k] - sgn(s[k-1])| * w[n-k]
                     VU.map (\k -> (abs (signum (signal VU.! k) -
                                        (signum (signal VU.! (k - 1))))) *
                                   (rectWindow win_sz_samples (n - k)))
                     -- k values (lower bound 1 guards the k-1 access)
                     $ VU.fromList
                       [(max (n - win_sz_samples + 1) 1) ..
                        (min n (numSamples - 1))]
          -- Number of samples in the Window of win_sz milliseconds
          win_sz_samples = (numSamples `div` samplingTime) * win_sz
          -- Total number of samples supplied to the convolution function
          numSamples = VU.length signal
-- Write Samples to CSV file
-- Each row is "time,value"; the x axis is scaled so the whole signal
-- spans 'samplingTime' milliseconds.
plotToCsv :: Real a => String -> [a] -> IO ()
plotToCsv name graph = writeFile (name ++ ".csv") (unlines rows)
  where
    rows = [ show x ++ "," ++ show y | (x, y) <- points ys ]
    ys   = map realToDouble graph
    -- Pair each sample with its time coordinate.
    points :: [Double] -> [(Double, Double)]
    points samples =
        let step = fromIntegral samplingTime / fromIntegral (length samples)
        in zip (map (\i -> fromIntegral i * step) [0 :: Integer ..]) samples
    realToDouble :: Real b => b -> Double
    realToDouble = fromRational . toRational
| RossMeikleham/AI | src/AI/Lab1/Lab1.hs | mit | 6,810 | 0 | 24 | 2,537 | 1,728 | 933 | 795 | 98 | 1 |
{-# OPTIONS_GHC -Wall #-}
-- Dependant on cabal packages: sdl2, wave.
module Main where
import qualified Control.Concurrent as C
--import qualified Control.Monad as M
import qualified Data.Vector.Storable.Mutable as V
import qualified Data.Set as S
import Foreign.ForeignPtr as P
import qualified SDL
import qualified SDL.Audio as A
import qualified Codec.Audio.Wave as W
import qualified System.IO as IO
import qualified Statistics.Sample as St
-- | SDL capture-device configuration: mono, signed 16-bit, 48 kHz,
-- 4096-sample buffers.  The callback dumps each captured buffer's raw
-- bytes straight to the given handle.
micSpec :: IO.Handle -> A.OpenDeviceSpec
micSpec h = A.OpenDeviceSpec {A.openDeviceFreq = A.Mandate 48000
                             ,A.openDeviceFormat = A.Mandate A.Signed16BitNativeAudio
                             ,A.openDeviceChannels = A.Mandate A.Mono
                             ,A.openDeviceSamples = 4096
                             -- Write 'size' bytes from the buffer's pointer to the handle.
                             ,A.openDeviceCallback = \_ (V.MVector size ptr) -> P.withForeignPtr ptr (\p -> IO.hPutBuf h p size)
                             ,A.openDeviceUsage = A.ForCapture
                             ,A.openDeviceName = Nothing}
-- | WAVE header template matching 'micSpec': vanilla PCM, 16-bit,
-- 48 kHz, single (front-center) channel.  Offsets/sizes are zero here;
-- presumably the wave library fills them in when writing -- confirm
-- against Codec.Audio.Wave's writeWaveFile docs.
waveSpec :: W.Wave
waveSpec = W.Wave {W.waveFileFormat = W.WaveVanilla
                  , W.waveSampleRate = 48000
                  , W.waveSampleFormat = W.SampleFormatPcmInt 16
                  , W.waveChannelMask = S.singleton W.SpeakerFrontCenter
                  , W.waveDataOffset = 0
                  , W.waveDataSize = 0
                  , W.waveSamplesTotal = 0
                  , W.waveOtherChunks = []}
-- | Open the default capture device and record into the handle for ten
-- seconds (the device callback in 'micSpec' does the actual writing).
record :: IO.Handle -> IO ()
record h = do
  SDL.initialize [SDL.InitAudio]
  (dev, _) <- A.openAudioDevice $ micSpec h
  -- Unpausing a capture device starts recording.
  A.setAudioDevicePlaybackState dev A.Play
  -- _ <- M.forever (C.threadDelay maxBound)
  _ <- C.threadDelay 10000000   -- capture window: 10 seconds
  return ()

-- | Write the captured audio to "mic.rec" as a WAVE file.
main :: IO ()
main = W.writeWaveFile "mic.rec" waveSpec record
| MiroslavVitkov/voiceid | src/Main.hs | mit | 1,767 | 0 | 12 | 519 | 432 | 248 | 184 | 37 | 1 |
{-# LANGUAGE
GADTs,
MultiParamTypeClasses,
TypeSynonymInstances,
FunctionalDependencies,
FlexibleInstances,
FlexibleContexts,
UndecidableInstances
#-}
-- NOTE - This module isn't even used in this project. This was an experimental
-- next move that I was overzealously planning while I was playing with
-- typeclasses. Looking back years later, it probably adds no value to this
-- program so I will probably delete it, but I will doublecheck first.
module Units where
-- The nature of units
-- | A unit ties itself to exactly one numeric carrier type (the
-- functional dependency); '__' wraps a raw number into a tagged value.
class (Num num_type) => Unit unit_type num_type | unit_type -> num_type where
  __ :: num_type -> (UnitValue unit_type num_type)
  __ = UnitValue

type SafeValue = Float -- This will be a re-definition when we bring it back to the other code, so remove it then.

-- | A number tagged at the type level with its unit.
data UnitValue unit_type num_type where
    UnitValue :: (Unit unit_type num_type, Num num_type) => num_type -> UnitValue unit_type num_type
-- The nature of sorts of units

-- Order of operations
infixr 6 +:
infixr 7 *:
infixr 7 /:

-- Starting off with Progressions, with (+:), but we can have various functions over time that operate on Progressions,
-- since they'd be used as a marker of how far along a list of items (samples, etc).
-- | Marker class: units along which one can "progress" (add values).
class (Unit u num_type) => Progression u num_type where
    __dummy :: u -> u

-- Not implemented as a typeclass function because it works on Units the same way
-- | Add two values of the same Progression unit.
(+:) :: (Progression unit_type num_type) => UnitValue unit_type num_type-> UnitValue unit_type num_type -> UnitValue unit_type num_type
(+:) (UnitValue a) (UnitValue b) = UnitValue (a + b)
-- Next with how units interoperate.
-- | Encodes top = bottom * result (e.g. Cycle = Second * Hertz); '*:'
-- and '/:' move between the three units, delegating the raw numeric
-- work to 'UnitRelationshipDefault'.
class (Num t_num, Num b_num, Num r_num, Unit top t_num, Unit bottom b_num, Unit result r_num, UnitRelationshipDefault t_num b_num r_num)
    => UnitRelationship top bottom result t_num b_num r_num | top bottom -> result, top -> t_num, bottom -> b_num, result -> r_num where
    (*:) :: UnitValue bottom b_num -> UnitValue result r_num -> UnitValue top t_num
    (*:) (UnitValue a) (UnitValue b) = UnitValue $ default_mult a b
    (/:) :: UnitValue top t_num -> UnitValue bottom b_num -> UnitValue result r_num
    (/:) (UnitValue a) (UnitValue b) = UnitValue $ default_div a b

-- | Raw multiply/divide between the three carrier types, inserting
-- fromIntegral/floor conversions where Integer and SafeValue mix.
class (Num t_num, Num b_num, Num r_num) => UnitRelationshipDefault t_num b_num r_num where
    default_mult :: b_num -> r_num -> t_num
    default_div :: t_num -> b_num -> r_num

instance UnitRelationshipDefault SafeValue SafeValue SafeValue where
    default_mult b r = b * r
    default_div t b = t / b

instance UnitRelationshipDefault SafeValue SafeValue Integer where
    default_mult b r = b * (fromIntegral r)
    default_div t b = floor $ t / b

instance UnitRelationshipDefault SafeValue Integer SafeValue where
    default_mult b r = (fromIntegral b) * r
    default_div t b = t / (fromIntegral b)

instance UnitRelationshipDefault Integer Integer SafeValue where
    default_mult b r = floor $ (fromIntegral b) * r
    default_div t b = (fromIntegral t) / (fromIntegral b)

instance UnitRelationshipDefault Integer SafeValue Integer where
    default_mult b r = floor $ b * (fromIntegral r)
    default_div t b = floor $ (fromIntegral t) / b
-- Actual unit types and their interactions:
-- (Notes)
-- TimeSamplingRate = TimeSamples/Time
-- Frequency = NumCycles/Time
-- Time * Frequency = NumCycles
-- Time * SamplingRate = Samples
-- CycleSamplingRate = CycleSamples/NumCycles
-- Empty phantom tags; they only exist at the type level.
data Hertz = Hertz
data Cycle = Cycle
data Second = Second
data Sample = Sample
data Amplitude = Amplitude
data SignalValue = SignalValue
data SignalSlope = SignalSlope
data SamplePerSecond = SamplePerSecond

-- Some shortcuts, since the type of __ aka UnitValue is ambiguous, and Hertz etc are only a parameter.
-- Would be nice if these were automatically made
_second = __ :: SafeValue -> UnitValue Second SafeValue
_sample = __ :: Integer -> UnitValue Sample Integer
_cycle = __ :: SafeValue -> UnitValue Cycle SafeValue
_hertz = __ :: SafeValue -> UnitValue Hertz SafeValue
_amplitude = __ :: SafeValue -> UnitValue Amplitude SafeValue
_signalvalue = __ :: SafeValue -> UnitValue SignalValue SafeValue
-- Declare the carrier type of each unit (methods come from defaults).
instance Unit Hertz SafeValue
instance Unit SamplePerSecond Integer
instance Unit Sample Integer
-- Not a unit in the sense of physics. One cycle represents start to finish of a sine wave in a timeless domain
instance Unit Cycle SafeValue
instance Unit Second SafeValue
instance Unit Amplitude SafeValue -- Not a unit in the sense of physics. One amplitude represents the ability to transform a SignalValue
-- Definitely not a unit in the sense of physics. We (explicitly) break unit laws by converting to other units.
instance Unit SignalValue SafeValue
instance Unit SignalSlope SafeValue

-- Units that can be accumulated with (+:).
instance Progression Cycle SafeValue
instance Progression Second SafeValue
instance Progression Sample Integer

-- top = bottom * result relationships (see UnitRelationship).
instance UnitRelationship Cycle Second Hertz SafeValue SafeValue SafeValue
-- sortof lame that I have to do the commutative manually, but I actually don't want it
-- automatically implied anyway. for instance, I see no reason (yet) to end up with
-- a sampling rate as a result, that should actually be constant
instance UnitRelationship Cycle Hertz Second SafeValue SafeValue SafeValue
instance UnitRelationship Sample Second SamplePerSecond Integer SafeValue Integer
instance UnitRelationship Sample SamplePerSecond Second Integer Integer SafeValue
instance UnitRelationship SignalValue Sample SignalSlope SafeValue Integer SafeValue
instance UnitRelationship SignalValue SignalSlope Sample SafeValue SafeValue Integer
-- This will make sure Amplitude inputs are used correctly.
instance UnitRelationship Amplitude SignalValue SignalValue SafeValue SafeValue SafeValue
-- | Frequency is cycles per unit time.
get_frequency :: UnitValue Second SafeValue -> UnitValue Cycle SafeValue -> UnitValue Hertz SafeValue
get_frequency s c = c /: s

-- | Duration is cycles divided by frequency.
get_seconds :: UnitValue Hertz SafeValue -> UnitValue Cycle SafeValue -> UnitValue Second SafeValue
get_seconds h c = c /: h

-- | Add five seconds to a time value.  A proper top-level signature
-- (matching the style of the two functions above) replaces the old
-- inline annotation on the argument.
add_5_seconds :: UnitValue Second SafeValue -> UnitValue Second SafeValue
add_5_seconds s = s +: _second 5
| orblivion/Haskell-Synth | Units.hs | mit | 6,050 | 0 | 9 | 1,093 | 1,280 | 669 | 611 | 83 | 1 |
{- |
Copyright: (c) 2022 Kowainik
SPDX-License-Identifier: MIT
Maintainer: Kowainik <[email protected]>
Full-featured test project
-}
module FullBatteries
( projectName
) where
-- | Name of this example/test project, as advertised by the module.
projectName :: String
projectName = "full-batteries"
| vrom911/hs-init | summoner-cli/examples/full-batteries/src/FullBatteries.hs | mit | 244 | 0 | 4 | 39 | 20 | 13 | 7 | 4 | 1 |
module Examples.EX2 where
import Control.Lens
import Control.Lens.Setter
import Control.Monad (void)
import Control.Monad.Trans.Class (lift)
import Data.List (intersperse)
import Data.Time.LocalTime
import Twilio.IVR
-- | A phone-system account.
data User = User
    { uid     :: String  -- account id entered on the keypad
    , name    :: String  -- display name spoken back to the caller
    , balance :: Int     -- account balance
    }

-- Demo user database.
users :: [User]
users =
    [ User { uid = "1234", name = "Joe Smith", balance = 150 }
    , User { uid = "2525", name = "Jane Doe",  balance = 267 }
    , User { uid = "4321", name = "Linda Doe", balance = 350 }
    ]
-- Can use functions that return values in the monad
-- they do not need to include or terminate in a "gather"
-- they can also be recursive
-- Prompt for a 4-digit account id until it matches a known user, then
-- greet and return that user.
signin :: TwilioIVRCoroutine User
signin = do
    eid <- gather "Please enter your account id" (numDigits .~ 4)
    -- NOTE(review): 'read (uid u)' converts the stored id to whatever
    -- numeric type 'gather' yields; the case is non-exhaustive if two
    -- users ever share an id -- fine for the demo data above.
    case filter (\u -> read (uid u) == eid) users of
        [] -> do
            say "Sorry, we don't recognize that id, please try again."
            signin
        [u] -> do
            say $ "Welcome " ++ (name u)
            return u
-- Greet by local time of day, sign the caller in, read their balance.
account :: Call -> TwilioIVRCoroutine ()
account call = do
    -- work in the IO monad
    (ZonedTime (LocalTime _ timeOfDay) _) <- lift getZonedTime
    let hours = todHour timeOfDay
    -- morning before noon, afternoon until 8pm, evening after
    say $ "Good " ++ if (hours < 12) then "morning"
                     else if (hours < 20) then "afternoon"
                     else "evening"
    user <- signin
    say $ "Your account balance is " ++ (show $ balance user)
    hangup -- Not actually needed
| steven777400/TwilioIVR | src/Examples/EX2.hs | mit | 1,352 | 0 | 14 | 380 | 373 | 197 | 176 | 36 | 3 |
#!/usr/bin/env stack
{- stack
--resolver nightly-2022-01-10
script
--package base
-}
-- | Minimal stack-script entry point: print a greeting.
main :: IO ()
main = putStrLn "Hello, World!"
| vrom911/hs-init | summoner-cli/examples/stackScript.hs | mit | 138 | 1 | 6 | 26 | 24 | 11 | 13 | 2 | 1 |
-- Project Euler 14: print the start value below one million with the
-- longest Collatz chain (the seed candidate is 1, whose chain length is 1).
main = print $ answer 1 $ length $ collatz 1
-- | Scan candidates n..999999 and return the one whose Collatz chain is
-- longest; @x@ is the incumbent best candidate.  The incumbent's chain
-- length is carried in an accumulator instead of being recomputed on
-- every step (the original re-ran @length (collatz x)@ each iteration).
answer :: Int -> Int -> Int
answer n x = go n x (length (collatz x))
  where
    -- go current best bestLen: same termination and strict ">" tie
    -- handling as the original comparison.
    go 1000000 best _ = best
    go m best bestLen
        | len > bestLen = go (m + 1) m len
        | otherwise     = go (m + 1) best bestLen
      where
        len = length (collatz m)

-- | The Collatz sequence starting at n, including n and the final 1.
collatz :: Int -> [Int]
collatz n | n == 1    = [n]
          | even n    = n : (collatz $ n `quot` 2)
          | otherwise = n : (collatz $ 3*n + 1)
| lekto/haskell | Project-Euler-Solutions/problem0014.hs | mit | 375 | 1 | 10 | 135 | 215 | 105 | 110 | 10 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Ketchup.Routing
( Route
, match
, prefix
, route
, useHandler
) where
import qualified Data.ByteString.Char8 as B
import Ketchup.Httpd
import Ketchup.Utils
import Network
-- | A handler that also receives a by-name lookup for URL parameters.
type Route = Socket -> HTTPRequest -> (B.ByteString -> Maybe B.ByteString) -> IO ()
-- | Decides whether an URL matches, yielding any captured arguments.
type Matcher = B.ByteString -> (Bool, Arguments)
-- | Parameters captured from an URL: none, or name/value pairs.
data Arguments = None | Parameters [(B.ByteString, B.ByteString)]
                 deriving Show
-- |Router function
-- Takes a list of routes and iterates through them for every request
route :: [(Matcher, Route)] -- ^ Routes
      -> Handler
route [] handle _ = sendNotFound handle          -- no matcher accepted: 404
route (r:routes) handle request
    | isMatch = (snd r) handle request (get params)  -- dispatch with parameter lookup
    | otherwise = route routes handle request        -- fall through to the next route
    where
    (isMatch, params) = (fst r) (uri request)
-- |Wrap a handler in a route
-- Adapts a parameter-less 'Handler' so it fits where a 'Route' (which
-- additionally receives the parameter lookup) is expected.
useHandler :: Handler -> Route
useHandler handler = \hnd req _params -> handler hnd req
-- |Create a matchable template with parameters (:param)
-- Splits both the template and the URL on '/' and delegates to 'parse'.
match :: B.ByteString -> Matcher
match template url = (matched, Parameters captured)
  where
    (matched, captured) = parse (segments url) (segments template) []
    segments            = B.split '/'
-- |Tries to match an URL against a template and returns any found
-- parameters.  Template parts starting with ':' capture the matching
-- URL part under that name; a trailing empty URL segment is tolerated.
parse :: [B.ByteString]                     -- ^ The parts of the splitted URL
      -> [B.ByteString]                     -- ^ The parts of the template to match the URL against
      -> [(B.ByteString, B.ByteString)]     -- ^ The parameters accumulator
      -> (Bool, [(B.ByteString, B.ByteString)])
parse [] [] acc = (True, acc)
parse [u] [] acc
    | B.null u = (True, acc)                -- lone trailing slash
parse _ [] _ = (False, [])
parse [] _ _ = (False, [])
parse (u:us) (t:ts) acc
    | B.null t        = parse us ts acc              -- skip empty template part
    | B.null u        = parse us (t:ts) acc          -- skip empty URL part
    | B.head t == ':' = parse us ts ((B.tail t, u) : acc)  -- capture parameter
    | u == t          = parse us ts acc              -- literal segment matches
    | otherwise       = (False, [])
-- |Create a prefix matcher
-- Matches when the URL starts with the given prefix; captures nothing.
prefix :: B.ByteString -> Matcher
prefix urlPrefix url
    | urlPrefix `B.isPrefixOf` url = (True, None)
    | otherwise                    = (False, None)
-- Look up a captured parameter by name; 'None' never has parameters.
get :: Arguments -> B.ByteString -> Maybe B.ByteString
get None            _   = Nothing
get (Parameters ps) key = lookup key ps
| silverweed/ketchup | Ketchup/Routing.hs | mit | 2,466 | 0 | 11 | 714 | 762 | 409 | 353 | 49 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Page
(
gameSite
, jsInject
, makeMove
, getScore
, getGameState
) where
import Board
import Test.WebDriver
import Data.List (transpose)
import qualified Data.Aeson as A
import qualified Data.Text as T
-- | Navigate the browser to the hosted 2048 game.
gameSite :: WD ()
gameSite = openPage "http://gabrielecirulli.github.io/2048/"

-- | The page body element, used as the target for keystrokes.
pageBody :: WD Element
pageBody = findElem $ ByTag "body"

-- | Read the current score from the injected GameManager instance.
getScore :: WD Int
getScore = executeJS [] "return GameManager._instance.score"

-- | Perform a move by invoking the game's move() with the enum index.
makeMove :: Moves -> WD ()
makeMove m = do
    executeJS [] $ T.pack $ "GameManager._instance.move(" ++ (show . fromEnum $ m) ++ ")" :: WD A.Value
    return ()

-- | Read the grid; the JS returns column-major cells (empty tiles as
-- 0), so transpose to get row-major order.
getGameState :: WD Board
getGameState = do
    board <- executeJS [] "return GameManager._instance.grid.cells.map(function(col) { return col.map(function(tile) { return tile ? tile.value : 0 }) })" :: WD Board
    return . transpose $ board
-- | Capture the GameManager instance: temporarily override
-- isGameTerminated to stash 'this' in GameManager._instance, trigger it
-- with a keypress, then restore the saved original implementation.
jsInject :: WD ()
jsInject = do
    elem <- pageBody   -- note: shadows Prelude.elem locally
    funcTmp <- executeJS [] "return GameManager.prototype.isGameTerminated.toString();" :: WD String
    executeJS [] "GameManager.prototype.isGameTerminated = function() { GameManager._instance = this; return true; }" :: WD A.Value
    -- Any move key causes the game to call isGameTerminated.
    sendKeys "s" elem
    executeJS [] $ T.pack $ "eval(GameManager.prototype.isGameTerminated = " ++ funcTmp ++ ")" :: WD A.Value
    return ()
| mattvperry/AI-2048-hs | src/Page.hs | mit | 1,330 | 0 | 12 | 256 | 343 | 174 | 169 | 35 | 1 |
{-# LANGUAGE PackageImports #-}
module Netmosphere.IPFilter where
import "bytestring-trie" Data.Trie
| TOSPIO/netmosphere-core | src/Netmosphere/IPFilter.hs | mit | 105 | 0 | 4 | 14 | 13 | 9 | 4 | 3 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
module Ledger.Errors where
import Control.Exception
import Data.Text
import Data.Typeable
import Ledger.Types
-- | Exceptions raised by ledger operations.  'CommodityMismatch' marks
-- an operator applied to two amounts of differing commodities.
data LedgerException = CommodityMismatch { leCmOperator :: Text    -- ^ name of the offending operation
                                         , leCmLeft :: Amount      -- ^ left operand
                                         , leCmRight :: Amount }   -- ^ right operand
                     deriving (Show, Typeable)

instance Exception LedgerException
| ledger/commodities | Data/Commodity/Errors.hs | mit | 430 | 0 | 8 | 152 | 72 | 43 | 29 | 11 | 0 |
import Hand (Suit(..), Rank(..), Card(..), Hand(Hand))
-- | 'main' runs the main program
-- Compares a straight flush to four of a kind and prints the Ordering
-- (presumably GT, per poker ranking -- confirm against Hand's Ord instance).
main :: IO ()
main = print $ straightFlush `compare` quads
  where
    -- three through seven of spades
    straightFlush = Hand Card {rank = Three, suit = Spades}
                         Card {rank = Four, suit = Spades}
                         Card {rank = Five, suit = Spades}
                         Card {rank = Six, suit = Spades}
                         Card {rank = Seven, suit = Spades}
    -- four eights with a seven kicker
    quads = Hand Card {rank = Eight, suit = Spades}
                 Card {rank = Eight, suit = Hearts}
                 Card {rank = Eight, suit = Diamonds}
                 Card {rank = Eight, suit = Clubs}
                 Card {rank = Seven, suit = Clubs}
| benperez/chinese-poker | src/Main.hs | mit | 717 | 0 | 9 | 284 | 234 | 145 | 89 | 13 | 1 |
{-# LANGUAGE RecordWildCards, NoMonomorphismRestriction #-}
module Idea(module Idea, Severity(..)) where
import HSE.All
import Settings
import Language.Haskell.HsColour.TTY
import Language.Haskell.HsColour.Colourise
import Util
-- | A hint produced by the linter: either a suggestion anchored at a
-- source location (with found/replacement text), or a parse error.
data Idea
    = Idea {func :: FuncName, severity :: Severity, hint :: String, loc :: SrcLoc, from :: String, to :: String, note :: String}
    | ParseError {severity :: Severity, hint :: String, loc :: SrcLoc, msg :: String, from :: String}
      deriving (Eq,Ord)

-- Predicate for the ParseError alternative.
isParseError ParseError{} = True; isParseError _ = False

instance Show Idea where
    show = showEx id
-- | A renderer that colourises ideas for ANSI terminals, built from the
-- user's HsColour preferences.
showANSI :: IO (Idea -> String)
showANSI = fmap (showEx . hscolour) readColourPrefs
-- | Render an idea; @tt@ post-processes quoted code (e.g. colourising),
-- and is 'id' for plain output.
showEx :: (String -> String) -> Idea -> String
showEx tt Idea{..} = unlines $
    [showSrcLoc loc ++ ": " ++ show severity ++ ": " ++ hint] ++
    f "Found" from ++ f "Why not" to ++
    ["Note: " ++ note | note /= ""]
    where
        -- An empty replacement means the suggestion is to delete the code.
        f msg x | null xs = [msg ++ " remove it."]
                | otherwise = (msg ++ ":") : map ("  "++) xs
            where xs = lines $ tt x
showEx tt ParseError{..} = unlines $
    [showSrcLoc loc ++ ": Parse error","Error message:","  " ++ msg,"Code:"] ++ map ("  "++) (lines $ tt from)
-- An Idea with no associated function name.
rawIdea = Idea ("","")
-- Build an idea from two source fragments, pretty-printing them and
-- taking the location from the "from" fragment's annotation.
idea severity hint from to = rawIdea severity hint (toSrcLoc $ ann from) (f from) (f to) ""
    where f = ltrim . prettyPrint
-- Severity-specialised constructors.
warn = idea Warning
err = idea Error
| alphaHeavy/hlint | src/Idea.hs | gpl-2.0 | 1,433 | 0 | 14 | 337 | 549 | 299 | 250 | 33 | 1 |
import Data.Maybe
-- | Parse a value from the entire input string, yielding Nothing when
-- the parse fails or leaves unconsumed input.
maybeRead :: Read a => String -> Maybe a
maybeRead s
    | [(x, "")] <- reads s = Just x
    | otherwise            = Nothing
-- | Parse a comma-separated sequence of integers by wrapping it in
-- list brackets and reading the whole thing.
getListFromString :: String -> Maybe [Integer]
getListFromString str = maybeRead (concat ["[", str, "]"])
-- | Prompt until the user enters a well-formed comma-separated list of
-- numbers, then return it.
askUser :: IO [Integer]
askUser = do
    putStrLn "Enter a list of numbers (sep. by commas):"
    input <- getLine
    case getListFromString input of
        Just l  -> return l
        Nothing -> askUser
-- | Read a list of numbers from the user and print their sum.
main :: IO ()
main = do
    list <- askUser
    print (sum list)
| ardumont/haskell-lab | src/io.hs | gpl-2.0 | 581 | 0 | 12 | 181 | 198 | 99 | 99 | 18 | 2 |
module Test where
-- | All ways to make change for n using coins of value 1, 7, 11 and 21,
-- as tuples (ones, sevens, elevens, twentyones) in enumeration order.
scalaskel :: (Enum t, Eq t, Num t) => t -> [(t, t, t, t)]
scalaskel n =
    [ (w, x, y, z)
    | w <- range, x <- range, y <- range, z <- range
    , (1 * w) + (7 * x) + (11 * y) + (21 * z) == n
    ]
  where
    range = [0 .. n]
-- *Test> map scalaskel [1..10]
-- [[(1,0,0,0)],[(2,0,0,0)],[(3,0,0,0)],[(4,0,0,0)],[(5,0,0,0)],[(6,0,0,0)],[(0,1,0,0),(7,0,0,0)],[(1,1,0,0),(8,0,0,0)],[(2,1,0,0),(9,0,0,0)],[(3,1,0,0),(10,0,0,0)]]
| ardumont/haskell-lab | src/enonce1.hs | gpl-2.0 | 479 | 0 | 12 | 139 | 174 | 99 | 75 | 8 | 1 |
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Blubber where
-- | Demo of overlapping instances: a specific Int instance and a
-- catch-all.  (Removed a stray top-level token "jsf" that made the
-- module fail to parse.)
class Test a where
    foo :: a -> Int

-- Most specific instance: chosen for monomorphic Int calls.
instance Test Int where
    foo _ = 2

-- Catch-all instance, used when nothing more specific applies.
instance Test a where
    foo _ = 66

test1 = foo (2::Int) -- chooses more specific instance
-- test2 = foo 22 -- fails to compile. not so in repl with let
--test3 = foo (3::Num a => a) -- same here
{-
*Blubber> test1
2
*Blubber> foo 22
66
*Blubber> let test2 = foo 22
*Blubber> test2
66
*Blubber>
-}
| haraldsteinlechner/lambdaWolf | Blubber.hs | gpl-3.0 | 484 | 0 | 7 | 111 | 77 | 43 | 34 | -1 | -1 |
-- | Gets the GHC Core information we need, also obtains or creates the
-- QuickSpec signature
{-# LANGUAGE RecordWildCards, CPP #-}
module HipSpec.Read (execute,EntryResult(..),SigInfo(..)) where
import Test.QuickSpec.Signature (Sig)
import HipSpec.ParseDSL
import Data.List.Split (splitOn)
import HipSpec.Sig.Resolve
import HipSpec.Sig.Make
import HipSpec.Sig.Get
import HipSpec.Sig.Symbols
import HipSpec.Params
import CoreSyn (flattenBinds)
import CoreMonad (liftIO)
import DynFlags
import GHC hiding (Sig)
import GHC.Paths
import HscTypes
#if __GLASGOW_HASKELL__ < 708
import StaticFlags
#endif
import System.FilePath
import Var
import HipSpec.GHC.Unfoldings
import HipSpec.GHC.Utils
import HipSpec.Sig.Scope
import qualified Data.Map as M
import Data.Maybe
import Data.List
import Control.Monad
{-# ANN module "HLint: ignore Use camelCase" #-}
-- | The result from calling GHC
data EntryResult = EntryResult
{ sig_info :: Maybe SigInfo
, prop_ids :: [Var]
, extra_tcs :: [TyCon]
}
-- | Signature from QuickSpec
data SigInfo = SigInfo
{ sig :: Sig
, resolve_map :: ResolveMap
, symbol_map :: SymbolMap
}
-- | Compile the target file with the GHC API, collect the property ids
-- in scope, and (unless translate-only) obtain the QuickSpec signature.
execute :: Params -> IO EntryResult
execute params@Params{..} = do

    -- Use -threaded
#if __GLASGOW_HASKELL__ < 708
    addWay WayThreaded
    addWay WayDyn
#endif

    -- Notify where ghc is installed
    runGhc (Just libdir) $ do
        -- Set interpreted so we can get the signature,
        -- and expose all unfoldings
        dflags0 <- getSessionDynFlags
        let dflags =
#if __GLASGOW_HASKELL__ >= 708
              updateWays $
              addWay' WayThreaded $
              addWay' WayDyn $
#endif
              dflags0 { ghcMode = CompManager
                      , optLevel = 1
                      , profAuto = NoProfAuto
                      }
                `wopt_unset` Opt_WarnOverlappingPatterns
#if __GLASGOW_HASKELL__ >= 708
                `gopt_unset` Opt_IgnoreInterfacePragmas
                `gopt_unset` Opt_OmitInterfacePragmas
                `gopt_set` Opt_ExposeAllUnfoldings
#else
                `dopt_unset` Opt_IgnoreInterfacePragmas
                `dopt_unset` Opt_OmitInterfacePragmas
                `dopt_set` Opt_ExposeAllUnfoldings
#endif
        _ <- setSessionDynFlags dflags

        -- add .hs if it is not present (apparently not supporting lhs)
        let file_with_ext = replaceExtension file ".hs"

        target <- guessTarget file_with_ext Nothing
        addTarget target
        r <- load LoadAllTargets
        when (failed r) $ error "Compilation failed!"

        mod_graph <- getModuleGraph

        let mod_sum = findModuleSum file_with_ext mod_graph

        -- Parse, typecheck and desugar the module
        p <- parseModule mod_sum
        t <- typecheckModule p
        d <- desugarModule t

        let modguts = dm_core_module d

            binds = fixUnfoldings (mg_binds modguts)

            fix_id :: Id -> Id
            fix_id = fixId binds

        whenFlag params PrintCore (liftIO (putStrLn (showOutputable binds)))

        -- Set the context for evaluation
        setContext $
            [ IIDecl (simpleImportDecl (moduleName (ms_mod mod_sum)))
            , IIDecl (qualifiedImport "Test.QuickSpec.Signature")
            , IIDecl (qualifiedImport "Test.QuickSpec.Prelude")
            , IIDecl (qualifiedImport "GHC.Types")
            , IIDecl (qualifiedImport "Prelude")
            ]
            -- Also include the imports the module is importing
            ++ map (IIDecl . unLoc) (ms_textual_imps mod_sum)

        -- Get ids in scope to find the properties (fix their unfoldings, too)
        ids_in_scope <- getIdsInScope fix_id

        -- The --only flag accepts comma-separated names; empty means all.
        let only' :: [String]
            only' = concatMap (splitOn ",") only

            props :: [Var]
            props =
                [ i
                | i <- ids_in_scope
                , varWithPropType i
                , not (varFromPrelude i)
                , null only || varToString i `elem` only'
                ]

        -- Make or get signature
        m_sig <-
            if TranslateOnly `elem` debug_flags
                then return Nothing
                else if auto
                    then makeSignature params props
                    else getSignature

        -- Make signature map
        --
        -- The extra_ids comes from --extra and --extra-trans fields from
        -- the auto signature generation
        (sig_info,extra_ids,extra_tcs) <- case m_sig of
            Nothing -> return (Nothing,[],[])
            Just sig -> do
                resolve_map <- makeResolveMap params sig
                let symbol_map = makeSymbolMap resolve_map sig
                    (ids,tcs) = case resolve_map of
                        ResolveMap m n -> (M.elems m,M.elems n)
                whenFlag params DebugStrConv (liftIO (putStrLn (show symbol_map)))
                return (Just SigInfo{..},ids,tcs)

        -- With --tr-mod, every top-level binding is also translated.
        let toplvl_binds | tr_mod = map (fix_id . fst) (flattenBinds binds)
                         | otherwise = []

        whenFlag params PrintCore (liftIO (putStrLn (showOutputable toplvl_binds)))

        -- Wrapping up
        return EntryResult
            { sig_info = sig_info
            , prop_ids = props ++ extra_ids ++ toplvl_binds
            , extra_tcs = extra_tcs
            }
-- Locate the module summary whose Haskell source file equals the given
-- path, failing loudly when it is absent from the module graph.
findModuleSum :: FilePath -> [ModSummary] -> ModSummary
findModuleSum file mod_sums =
    case find ((Just file ==) . summaryHsFile) mod_sums of
        Just ms -> ms
        Nothing -> error $ "Cannot find module " ++ file

-- The Haskell source file recorded in a module summary, if any.
summaryHsFile :: ModSummary -> Maybe FilePath
summaryHsFile ms = ml_hs_file (ms_location ms)
-- Build a qualified import declaration from a module name string.
qualifiedImport :: String -> ImportDecl name
qualifiedImport = qualifiedImportDecl . mkModuleName

-- Mark a simple import of the given module as qualified.
qualifiedImportDecl :: ModuleName -> ImportDecl name
qualifiedImportDecl m =
    let decl = simpleImportDecl m
    in decl { ideclQualified = True }
| danr/hipspec | src/HipSpec/Read.hs | gpl-3.0 | 5,927 | 0 | 24 | 1,896 | 1,244 | 663 | 581 | 114 | 4 |
module Handler.CCSpec(spec) where
import TestImport
import qualified TestImport as I
import qualified Yesod.Test.TransversingCSS as CSS
import Yesod.Auth
import Test.QuickCheck.Monadic
import Test.QuickCheck
import qualified Test.HUnit
import Network.Wai.Test
import Network.HTTP.Types.Method
import qualified Data.Map as Map
import Data.ByteString.Lazy as BL
import Data.ByteString.Lazy.Char8 as BL8
import Data.Time.LocalTime
import System.IO
import qualified Text.XML as XML
import qualified Text.XML.Cursor as XMLCursor
import Authentication
import DBFS hiding(runDB)
-- Integration tests against a locally running blog app; the final case
-- doubles as a crude benchmark of the continuation-based flow.
spec :: Spec
spec = withApp $ do

    -- Root redirects rather than serving a page directly.
    it "tests the connection" $ do
        get ("http://localhost:3000" :: String)
        statusIs 303

    it "tests the form contents" $ do
        get ("http://localhost:3000/blog" :: String)
        statusIs 200
        htmlAllContain ".articleDetail .articleDetailHead .pageTitle"
            "Delimited continuations and nested transactions"

    -- The form action is generated per-session; only its prefix is stable.
    it "tests the form structure" $ do
        get ("http://localhost:3000/blog" :: String)
        statusIs 200
        [form] <- htmlQuery "#blogLoginForm"
        let cursor = parseHTML form
            XML.NodeElement (XML.Element name attr _nodes) = XMLCursor.node cursor
            Just action = Map.lookup "action" attr
        liftIO $ I.unpack action `shouldStartWith` "/blog/"

    -- Extract the action URL, post valid credentials, expect a greeting.
    it "tests the form submission" $ do
        get ("http://localhost:3000/blog" :: String)
        statusIs 200
        [form] <- htmlQuery "#blogLoginForm"
        let cursor = parseHTML form
            XML.NodeElement (XML.Element name attr _nodes) = XMLCursor.node cursor
            Just action = Map.lookup "action" attr
        liftIO $ I.unpack action `shouldStartWith` "/blog/"
        let req = do setMethod methodPost
                     --setUrl $ "http://localhost:3000" ++ I.unpack action
                     setUrl $ "http://localhost:3000" ++ I.unpack action
                     byLabel "Enter the user name:" "root"
                     byLabel "Enter the password" "root"
                     addPostParam "_formid" "userForm"
                     addToken
        I.request req
        statusIs 200
        bodyContains "You are logged in as root."

    it "tests the login form" $ do
        login

    -- Run the whole login/new-article/preview flow repeatedly, logging
    -- wall-clock timestamps before and after via putTime.
    it "benchmarks the continuation" $ do
        -- NOTE(review): 'i' appears unused; 'i'' bounds the loop.
        let i = 250
            i' = 2
        putTime
        forM [1..i'] $ \_ -> do
            login
            logout
            login
            newArticle
            cancel
            newArticle
            submit
            newArticle
            preview
            cancelPreview
            newArticle
            preview
            submitPreview
        putTime
---------------------------- Login Page ----------------------------
-- Log in as root via the blog login form and verify the greeting.
login = do
    get ("http://localhost:3000/blog" :: String)
    statusIs 200
    bodyContains "Please log in"
    action <- formActionValue "#blogLoginForm"
    liftIO $ I.unpack action `shouldStartWith` "/blog/"
    let req = do setMethod methodPost
                 setUrl $ "http://localhost:3000" ++ I.unpack action
                 byLabel "Enter the user name:" "root"
                 byLabel "Enter the password" "root"
                 addPostParam "_formid" "userForm"
                 addToken
    I.request req
    statusIs 200
    bodyContains "Blog View"
    bodyContains "You are logged in as root."
-------------------------- Blog view page --------------------------
-- From the blog view, submit the logout button and expect the goodbye page.
logout = do
    bodyContains "Blog View"
    bodyContains "You are logged in as root."
    action <- formActionValue ".articleDetailBody form"
    liftIO $ I.unpack action `shouldStartWith` "/blog/"
    let req = do setMethod methodPost
                 setUrl $ "http://localhost:3000" ++ I.unpack action
                 addPostParam "logout" "Logout"
                 addToken
    I.request req
    statusIs 200
    bodyContains "Good bye"

-- From the blog view, open the new-article form.
newArticle = do
    bodyContains "Blog View"
    bodyContains "You are logged in as root."
    action <- formActionValue ".articleDetailBody form"
    liftIO $ I.unpack action `shouldStartWith` "/blog/"
    let req = do setMethod methodPost
                 setUrl $ "http://localhost:3000" ++ I.unpack action
                 addPostParam "new" "New Article"
                 addToken
    I.request req
    statusIs 200
    bodyContains "Enter a new article"
-------------------------- New article page --------------------------
cancel = do
bodyContains "Enter a new article"
action <- formActionValue ".articleDetailBody form"
liftIO $ I.unpack action `shouldStartWith` "/blog/"
let req = do setMethod methodPost
setUrl $ "http://localhost:3000" ++ I.unpack action
byLabel "Subject:" "subject"
byLabel "Body:" "body"
addPostParam "cancel" "Cancel"
addToken
I.request req
statusIs 200
bodyContains "Blog View"
-- bodyContains "Enter a new article"
bodyContains "You are logged in as root."
-- | On the new-article page, fill in the form and press Submit; we
-- should return to the blog view.
submit = do
    bodyContains "Enter a new article"
    target <- formActionValue ".articleDetailBody form"
    liftIO $ I.unpack target `shouldStartWith` "/blog/"
    let submitReq = do
            setMethod methodPost
            setUrl $ "http://localhost:3000" ++ I.unpack target
            byLabel "Subject:" "subject"
            byLabel "Body:" "body"
            addPostParam "submit" "Submit"
            addToken
    I.request submitReq
    statusIs 200
    bodyContains "Blog View"
-- | On the new-article page, fill in the form and press Preview; the
-- preview page should be shown.
preview = do
    bodyContains "Enter a new article"
    target <- formActionValue ".articleDetailBody form"
    liftIO $ I.unpack target `shouldStartWith` "/blog/"
    let previewReq = do
            setMethod methodPost
            setUrl $ "http://localhost:3000" ++ I.unpack target
            byLabel "Subject:" "subject"
            byLabel "Body:" "body"
            addPostParam "preview" "Preview"
            addToken
    I.request previewReq
    statusIs 200
    bodyContains "Preview your submission"
---------------------------- Preview page ----------------------------
-- | On the preview page, press Cancel; we should return to the blog
-- view still logged in.
cancelPreview = do
    bodyContains "Preview your submission"
    target <- formActionValue ".articleDetailBody form"
    liftIO $ I.unpack target `shouldStartWith` "/blog/"
    let cancelReq = do
            setMethod methodPost
            setUrl $ "http://localhost:3000" ++ I.unpack target
            addPostParam "cancel" "Cancel"
            addToken
    I.request cancelReq
    statusIs 200
    bodyContains "Blog View"
    bodyContains "You are logged in as root."
-- | On the preview page, press Submit; we should return to the blog
-- view still logged in.
submitPreview = do
    bodyContains "Preview your submission"
    target <- formActionValue ".articleDetailBody form"
    liftIO $ I.unpack target `shouldStartWith` "/blog/"
    let submitReq = do
            setMethod methodPost
            setUrl $ "http://localhost:3000" ++ I.unpack target
            addPostParam "submit" "Submit"
            addToken
    I.request submitReq
    statusIs 200
    bodyContains "Blog View"
    bodyContains "You are logged in as root."
------------------------------------------------------------------
-- | Extract the @action@ attribute of the single form selected by
-- @query@ from the last response body.
--
-- Fix: the element's name was bound but never used, producing an
-- unused-binding warning; it is now '_name'.
formActionValue query = do
  -- Exactly one matching element is expected; a mismatch fails the test
  -- via the monadic pattern-match failure.
  [form] <- htmlQuery query
  let cursor = parseHTML form
      -- NOTE(review): both let patterns are partial; a non-element node
      -- or a missing "action" attribute raises an
      -- irrefutable-pattern error rather than a descriptive failure.
      XML.NodeElement (XML.Element _name attr _nodes) = XMLCursor.node cursor
      Just action = Map.lookup "action" attr
  return action
-- | Append the current local wall-clock time to @/tmp/time.log@
-- (ad-hoc timing aid for the test run).
putTime = liftIO $
    withFile "/tmp/time.log" AppendMode $ \logH -> do
        ZonedTime localT _tz <- getZonedTime
        I.hPutStrLn logH $ show localT
| nishiuramakoto/logiku | test/Handler/CCSpec.hs | gpl-3.0 | 7,092 | 0 | 18 | 1,822 | 1,686 | 749 | 937 | 189 | 1 |
-- | Prime factorisation (problem 35): the prime factors of @n@ in
-- non-decreasing order.  For @n < 2@ the result is the empty list.
p35 :: Integral a => a -> [a]
p35 n = go n primes
  where
    -- Trial division by successive primes.  Once x*x exceeds the
    -- remaining cofactor m, m itself must be prime (any composite m has
    -- a divisor <= sqrt m), so emit it and stop.  The original scanned
    -- the prime list all the way up to m, which is accidentally
    -- quadratic for prime inputs.
    go m ps@(x:xs)
      | m < 2        = []
      | x * x > m    = [m]
      | mod m x == 0 = x : go (div m x) ps
      | otherwise    = go m xs
    go _ [] = []  -- unreachable: 'primes' is infinite

-- | The infinite, increasing list of all primes via the classic
-- trial-division \"sieve\"; slow asymptotically but ample for the small
-- divisors 'p35' consults.
primes :: Integral a => [a]
primes = sieve [2 ..]
  where
    sieve (x:xs) = x : sieve [z | z <- xs, mod z x /= 0]
    sieve []     = []  -- unreachable: the input stream is infinite
| yalpul/CENG242 | H99/31-41/p35.hs | gpl-3.0 | 308 | 0 | 14 | 148 | 157 | 76 | 81 | 7 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AccessContextManager.AccessPolicies.AccessLevels.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List all Access Levels for an access policy.
--
-- /See:/ <https://cloud.google.com/access-context-manager/docs/reference/rest/ Access Context Manager API Reference> for @accesscontextmanager.accessPolicies.accessLevels.list@.
module Network.Google.Resource.AccessContextManager.AccessPolicies.AccessLevels.List
(
-- * REST Resource
AccessPoliciesAccessLevelsListResource
-- * Creating a Request
, accessPoliciesAccessLevelsList
, AccessPoliciesAccessLevelsList
-- * Request Lenses
, apallParent
, apallXgafv
, apallUploadProtocol
, apallAccessToken
, apallUploadType
, apallAccessLevelFormat
, apallPageToken
, apallPageSize
, apallCallback
) where
import Network.Google.AccessContextManager.Types
import Network.Google.Prelude
-- | A resource alias for @accesscontextmanager.accessPolicies.accessLevels.list@ method which the
-- 'AccessPoliciesAccessLevelsList' request conforms to.
--
-- Shape: @GET \/v1\/{parent}\/accessLevels@ with optional error-format,
-- upload, paging and JSONP query parameters, returning JSON.
type AccessPoliciesAccessLevelsListResource =
     "v1" :>
       Capture "parent" Text :>
         "accessLevels" :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "accessLevelFormat"
                     AccessPoliciesAccessLevelsListAccessLevelFormat
                     :>
                     QueryParam "pageToken" Text :>
                       QueryParam "pageSize" (Textual Int32) :>
                         QueryParam "callback" Text :>
                           QueryParam "alt" AltJSON :>
                             Get '[JSON] ListAccessLevelsResponse
-- | List all Access Levels for an access policy.
--
-- /See:/ 'accessPoliciesAccessLevelsList' smart constructor.
data AccessPoliciesAccessLevelsList =
  AccessPoliciesAccessLevelsList'
    { _apallParent :: !Text -- ^ access policy whose levels are listed (required)
    , _apallXgafv :: !(Maybe Xgafv) -- ^ V1 error format selector
    , _apallUploadProtocol :: !(Maybe Text) -- ^ e.g. \"raw\", \"multipart\"
    , _apallAccessToken :: !(Maybe Text) -- ^ OAuth access token
    , _apallUploadType :: !(Maybe Text) -- ^ legacy upload protocol
    , _apallAccessLevelFormat :: !(Maybe AccessPoliciesAccessLevelsListAccessLevelFormat) -- ^ BasicLevels vs CustomLevels rendering
    , _apallPageToken :: !(Maybe Text) -- ^ pagination cursor
    , _apallPageSize :: !(Maybe (Textual Int32)) -- ^ page size (server default 100)
    , _apallCallback :: !(Maybe Text) -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccessPoliciesAccessLevelsList' with the minimum fields required to make a request.
--
-- Only the parent is mandatory; every other field starts unset and can
-- be adjusted afterwards through its lens:
--
-- * 'apallParent'
--
-- * 'apallXgafv'
--
-- * 'apallUploadProtocol'
--
-- * 'apallAccessToken'
--
-- * 'apallUploadType'
--
-- * 'apallAccessLevelFormat'
--
-- * 'apallPageToken'
--
-- * 'apallPageSize'
--
-- * 'apallCallback'
accessPoliciesAccessLevelsList
    :: Text -- ^ 'apallParent'
    -> AccessPoliciesAccessLevelsList
accessPoliciesAccessLevelsList parent =
  AccessPoliciesAccessLevelsList'
    { _apallParent = parent
    , _apallXgafv = Nothing
    , _apallUploadProtocol = Nothing
    , _apallAccessToken = Nothing
    , _apallUploadType = Nothing
    , _apallAccessLevelFormat = Nothing
    , _apallPageToken = Nothing
    , _apallPageSize = Nothing
    , _apallCallback = Nothing
    }
-- | Required. Resource name for the access policy to list Access Levels
-- from. Format: \`accessPolicies\/{policy_id}\`
apallParent :: Lens' AccessPoliciesAccessLevelsList Text
apallParent = lens _apallParent (\rq v -> rq {_apallParent = v})

-- | V1 error format.
apallXgafv :: Lens' AccessPoliciesAccessLevelsList (Maybe Xgafv)
apallXgafv = lens _apallXgafv (\rq v -> rq {_apallXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
apallUploadProtocol :: Lens' AccessPoliciesAccessLevelsList (Maybe Text)
apallUploadProtocol =
  lens _apallUploadProtocol (\rq v -> rq {_apallUploadProtocol = v})

-- | OAuth access token.
apallAccessToken :: Lens' AccessPoliciesAccessLevelsList (Maybe Text)
apallAccessToken =
  lens _apallAccessToken (\rq v -> rq {_apallAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
apallUploadType :: Lens' AccessPoliciesAccessLevelsList (Maybe Text)
apallUploadType =
  lens _apallUploadType (\rq v -> rq {_apallUploadType = v})

-- | Whether to return \`BasicLevels\` in the Cloud Common Expression
-- language, as \`CustomLevels\`, rather than as \`BasicLevels\`. Defaults
-- to returning \`AccessLevels\` in the format they were defined.
apallAccessLevelFormat :: Lens' AccessPoliciesAccessLevelsList (Maybe AccessPoliciesAccessLevelsListAccessLevelFormat)
apallAccessLevelFormat =
  lens _apallAccessLevelFormat (\rq v -> rq {_apallAccessLevelFormat = v})

-- | Next page token for the next batch of Access Level instances. Defaults
-- to the first page of results.
apallPageToken :: Lens' AccessPoliciesAccessLevelsList (Maybe Text)
apallPageToken = lens _apallPageToken (\rq v -> rq {_apallPageToken = v})

-- | Number of Access Levels to include in the list. Default 100.
apallPageSize :: Lens' AccessPoliciesAccessLevelsList (Maybe Int32)
apallPageSize =
  lens _apallPageSize (\rq v -> rq {_apallPageSize = v}) . mapping _Coerce

-- | JSONP
apallCallback :: Lens' AccessPoliciesAccessLevelsList (Maybe Text)
apallCallback = lens _apallCallback (\rq v -> rq {_apallCallback = v})
-- NOTE: the order of arguments passed to 'go' must mirror the query
-- parameter order declared in 'AccessPoliciesAccessLevelsListResource';
-- do not reorder.
instance GoogleRequest AccessPoliciesAccessLevelsList
         where
        type Rs AccessPoliciesAccessLevelsList =
             ListAccessLevelsResponse
        type Scopes AccessPoliciesAccessLevelsList =
             '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient AccessPoliciesAccessLevelsList'{..}
          = go _apallParent _apallXgafv _apallUploadProtocol
              _apallAccessToken
              _apallUploadType
              _apallAccessLevelFormat
              _apallPageToken
              _apallPageSize
              _apallCallback
              (Just AltJSON)
              accessContextManagerService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy AccessPoliciesAccessLevelsListResource)
                      mempty
| brendanhay/gogol | gogol-accesscontextmanager/gen/Network/Google/Resource/AccessContextManager/AccessPolicies/AccessLevels/List.hs | mpl-2.0 | 7,008 | 0 | 19 | 1,555 | 962 | 556 | 406 | 143 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.ElasticBeanstalk.Types
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
module Network.AWS.ElasticBeanstalk.Types
(
-- * Service
ElasticBeanstalk
-- ** Error
, RESTError
-- ** XML
, ns
-- * ApplicationDescription
, ApplicationDescription
, applicationDescription
, adApplicationName
, adConfigurationTemplates
, adDateCreated
, adDateUpdated
, adDescription
, adVersions
-- * EventSeverity
, EventSeverity (..)
-- * Tag
, Tag
, tag
, tagKey
, tagValue
-- * EventDescription
, EventDescription
, eventDescription
, edApplicationName
, edEnvironmentName
, edEventDate
, edMessage
, edRequestId
, edSeverity
, edTemplateName
, edVersionLabel
-- * LaunchConfiguration
, LaunchConfiguration
, launchConfiguration
, lcName
-- * ApplicationVersionDescriptionMessage
, ApplicationVersionDescriptionMessage
, applicationVersionDescriptionMessage
, avdmApplicationVersion
-- * AutoScalingGroup
, AutoScalingGroup
, autoScalingGroup
, asgName
-- * ConfigurationDeploymentStatus
, ConfigurationDeploymentStatus (..)
-- * ConfigurationOptionSetting
, ConfigurationOptionSetting
, configurationOptionSetting
, cosNamespace
, cosOptionName
, cosResourceName
, cosValue
-- * ConfigurationOptionValueType
, ConfigurationOptionValueType (..)
-- * ConfigurationSettingsDescription
, ConfigurationSettingsDescription
, configurationSettingsDescription
, csdApplicationName
, csdDateCreated
, csdDateUpdated
, csdDeploymentStatus
, csdDescription
, csdEnvironmentName
, csdOptionSettings
, csdSolutionStackName
, csdTemplateName
-- * ApplicationVersionDescription
, ApplicationVersionDescription
, applicationVersionDescription
, avdApplicationName
, avdDateCreated
, avdDateUpdated
, avdDescription
, avdSourceBundle
, avdVersionLabel
-- * OptionSpecification
, OptionSpecification
, optionSpecification
, osNamespace
, osOptionName
, osResourceName
-- * EnvironmentResourceDescription
, EnvironmentResourceDescription
, environmentResourceDescription
, erdAutoScalingGroups
, erdEnvironmentName
, erdInstances
, erdLaunchConfigurations
, erdLoadBalancers
, erdQueues
, erdTriggers
-- * Queue
, Queue
, queue
, qName
, qURL
-- * EnvironmentStatus
, EnvironmentStatus (..)
-- * LoadBalancerDescription
, LoadBalancerDescription
, loadBalancerDescription
, lbdDomain
, lbdListeners
, lbdLoadBalancerName
-- * ApplicationDescriptionMessage
, ApplicationDescriptionMessage
, applicationDescriptionMessage
, admApplication
-- * EnvironmentTier
, EnvironmentTier
, environmentTier
, etName
, etType
, etVersion
-- * LoadBalancer
, LoadBalancer
, loadBalancer
, lbName
-- * EnvironmentResourcesDescription
, EnvironmentResourcesDescription
, environmentResourcesDescription
, erdLoadBalancer
-- * OptionRestrictionRegex
, OptionRestrictionRegex
, optionRestrictionRegex
, orrLabel
, orrPattern
-- * ConfigurationOptionDescription
, ConfigurationOptionDescription
, configurationOptionDescription
, codChangeSeverity
, codDefaultValue
, codMaxLength
, codMaxValue
, codMinValue
, codName
, codNamespace
, codRegex
, codUserDefined
, codValueOptions
, codValueType
-- * SourceConfiguration
, SourceConfiguration
, sourceConfiguration
, scApplicationName
, scTemplateName
-- * EnvironmentInfoDescription
, EnvironmentInfoDescription
, environmentInfoDescription
, eidEc2InstanceId
, eidInfoType
, eidMessage
, eidSampleTimestamp
-- * S3Location
, S3Location
, s3Location
, slS3Bucket
, slS3Key
-- * ValidationMessage
, ValidationMessage
, validationMessage
, vmMessage
, vmNamespace
, vmOptionName
, vmSeverity
-- * ValidationSeverity
, ValidationSeverity (..)
-- * Trigger
, Trigger
, trigger
, tName
-- * EnvironmentInfoType
, EnvironmentInfoType (..)
-- * EnvironmentDescription
, EnvironmentDescription
, environmentDescription
, ed1AbortableOperationInProgress
, ed1ApplicationName
, ed1CNAME
, ed1DateCreated
, ed1DateUpdated
, ed1Description
, ed1EndpointURL
, ed1EnvironmentId
, ed1EnvironmentName
, ed1Health
, ed1Resources
, ed1SolutionStackName
, ed1Status
, ed1TemplateName
, ed1Tier
, ed1VersionLabel
-- * Listener
, Listener
, listener
, lPort
, lProtocol
-- * EnvironmentHealth
, EnvironmentHealth (..)
-- * Instance
, Instance
, instance'
, iId
-- * SolutionStackDescription
, SolutionStackDescription
, solutionStackDescription
, ssdPermittedFileTypes
, ssdSolutionStackName
) where
import Network.AWS.Prelude
import Network.AWS.Signing
import qualified GHC.Exts
-- | Version @2010-12-01@ of the Amazon Elastic Beanstalk service.
data ElasticBeanstalk

instance AWSService ElasticBeanstalk where
    type Sg ElasticBeanstalk = V4
    type Er ElasticBeanstalk = RESTError

    service = service'
      where
        service' :: Service ElasticBeanstalk
        service' = Service
            { _svcAbbrev       = "ElasticBeanstalk"
            , _svcPrefix       = "elasticbeanstalk"
            , _svcVersion      = "2010-12-01"
            , _svcTargetPrefix = Nothing
            , _svcJSONVersion  = Nothing
            , _svcHandle       = handle
            , _svcRetry        = retry
            }

        handle :: Status
               -> Maybe (LazyByteString -> ServiceError RESTError)
        handle = restError statusSuccess service'

        -- Exponential backoff: 50ms base, doubling, at most 5 attempts.
        retry :: Retry ElasticBeanstalk
        retry = Exponential
            { _retryBase     = 0.05
            , _retryGrowth   = 2
            , _retryAttempts = 5
            , _retryCheck    = check
            }

        -- Retry only on throttling and transient server-side statuses.
        check :: Status
              -> RESTError
              -> Bool
        check (statusCode -> s) (awsErrorCode -> e)
            | s == 400 && (Just "Throttling") == e = True -- Throttling
            | s == 500  = True -- General Server Error
            | s == 509  = True -- Limit Exceeded
            | s == 503  = True -- Service Unavailable
            | otherwise = False

-- | XML namespace shared by this API version's request/response documents.
ns :: Text
ns = "http://elasticbeanstalk.amazonaws.com/docs/2010-12-01/"
{-# INLINE ns #-}
data ApplicationDescription = ApplicationDescription
    { _adApplicationName        :: Maybe Text         -- ^ application name
    , _adConfigurationTemplates :: List "member" Text -- ^ associated configuration template names
    , _adDateCreated            :: Maybe ISO8601      -- ^ creation timestamp
    , _adDateUpdated            :: Maybe ISO8601      -- ^ last-modified timestamp
    , _adDescription            :: Maybe Text         -- ^ user-defined description
    , _adVersions               :: List "member" Text -- ^ version labels of this application
    } deriving (Eq, Ord, Read, Show)
-- | 'ApplicationDescription' constructor.  Every field starts empty:
-- the 'Maybe' fields are 'Nothing' and the list fields are 'mempty'.
-- Populate via 'adApplicationName', 'adConfigurationTemplates',
-- 'adDateCreated', 'adDateUpdated', 'adDescription' and 'adVersions'.
applicationDescription :: ApplicationDescription
applicationDescription = ApplicationDescription
    { _adApplicationName        = Nothing
    , _adConfigurationTemplates = mempty
    , _adDateCreated            = Nothing
    , _adDateUpdated            = Nothing
    , _adDescription            = Nothing
    , _adVersions               = mempty
    }
-- | The name of the application.
adApplicationName :: Lens' ApplicationDescription (Maybe Text)
adApplicationName =
    lens _adApplicationName (\rec v -> rec { _adApplicationName = v })

-- | The names of the configuration templates associated with this application.
adConfigurationTemplates :: Lens' ApplicationDescription [Text]
adConfigurationTemplates =
    lens _adConfigurationTemplates
         (\rec v -> rec { _adConfigurationTemplates = v })
        . _List

-- | The date when the application was created.
adDateCreated :: Lens' ApplicationDescription (Maybe UTCTime)
adDateCreated =
    lens _adDateCreated (\rec v -> rec { _adDateCreated = v }) . mapping _Time

-- | The date when the application was last modified.
adDateUpdated :: Lens' ApplicationDescription (Maybe UTCTime)
adDateUpdated =
    lens _adDateUpdated (\rec v -> rec { _adDateUpdated = v }) . mapping _Time

-- | User-defined description of the application.
adDescription :: Lens' ApplicationDescription (Maybe Text)
adDescription = lens _adDescription (\rec v -> rec { _adDescription = v })

-- | The names of the versions for this application.
adVersions :: Lens' ApplicationDescription [Text]
adVersions = lens _adVersions (\rec v -> rec { _adVersions = v }) . _List
-- NOTE: the applicative chain must stay in the data type's field
-- order; reordering silently swaps field values.
instance FromXML ApplicationDescription where
    parseXML x = ApplicationDescription
        <$> x .@? "ApplicationName"
        <*> x .@? "ConfigurationTemplates" .!@ mempty
        <*> x .@? "DateCreated"
        <*> x .@? "DateUpdated"
        <*> x .@? "Description"
        <*> x .@? "Versions" .!@ mempty

instance ToQuery ApplicationDescription where
    toQuery ApplicationDescription{..} = mconcat
        [ "ApplicationName"        =? _adApplicationName
        , "ConfigurationTemplates" =? _adConfigurationTemplates
        , "DateCreated"            =? _adDateCreated
        , "DateUpdated"            =? _adDateUpdated
        , "Description"            =? _adDescription
        , "Versions"               =? _adVersions
        ]
-- | Severity of an Elastic Beanstalk event.  The wire format is the
-- upper-case word shown next to each constructor; parsing lower-cases
-- the input first, so it is case-insensitive.
data EventSeverity
    = Debug -- ^ DEBUG
    | Error -- ^ ERROR
    | Fatal -- ^ FATAL
    | Info -- ^ INFO
    | Trace -- ^ TRACE
    | Warn -- ^ WARN
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable EventSeverity

instance FromText EventSeverity where
    parser = takeLowerText >>= \case
        "debug" -> pure Debug
        "error" -> pure Error
        "fatal" -> pure Fatal
        "info" -> pure Info
        "trace" -> pure Trace
        "warn" -> pure Warn
        e -> fail $
            "Failure parsing EventSeverity from " ++ show e

instance ToText EventSeverity where
    toText = \case
        Debug -> "DEBUG"
        Error -> "ERROR"
        Fatal -> "FATAL"
        Info -> "INFO"
        Trace -> "TRACE"
        Warn -> "WARN"

instance ToByteString EventSeverity
instance ToHeader EventSeverity
instance ToQuery EventSeverity

instance FromXML EventSeverity where
    parseXML = parseXMLText "EventSeverity"
-- | A key\/value pair attached to a resource; both halves optional.
data Tag = Tag
    { _tagKey   :: Maybe Text -- ^ tag key
    , _tagValue :: Maybe Text -- ^ tag value
    } deriving (Eq, Ord, Read, Show)
-- | 'Tag' constructor.  Both the key and the value start as 'Nothing';
-- set them via 'tagKey' and 'tagValue'.
tag :: Tag
tag = Tag { _tagKey = Nothing, _tagValue = Nothing }

-- | The key of the tag.
tagKey :: Lens' Tag (Maybe Text)
tagKey = lens _tagKey (\rec v -> rec { _tagKey = v })

-- | The value of the tag.
tagValue :: Lens' Tag (Maybe Text)
tagValue = lens _tagValue (\rec v -> rec { _tagValue = v })
-- NOTE: the applicative chain mirrors the field order of 'Tag'.
instance FromXML Tag where
    parseXML x = Tag
        <$> x .@? "Key"
        <*> x .@? "Value"

instance ToQuery Tag where
    toQuery Tag{..} = mconcat
        [ "Key"   =? _tagKey
        , "Value" =? _tagValue
        ]
-- | One entry of an environment's event stream; all fields optional.
data EventDescription = EventDescription
    { _edApplicationName :: Maybe Text          -- ^ application the event belongs to
    , _edEnvironmentName :: Maybe Text          -- ^ environment the event belongs to
    , _edEventDate       :: Maybe ISO8601       -- ^ when the event occurred
    , _edMessage         :: Maybe Text          -- ^ event message
    , _edRequestId       :: Maybe Text          -- ^ web service request id
    , _edSeverity        :: Maybe EventSeverity -- ^ severity level
    , _edTemplateName    :: Maybe Text          -- ^ associated configuration template
    , _edVersionLabel    :: Maybe Text          -- ^ associated application version
    } deriving (Eq, Read, Show)
-- | 'EventDescription' constructor.  Every field is optional and starts
-- as 'Nothing'; set the ones you need through 'edApplicationName',
-- 'edEnvironmentName', 'edEventDate', 'edMessage', 'edRequestId',
-- 'edSeverity', 'edTemplateName' and 'edVersionLabel'.
eventDescription :: EventDescription
eventDescription = EventDescription
    { _edApplicationName = Nothing
    , _edEnvironmentName = Nothing
    , _edEventDate       = Nothing
    , _edMessage         = Nothing
    , _edRequestId       = Nothing
    , _edSeverity        = Nothing
    , _edTemplateName    = Nothing
    , _edVersionLabel    = Nothing
    }
-- | The application associated with the event.
edApplicationName :: Lens' EventDescription (Maybe Text)
edApplicationName =
    lens _edApplicationName (\rec v -> rec { _edApplicationName = v })

-- | The name of the environment associated with this event.
edEnvironmentName :: Lens' EventDescription (Maybe Text)
edEnvironmentName =
    lens _edEnvironmentName (\rec v -> rec { _edEnvironmentName = v })

-- | The date when the event occurred.
edEventDate :: Lens' EventDescription (Maybe UTCTime)
edEventDate =
    lens _edEventDate (\rec v -> rec { _edEventDate = v }) . mapping _Time

-- | The event message.
edMessage :: Lens' EventDescription (Maybe Text)
edMessage = lens _edMessage (\rec v -> rec { _edMessage = v })

-- | The web service request ID for the activity of this event.
edRequestId :: Lens' EventDescription (Maybe Text)
edRequestId = lens _edRequestId (\rec v -> rec { _edRequestId = v })

-- | The severity level of this event.
edSeverity :: Lens' EventDescription (Maybe EventSeverity)
edSeverity = lens _edSeverity (\rec v -> rec { _edSeverity = v })

-- | The name of the configuration associated with this event.
edTemplateName :: Lens' EventDescription (Maybe Text)
edTemplateName = lens _edTemplateName (\rec v -> rec { _edTemplateName = v })

-- | The release label for the application version associated with this event.
edVersionLabel :: Lens' EventDescription (Maybe Text)
edVersionLabel = lens _edVersionLabel (\rec v -> rec { _edVersionLabel = v })
-- NOTE: the applicative chain must stay in the data type's field order.
instance FromXML EventDescription where
    parseXML x = EventDescription
        <$> x .@? "ApplicationName"
        <*> x .@? "EnvironmentName"
        <*> x .@? "EventDate"
        <*> x .@? "Message"
        <*> x .@? "RequestId"
        <*> x .@? "Severity"
        <*> x .@? "TemplateName"
        <*> x .@? "VersionLabel"

instance ToQuery EventDescription where
    toQuery EventDescription{..} = mconcat
        [ "ApplicationName" =? _edApplicationName
        , "EnvironmentName" =? _edEnvironmentName
        , "EventDate"       =? _edEventDate
        , "Message"         =? _edMessage
        , "RequestId"       =? _edRequestId
        , "Severity"        =? _edSeverity
        , "TemplateName"    =? _edTemplateName
        , "VersionLabel"    =? _edVersionLabel
        ]
-- | Wrapper for an optional launch configuration name.
newtype LaunchConfiguration = LaunchConfiguration
    { _lcName :: Maybe Text -- ^ launch configuration name
    } deriving (Eq, Ord, Read, Show, Monoid)
-- | 'LaunchConfiguration' constructor; the optional 'lcName' starts as
-- 'Nothing'.
launchConfiguration :: LaunchConfiguration
launchConfiguration = LaunchConfiguration { _lcName = Nothing }

-- | The name of the launch configuration.
lcName :: Lens' LaunchConfiguration (Maybe Text)
lcName = lens _lcName (\rec v -> rec { _lcName = v })
instance FromXML LaunchConfiguration where
    parseXML x = LaunchConfiguration
        <$> x .@? "Name"

instance ToQuery LaunchConfiguration where
    toQuery LaunchConfiguration{..} = mconcat
        [ "Name" =? _lcName
        ]
-- | Wrapper carrying an optional 'ApplicationVersionDescription'.
newtype ApplicationVersionDescriptionMessage = ApplicationVersionDescriptionMessage
    { _avdmApplicationVersion :: Maybe ApplicationVersionDescription -- ^ wrapped version description
    } deriving (Eq, Read, Show)
-- | 'ApplicationVersionDescriptionMessage' constructor; the wrapped
-- 'avdmApplicationVersion' starts as 'Nothing'.
applicationVersionDescriptionMessage :: ApplicationVersionDescriptionMessage
applicationVersionDescriptionMessage =
    ApplicationVersionDescriptionMessage { _avdmApplicationVersion = Nothing }

-- | The 'ApplicationVersionDescription' of the application version.
avdmApplicationVersion :: Lens' ApplicationVersionDescriptionMessage (Maybe ApplicationVersionDescription)
avdmApplicationVersion =
    lens _avdmApplicationVersion
         (\rec v -> rec { _avdmApplicationVersion = v })
instance FromXML ApplicationVersionDescriptionMessage where
    parseXML x = ApplicationVersionDescriptionMessage
        <$> x .@? "ApplicationVersion"

instance ToQuery ApplicationVersionDescriptionMessage where
    toQuery ApplicationVersionDescriptionMessage{..} = mconcat
        [ "ApplicationVersion" =? _avdmApplicationVersion
        ]
-- | Wrapper for an optional Auto Scaling group name.
newtype AutoScalingGroup = AutoScalingGroup
    { _asgName :: Maybe Text -- ^ group name
    } deriving (Eq, Ord, Read, Show, Monoid)
-- | 'AutoScalingGroup' constructor; the optional 'asgName' starts as
-- 'Nothing'.
autoScalingGroup :: AutoScalingGroup
autoScalingGroup = AutoScalingGroup { _asgName = Nothing }

-- | The name of the 'AutoScalingGroup' .
asgName :: Lens' AutoScalingGroup (Maybe Text)
asgName = lens _asgName (\rec v -> rec { _asgName = v })
instance FromXML AutoScalingGroup where
    parseXML x = AutoScalingGroup
        <$> x .@? "Name"

instance ToQuery AutoScalingGroup where
    toQuery AutoScalingGroup{..} = mconcat
        [ "Name" =? _asgName
        ]
-- | Deployment state of a configuration set.  The wire format is the
-- lower-case word shown next to each constructor; parsing lower-cases
-- the input first, so it is case-insensitive.
data ConfigurationDeploymentStatus
    = Deployed -- ^ deployed
    | Failed   -- ^ failed
    | Pending  -- ^ pending
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable ConfigurationDeploymentStatus

instance FromText ConfigurationDeploymentStatus where
    parser = takeLowerText >>= \case
        "deployed" -> pure Deployed
        "failed" -> pure Failed
        "pending" -> pure Pending
        e -> fail $
            "Failure parsing ConfigurationDeploymentStatus from " ++ show e

instance ToText ConfigurationDeploymentStatus where
    toText = \case
        Deployed -> "deployed"
        Failed -> "failed"
        Pending -> "pending"

instance ToByteString ConfigurationDeploymentStatus
instance ToHeader ConfigurationDeploymentStatus
instance ToQuery ConfigurationDeploymentStatus

instance FromXML ConfigurationDeploymentStatus where
    parseXML = parseXMLText "ConfigurationDeploymentStatus"
-- | A single configuration option value; all fields optional.
data ConfigurationOptionSetting = ConfigurationOptionSetting
    { _cosNamespace    :: Maybe Text -- ^ namespace of the associated AWS resource
    , _cosOptionName   :: Maybe Text -- ^ option name
    , _cosResourceName :: Maybe Text -- ^ resource name for time-based scaling options
    , _cosValue        :: Maybe Text -- ^ current option value
    } deriving (Eq, Ord, Read, Show)
-- | 'ConfigurationOptionSetting' constructor.  All four fields are
-- optional and start as 'Nothing'; populate them through
-- 'cosNamespace', 'cosOptionName', 'cosResourceName' and 'cosValue'.
configurationOptionSetting :: ConfigurationOptionSetting
configurationOptionSetting = ConfigurationOptionSetting
    { _cosNamespace    = Nothing
    , _cosOptionName   = Nothing
    , _cosResourceName = Nothing
    , _cosValue        = Nothing
    }

-- | A unique namespace identifying the option's associated AWS resource.
cosNamespace :: Lens' ConfigurationOptionSetting (Maybe Text)
cosNamespace = lens _cosNamespace (\rec v -> rec { _cosNamespace = v })

-- | The name of the configuration option.
cosOptionName :: Lens' ConfigurationOptionSetting (Maybe Text)
cosOptionName = lens _cosOptionName (\rec v -> rec { _cosOptionName = v })

-- | A unique resource name for a time-based scaling configuration option.
cosResourceName :: Lens' ConfigurationOptionSetting (Maybe Text)
cosResourceName = lens _cosResourceName (\rec v -> rec { _cosResourceName = v })

-- | The current value for the configuration option.
cosValue :: Lens' ConfigurationOptionSetting (Maybe Text)
cosValue = lens _cosValue (\rec v -> rec { _cosValue = v })
-- NOTE: the applicative chain must stay in the data type's field order.
instance FromXML ConfigurationOptionSetting where
    parseXML x = ConfigurationOptionSetting
        <$> x .@? "Namespace"
        <*> x .@? "OptionName"
        <*> x .@? "ResourceName"
        <*> x .@? "Value"

instance ToQuery ConfigurationOptionSetting where
    toQuery ConfigurationOptionSetting{..} = mconcat
        [ "Namespace"    =? _cosNamespace
        , "OptionName"   =? _cosOptionName
        , "ResourceName" =? _cosResourceName
        , "Value"        =? _cosValue
        ]
-- | Whether an option holds a single scalar or a list of values.  The
-- wire format is capitalised (\"List\"\/\"Scalar\"); parsing lower-cases
-- the input first, so it is case-insensitive.  'List'' carries a prime
-- to avoid clashing with the Prelude.
data ConfigurationOptionValueType
    = List'  -- ^ List
    | Scalar -- ^ Scalar
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable ConfigurationOptionValueType

instance FromText ConfigurationOptionValueType where
    parser = takeLowerText >>= \case
        "list" -> pure List'
        "scalar" -> pure Scalar
        e -> fail $
            "Failure parsing ConfigurationOptionValueType from " ++ show e

instance ToText ConfigurationOptionValueType where
    toText = \case
        List' -> "List"
        Scalar -> "Scalar"

instance ToByteString ConfigurationOptionValueType
instance ToHeader ConfigurationOptionValueType
instance ToQuery ConfigurationOptionValueType

instance FromXML ConfigurationOptionValueType where
    parseXML = parseXMLText "ConfigurationOptionValueType"
-- | A configuration set together with its deployment metadata.
data ConfigurationSettingsDescription = ConfigurationSettingsDescription
    { _csdApplicationName   :: Maybe Text -- ^ owning application
    , _csdDateCreated       :: Maybe ISO8601 -- ^ creation time (UTC)
    , _csdDateUpdated       :: Maybe ISO8601 -- ^ last-modified time (UTC)
    , _csdDeploymentStatus  :: Maybe ConfigurationDeploymentStatus -- ^ see 'csdDeploymentStatus'
    , _csdDescription       :: Maybe Text -- ^ free-form description
    , _csdEnvironmentName   :: Maybe Text -- ^ associated environment, if any
    , _csdOptionSettings    :: List "member" ConfigurationOptionSetting -- ^ option values in this set
    , _csdSolutionStackName :: Maybe Text -- ^ solution stack in use
    , _csdTemplateName      :: Maybe Text -- ^ configuration template, if any
    } deriving (Eq, Read, Show)
-- | 'ConfigurationSettingsDescription' constructor.  Every 'Maybe'
-- field starts as 'Nothing' and the option-setting list as 'mempty';
-- populate via 'csdApplicationName', 'csdDateCreated', 'csdDateUpdated',
-- 'csdDeploymentStatus', 'csdDescription', 'csdEnvironmentName',
-- 'csdOptionSettings', 'csdSolutionStackName' and 'csdTemplateName'.
configurationSettingsDescription :: ConfigurationSettingsDescription
configurationSettingsDescription = ConfigurationSettingsDescription
    { _csdApplicationName   = Nothing
    , _csdDateCreated       = Nothing
    , _csdDateUpdated       = Nothing
    , _csdDeploymentStatus  = Nothing
    , _csdDescription       = Nothing
    , _csdEnvironmentName   = Nothing
    , _csdOptionSettings    = mempty
    , _csdSolutionStackName = Nothing
    , _csdTemplateName      = Nothing
    }
-- | The name of the application associated with this configuration set.
csdApplicationName :: Lens' ConfigurationSettingsDescription (Maybe Text)
csdApplicationName =
    lens _csdApplicationName (\rec v -> rec { _csdApplicationName = v })

-- | The date (in UTC time) when this configuration set was created.
csdDateCreated :: Lens' ConfigurationSettingsDescription (Maybe UTCTime)
csdDateCreated =
    lens _csdDateCreated (\rec v -> rec { _csdDateCreated = v }) . mapping _Time

-- | The date (in UTC time) when this configuration set was last modified.
csdDateUpdated :: Lens' ConfigurationSettingsDescription (Maybe UTCTime)
csdDateUpdated =
    lens _csdDateUpdated (\rec v -> rec { _csdDateUpdated = v }) . mapping _Time

-- | If this configuration set is associated with an environment, the
-- 'DeploymentStatus' parameter indicates the deployment status of this
-- configuration set:
--
-- 'null': this configuration is not associated with a running
-- environment.
--
-- 'pending': a draft configuration that is not deployed to the
-- associated environment but is in the process of deploying.
--
-- 'deployed': the configuration that is currently deployed to the
-- associated running environment.
--
-- 'failed': a draft configuration that failed to successfully deploy.
csdDeploymentStatus :: Lens' ConfigurationSettingsDescription (Maybe ConfigurationDeploymentStatus)
csdDeploymentStatus =
    lens _csdDeploymentStatus (\rec v -> rec { _csdDeploymentStatus = v })

-- | Describes this configuration set.
csdDescription :: Lens' ConfigurationSettingsDescription (Maybe Text)
csdDescription = lens _csdDescription (\rec v -> rec { _csdDescription = v })

-- | If not 'null', the name of the environment for this configuration set.
csdEnvironmentName :: Lens' ConfigurationSettingsDescription (Maybe Text)
csdEnvironmentName =
    lens _csdEnvironmentName (\rec v -> rec { _csdEnvironmentName = v })

-- | A list of the configuration options and their values in this
-- configuration set.
csdOptionSettings :: Lens' ConfigurationSettingsDescription [ConfigurationOptionSetting]
csdOptionSettings =
    lens _csdOptionSettings (\rec v -> rec { _csdOptionSettings = v })
        . _List

-- | The name of the solution stack this configuration set uses.
csdSolutionStackName :: Lens' ConfigurationSettingsDescription (Maybe Text)
csdSolutionStackName =
    lens _csdSolutionStackName (\rec v -> rec { _csdSolutionStackName = v })

-- | If not 'null', the name of the configuration template for this
-- configuration set.
csdTemplateName :: Lens' ConfigurationSettingsDescription (Maybe Text)
csdTemplateName =
    lens _csdTemplateName (\rec v -> rec { _csdTemplateName = v })
instance FromXML ConfigurationSettingsDescription where
    -- Applicative construction: the element order here is positional and
    -- must match the field order of the record constructor exactly.
    parseXML x = ConfigurationSettingsDescription
        <$> x .@? "ApplicationName"
        <*> x .@? "DateCreated"
        <*> x .@? "DateUpdated"
        <*> x .@? "DeploymentStatus"
        <*> x .@? "Description"
        <*> x .@? "EnvironmentName"
        <*> x .@? "OptionSettings" .!@ mempty -- absent element parses as the empty list
        <*> x .@? "SolutionStackName"
        <*> x .@? "TemplateName"
instance ToQuery ConfigurationSettingsDescription where
    -- Serialises each field under its AWS query-parameter name.
    toQuery ConfigurationSettingsDescription{..} = mconcat
        [ "ApplicationName"   =? _csdApplicationName
        , "DateCreated"       =? _csdDateCreated
        , "DateUpdated"       =? _csdDateUpdated
        , "DeploymentStatus"  =? _csdDeploymentStatus
        , "Description"       =? _csdDescription
        , "EnvironmentName"   =? _csdEnvironmentName
        , "OptionSettings"    =? _csdOptionSettings
        , "SolutionStackName" =? _csdSolutionStackName
        , "TemplateName"      =? _csdTemplateName
        ]
data ApplicationVersionDescription = ApplicationVersionDescription
    { _avdApplicationName :: Maybe Text
    , _avdDateCreated     :: Maybe ISO8601
    , _avdDateUpdated     :: Maybe ISO8601
    , _avdDescription     :: Maybe Text
    , _avdSourceBundle    :: Maybe S3Location
    , _avdVersionLabel    :: Maybe Text
    } deriving (Eq, Read, Show)

-- | Construct an 'ApplicationVersionDescription' with every field unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'avdApplicationName' @::@ 'Maybe' 'Text'
--
-- * 'avdDateCreated' @::@ 'Maybe' 'UTCTime'
--
-- * 'avdDateUpdated' @::@ 'Maybe' 'UTCTime'
--
-- * 'avdDescription' @::@ 'Maybe' 'Text'
--
-- * 'avdSourceBundle' @::@ 'Maybe' 'S3Location'
--
-- * 'avdVersionLabel' @::@ 'Maybe' 'Text'
--
applicationVersionDescription :: ApplicationVersionDescription
applicationVersionDescription = ApplicationVersionDescription
    { _avdApplicationName = Nothing
    , _avdDateCreated     = Nothing
    , _avdDateUpdated     = Nothing
    , _avdDescription     = Nothing
    , _avdSourceBundle    = Nothing
    , _avdVersionLabel    = Nothing
    }

-- | The name of the application associated with this release.
avdApplicationName :: Lens' ApplicationVersionDescription (Maybe Text)
avdApplicationName =
    lens _avdApplicationName (\d v -> d { _avdApplicationName = v })

-- | The creation date of the application version.
avdDateCreated :: Lens' ApplicationVersionDescription (Maybe UTCTime)
avdDateCreated =
    lens _avdDateCreated (\d v -> d { _avdDateCreated = v }) . mapping _Time

-- | The last modified date of the application version.
avdDateUpdated :: Lens' ApplicationVersionDescription (Maybe UTCTime)
avdDateUpdated =
    lens _avdDateUpdated (\d v -> d { _avdDateUpdated = v }) . mapping _Time

-- | The description of this application version.
avdDescription :: Lens' ApplicationVersionDescription (Maybe Text)
avdDescription = lens _avdDescription (\d v -> d { _avdDescription = v })

-- | The location where the source bundle is located for this version.
avdSourceBundle :: Lens' ApplicationVersionDescription (Maybe S3Location)
avdSourceBundle = lens _avdSourceBundle (\d v -> d { _avdSourceBundle = v })

-- | A label uniquely identifying the version for the associated application.
avdVersionLabel :: Lens' ApplicationVersionDescription (Maybe Text)
avdVersionLabel = lens _avdVersionLabel (\d v -> d { _avdVersionLabel = v })

instance FromXML ApplicationVersionDescription where
    -- Positional applicative parse; order matches the record constructor.
    parseXML x = ApplicationVersionDescription
        <$> x .@? "ApplicationName"
        <*> x .@? "DateCreated"
        <*> x .@? "DateUpdated"
        <*> x .@? "Description"
        <*> x .@? "SourceBundle"
        <*> x .@? "VersionLabel"

instance ToQuery ApplicationVersionDescription where
    toQuery ApplicationVersionDescription{..} = mconcat
        [ "ApplicationName" =? _avdApplicationName
        , "DateCreated"     =? _avdDateCreated
        , "DateUpdated"     =? _avdDateUpdated
        , "Description"     =? _avdDescription
        , "SourceBundle"    =? _avdSourceBundle
        , "VersionLabel"    =? _avdVersionLabel
        ]
data OptionSpecification = OptionSpecification
    { _osNamespace    :: Maybe Text
    , _osOptionName   :: Maybe Text
    , _osResourceName :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Construct an 'OptionSpecification' with every field set to 'Nothing'.
--
-- Fields (see the corresponding lenses):
--
-- * 'osNamespace' @::@ 'Maybe' 'Text'
--
-- * 'osOptionName' @::@ 'Maybe' 'Text'
--
-- * 'osResourceName' @::@ 'Maybe' 'Text'
--
optionSpecification :: OptionSpecification
optionSpecification = OptionSpecification
    { _osNamespace    = Nothing
    , _osOptionName   = Nothing
    , _osResourceName = Nothing
    }

-- | A unique namespace identifying the option's associated AWS resource.
osNamespace :: Lens' OptionSpecification (Maybe Text)
osNamespace = lens _osNamespace (\o v -> o { _osNamespace = v })

-- | The name of the configuration option.
osOptionName :: Lens' OptionSpecification (Maybe Text)
osOptionName = lens _osOptionName (\o v -> o { _osOptionName = v })

-- | A unique resource name for a time-based scaling configuration option.
osResourceName :: Lens' OptionSpecification (Maybe Text)
osResourceName = lens _osResourceName (\o v -> o { _osResourceName = v })

instance FromXML OptionSpecification where
    parseXML x = OptionSpecification
        <$> x .@? "Namespace"
        <*> x .@? "OptionName"
        <*> x .@? "ResourceName"

instance ToQuery OptionSpecification where
    toQuery OptionSpecification{..} = mconcat
        [ "Namespace"    =? _osNamespace
        , "OptionName"   =? _osOptionName
        , "ResourceName" =? _osResourceName
        ]
data EnvironmentResourceDescription = EnvironmentResourceDescription
    { _erdAutoScalingGroups    :: List "member" AutoScalingGroup
    , _erdEnvironmentName      :: Maybe Text
    , _erdInstances            :: List "member" Instance
    , _erdLaunchConfigurations :: List "member" LaunchConfiguration
    , _erdLoadBalancers        :: List "member" LoadBalancer
    , _erdQueues               :: List "member" Queue
    , _erdTriggers             :: List "member" Trigger
    } deriving (Eq, Read, Show)

-- | Construct an 'EnvironmentResourceDescription' with an unset name and
-- every resource list empty.
--
-- Fields (see the corresponding lenses):
--
-- * 'erdAutoScalingGroups' @::@ ['AutoScalingGroup']
--
-- * 'erdEnvironmentName' @::@ 'Maybe' 'Text'
--
-- * 'erdInstances' @::@ ['Instance']
--
-- * 'erdLaunchConfigurations' @::@ ['LaunchConfiguration']
--
-- * 'erdLoadBalancers' @::@ ['LoadBalancer']
--
-- * 'erdQueues' @::@ ['Queue']
--
-- * 'erdTriggers' @::@ ['Trigger']
--
environmentResourceDescription :: EnvironmentResourceDescription
environmentResourceDescription = EnvironmentResourceDescription
    { _erdAutoScalingGroups    = mempty
    , _erdEnvironmentName      = Nothing
    , _erdInstances            = mempty
    , _erdLaunchConfigurations = mempty
    , _erdLoadBalancers        = mempty
    , _erdQueues               = mempty
    , _erdTriggers             = mempty
    }

-- | The 'AutoScalingGroups' used by this environment.
erdAutoScalingGroups :: Lens' EnvironmentResourceDescription [AutoScalingGroup]
erdAutoScalingGroups =
    lens _erdAutoScalingGroups (\r v -> r { _erdAutoScalingGroups = v }) . _List

-- | The name of the environment.
erdEnvironmentName :: Lens' EnvironmentResourceDescription (Maybe Text)
erdEnvironmentName =
    lens _erdEnvironmentName (\r v -> r { _erdEnvironmentName = v })

-- | The Amazon EC2 instances used by this environment.
erdInstances :: Lens' EnvironmentResourceDescription [Instance]
erdInstances = lens _erdInstances (\r v -> r { _erdInstances = v }) . _List

-- | The Auto Scaling launch configurations in use by this environment.
erdLaunchConfigurations :: Lens' EnvironmentResourceDescription [LaunchConfiguration]
erdLaunchConfigurations =
    lens _erdLaunchConfigurations (\r v -> r { _erdLaunchConfigurations = v })
        . _List

-- | The LoadBalancers in use by this environment.
erdLoadBalancers :: Lens' EnvironmentResourceDescription [LoadBalancer]
erdLoadBalancers =
    lens _erdLoadBalancers (\r v -> r { _erdLoadBalancers = v }) . _List

-- | The queues used by this environment.
erdQueues :: Lens' EnvironmentResourceDescription [Queue]
erdQueues = lens _erdQueues (\r v -> r { _erdQueues = v }) . _List

-- | The 'AutoScaling' triggers in use by this environment.
erdTriggers :: Lens' EnvironmentResourceDescription [Trigger]
erdTriggers = lens _erdTriggers (\r v -> r { _erdTriggers = v }) . _List

instance FromXML EnvironmentResourceDescription where
    -- Positional applicative parse; absent list elements become mempty.
    parseXML x = EnvironmentResourceDescription
        <$> x .@? "AutoScalingGroups" .!@ mempty
        <*> x .@? "EnvironmentName"
        <*> x .@? "Instances" .!@ mempty
        <*> x .@? "LaunchConfigurations" .!@ mempty
        <*> x .@? "LoadBalancers" .!@ mempty
        <*> x .@? "Queues" .!@ mempty
        <*> x .@? "Triggers" .!@ mempty

instance ToQuery EnvironmentResourceDescription where
    toQuery EnvironmentResourceDescription{..} = mconcat
        [ "AutoScalingGroups"    =? _erdAutoScalingGroups
        , "EnvironmentName"      =? _erdEnvironmentName
        , "Instances"            =? _erdInstances
        , "LaunchConfigurations" =? _erdLaunchConfigurations
        , "LoadBalancers"        =? _erdLoadBalancers
        , "Queues"               =? _erdQueues
        , "Triggers"             =? _erdTriggers
        ]
data Queue = Queue
    { _qName :: Maybe Text
    , _qURL  :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Construct a 'Queue' with both fields unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'qName' @::@ 'Maybe' 'Text'
--
-- * 'qURL' @::@ 'Maybe' 'Text'
--
queue :: Queue
queue = Queue { _qName = Nothing, _qURL = Nothing }

-- | The name of the queue.
qName :: Lens' Queue (Maybe Text)
qName = lens _qName (\q v -> q { _qName = v })

-- | The URL of the queue.
qURL :: Lens' Queue (Maybe Text)
qURL = lens _qURL (\q v -> q { _qURL = v })

instance FromXML Queue where
    parseXML x = Queue <$> x .@? "Name" <*> x .@? "URL"

instance ToQuery Queue where
    toQuery Queue{..} = mconcat
        [ "Name" =? _qName
        , "URL"  =? _qURL
        ]
-- | Status values reported for an environment.
data EnvironmentStatus
    = Launching   -- ^ Launching
    | Ready       -- ^ Ready
    | Terminated  -- ^ Terminated
    | Terminating -- ^ Terminating
    | Updating    -- ^ Updating
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable EnvironmentStatus

instance FromText EnvironmentStatus where
    -- Parsing is case-insensitive: input is lower-cased before matching.
    parser = takeLowerText >>= \case
        "launching"   -> pure Launching
        "ready"       -> pure Ready
        "terminated"  -> pure Terminated
        "terminating" -> pure Terminating
        "updating"    -> pure Updating
        e             -> fail $
            "Failure parsing EnvironmentStatus from " ++ show e

instance ToText EnvironmentStatus where
    -- Rendering uses the PascalCase wire form (asymmetric with 'parser',
    -- which lower-cases its input first).
    toText = \case
        Launching   -> "Launching"
        Ready       -> "Ready"
        Terminated  -> "Terminated"
        Terminating -> "Terminating"
        Updating    -> "Updating"

instance ToByteString EnvironmentStatus
instance ToHeader     EnvironmentStatus
instance ToQuery      EnvironmentStatus

instance FromXML EnvironmentStatus where
    parseXML = parseXMLText "EnvironmentStatus"
data LoadBalancerDescription = LoadBalancerDescription
    { _lbdDomain           :: Maybe Text
    , _lbdListeners        :: List "member" Listener
    , _lbdLoadBalancerName :: Maybe Text
    } deriving (Eq, Read, Show)

-- | Construct a 'LoadBalancerDescription' with unset name and domain and an
-- empty listener list.
--
-- Fields (see the corresponding lenses):
--
-- * 'lbdDomain' @::@ 'Maybe' 'Text'
--
-- * 'lbdListeners' @::@ ['Listener']
--
-- * 'lbdLoadBalancerName' @::@ 'Maybe' 'Text'
--
loadBalancerDescription :: LoadBalancerDescription
loadBalancerDescription = LoadBalancerDescription
    { _lbdDomain           = Nothing
    , _lbdListeners        = mempty
    , _lbdLoadBalancerName = Nothing
    }

-- | The domain name of the LoadBalancer.
lbdDomain :: Lens' LoadBalancerDescription (Maybe Text)
lbdDomain = lens _lbdDomain (\d v -> d { _lbdDomain = v })

-- | A list of Listeners used by the LoadBalancer.
lbdListeners :: Lens' LoadBalancerDescription [Listener]
lbdListeners = lens _lbdListeners (\d v -> d { _lbdListeners = v }) . _List

-- | The name of the LoadBalancer.
lbdLoadBalancerName :: Lens' LoadBalancerDescription (Maybe Text)
lbdLoadBalancerName =
    lens _lbdLoadBalancerName (\d v -> d { _lbdLoadBalancerName = v })

instance FromXML LoadBalancerDescription where
    parseXML x = LoadBalancerDescription
        <$> x .@? "Domain"
        <*> x .@? "Listeners" .!@ mempty
        <*> x .@? "LoadBalancerName"

instance ToQuery LoadBalancerDescription where
    toQuery LoadBalancerDescription{..} = mconcat
        [ "Domain"           =? _lbdDomain
        , "Listeners"        =? _lbdListeners
        , "LoadBalancerName" =? _lbdLoadBalancerName
        ]
newtype ApplicationDescriptionMessage = ApplicationDescriptionMessage
    { _admApplication :: Maybe ApplicationDescription
    } deriving (Eq, Read, Show)

-- | Construct an 'ApplicationDescriptionMessage' with its single field unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'admApplication' @::@ 'Maybe' 'ApplicationDescription'
--
applicationDescriptionMessage :: ApplicationDescriptionMessage
applicationDescriptionMessage =
    ApplicationDescriptionMessage { _admApplication = Nothing }

-- | The 'ApplicationDescription' of the application.
admApplication :: Lens' ApplicationDescriptionMessage (Maybe ApplicationDescription)
admApplication = lens _admApplication (\m v -> m { _admApplication = v })

instance FromXML ApplicationDescriptionMessage where
    parseXML x = ApplicationDescriptionMessage <$> x .@? "Application"

instance ToQuery ApplicationDescriptionMessage where
    toQuery ApplicationDescriptionMessage{..} = mconcat
        [ "Application" =? _admApplication
        ]
data EnvironmentTier = EnvironmentTier
    { _etName    :: Maybe Text
    , _etType    :: Maybe Text
    , _etVersion :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Construct an 'EnvironmentTier' with every field unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'etName' @::@ 'Maybe' 'Text'
--
-- * 'etType' @::@ 'Maybe' 'Text'
--
-- * 'etVersion' @::@ 'Maybe' 'Text'
--
environmentTier :: EnvironmentTier
environmentTier = EnvironmentTier
    { _etName    = Nothing
    , _etType    = Nothing
    , _etVersion = Nothing
    }

-- | The name of this environment tier.
etName :: Lens' EnvironmentTier (Maybe Text)
etName = lens _etName (\t v -> t { _etName = v })

-- | The type of this environment tier.
etType :: Lens' EnvironmentTier (Maybe Text)
etType = lens _etType (\t v -> t { _etType = v })

-- | The version of this environment tier.
etVersion :: Lens' EnvironmentTier (Maybe Text)
etVersion = lens _etVersion (\t v -> t { _etVersion = v })

instance FromXML EnvironmentTier where
    parseXML x = EnvironmentTier
        <$> x .@? "Name"
        <*> x .@? "Type"
        <*> x .@? "Version"

instance ToQuery EnvironmentTier where
    toQuery EnvironmentTier{..} = mconcat
        [ "Name"    =? _etName
        , "Type"    =? _etType
        , "Version" =? _etVersion
        ]
newtype LoadBalancer = LoadBalancer
    { _lbName :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)

-- | Construct a 'LoadBalancer' with its single field unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'lbName' @::@ 'Maybe' 'Text'
--
loadBalancer :: LoadBalancer
loadBalancer = LoadBalancer { _lbName = Nothing }

-- | The name of the LoadBalancer.
lbName :: Lens' LoadBalancer (Maybe Text)
lbName = lens _lbName (\b v -> b { _lbName = v })

instance FromXML LoadBalancer where
    parseXML x = LoadBalancer <$> x .@? "Name"

instance ToQuery LoadBalancer where
    toQuery LoadBalancer{..} = mconcat
        [ "Name" =? _lbName
        ]
newtype EnvironmentResourcesDescription = EnvironmentResourcesDescription
    { _erdLoadBalancer :: Maybe LoadBalancerDescription
    } deriving (Eq, Read, Show)

-- | Construct an 'EnvironmentResourcesDescription' with its single field
-- unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'erdLoadBalancer' @::@ 'Maybe' 'LoadBalancerDescription'
--
environmentResourcesDescription :: EnvironmentResourcesDescription
environmentResourcesDescription =
    EnvironmentResourcesDescription { _erdLoadBalancer = Nothing }

-- | Describes the LoadBalancer.
erdLoadBalancer :: Lens' EnvironmentResourcesDescription (Maybe LoadBalancerDescription)
erdLoadBalancer = lens _erdLoadBalancer (\r v -> r { _erdLoadBalancer = v })

instance FromXML EnvironmentResourcesDescription where
    parseXML x = EnvironmentResourcesDescription <$> x .@? "LoadBalancer"

instance ToQuery EnvironmentResourcesDescription where
    toQuery EnvironmentResourcesDescription{..} = mconcat
        [ "LoadBalancer" =? _erdLoadBalancer
        ]
data OptionRestrictionRegex = OptionRestrictionRegex
    { _orrLabel   :: Maybe Text
    , _orrPattern :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Construct an 'OptionRestrictionRegex' with both fields unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'orrLabel' @::@ 'Maybe' 'Text'
--
-- * 'orrPattern' @::@ 'Maybe' 'Text'
--
optionRestrictionRegex :: OptionRestrictionRegex
optionRestrictionRegex = OptionRestrictionRegex
    { _orrLabel   = Nothing
    , _orrPattern = Nothing
    }

-- | A unique name representing this regular expression.
orrLabel :: Lens' OptionRestrictionRegex (Maybe Text)
orrLabel = lens _orrLabel (\r v -> r { _orrLabel = v })

-- | The regular expression pattern that a string configuration option value
-- with this restriction must match.
orrPattern :: Lens' OptionRestrictionRegex (Maybe Text)
orrPattern = lens _orrPattern (\r v -> r { _orrPattern = v })

instance FromXML OptionRestrictionRegex where
    parseXML x = OptionRestrictionRegex
        <$> x .@? "Label"
        <*> x .@? "Pattern"

instance ToQuery OptionRestrictionRegex where
    toQuery OptionRestrictionRegex{..} = mconcat
        [ "Label"   =? _orrLabel
        , "Pattern" =? _orrPattern
        ]
data ConfigurationOptionDescription = ConfigurationOptionDescription
    { _codChangeSeverity :: Maybe Text
    , _codDefaultValue   :: Maybe Text
    , _codMaxLength      :: Maybe Int
    , _codMaxValue       :: Maybe Int
    , _codMinValue       :: Maybe Int
    , _codName           :: Maybe Text
    , _codNamespace      :: Maybe Text
    , _codRegex          :: Maybe OptionRestrictionRegex
    , _codUserDefined    :: Maybe Bool
    , _codValueOptions   :: List "member" Text
    , _codValueType      :: Maybe ConfigurationOptionValueType
    } deriving (Eq, Read, Show)

-- | 'ConfigurationOptionDescription' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'codChangeSeverity' @::@ 'Maybe' 'Text'
--
-- * 'codDefaultValue' @::@ 'Maybe' 'Text'
--
-- * 'codMaxLength' @::@ 'Maybe' 'Int'
--
-- * 'codMaxValue' @::@ 'Maybe' 'Int'
--
-- * 'codMinValue' @::@ 'Maybe' 'Int'
--
-- * 'codName' @::@ 'Maybe' 'Text'
--
-- * 'codNamespace' @::@ 'Maybe' 'Text'
--
-- * 'codRegex' @::@ 'Maybe' 'OptionRestrictionRegex'
--
-- * 'codUserDefined' @::@ 'Maybe' 'Bool'
--
-- * 'codValueOptions' @::@ ['Text']
--
-- * 'codValueType' @::@ 'Maybe' 'ConfigurationOptionValueType'
--
configurationOptionDescription :: ConfigurationOptionDescription
configurationOptionDescription = ConfigurationOptionDescription
    { _codNamespace      = Nothing
    , _codName           = Nothing
    , _codDefaultValue   = Nothing
    , _codChangeSeverity = Nothing
    , _codUserDefined    = Nothing
    , _codValueType      = Nothing
    , _codValueOptions   = mempty
    , _codMinValue       = Nothing
    , _codMaxValue       = Nothing
    , _codMaxLength      = Nothing
    , _codRegex          = Nothing
    }

-- | An indication of which action is required if the value for this
-- configuration option changes:
--
-- 'NoInterruption' : There is no interruption to the environment or
-- application availability.
--
-- 'RestartEnvironment' : The environment is entirely restarted, all AWS
-- resources are deleted and recreated, and the environment is unavailable
-- during the process.
--
-- 'RestartApplicationServer' : The environment is available the entire time.
-- However, a short application outage occurs when the application servers on
-- the running Amazon EC2 instances are restarted.
codChangeSeverity :: Lens' ConfigurationOptionDescription (Maybe Text)
codChangeSeverity =
    lens _codChangeSeverity (\s a -> s { _codChangeSeverity = a })

-- | The default value for this configuration option.
codDefaultValue :: Lens' ConfigurationOptionDescription (Maybe Text)
codDefaultValue = lens _codDefaultValue (\s a -> s { _codDefaultValue = a })

-- | If specified, the configuration option must be a string value no longer than
-- this value.
codMaxLength :: Lens' ConfigurationOptionDescription (Maybe Int)
codMaxLength = lens _codMaxLength (\s a -> s { _codMaxLength = a })

-- | If specified, the configuration option must be a numeric value less than
-- this value.
codMaxValue :: Lens' ConfigurationOptionDescription (Maybe Int)
codMaxValue = lens _codMaxValue (\s a -> s { _codMaxValue = a })

-- | If specified, the configuration option must be a numeric value greater than
-- this value.
codMinValue :: Lens' ConfigurationOptionDescription (Maybe Int)
codMinValue = lens _codMinValue (\s a -> s { _codMinValue = a })

-- | The name of the configuration option.
codName :: Lens' ConfigurationOptionDescription (Maybe Text)
codName = lens _codName (\s a -> s { _codName = a })

-- | A unique namespace identifying the option's associated AWS resource.
codNamespace :: Lens' ConfigurationOptionDescription (Maybe Text)
codNamespace = lens _codNamespace (\s a -> s { _codNamespace = a })

-- | If specified, the configuration option must be a string value that satisfies
-- this regular expression.
codRegex :: Lens' ConfigurationOptionDescription (Maybe OptionRestrictionRegex)
codRegex = lens _codRegex (\s a -> s { _codRegex = a })

-- | An indication of whether the user defined this configuration option:
--
-- 'true' : This configuration option was defined by the user. It is a valid
-- choice for specifying this as an 'Option to Remove' when updating
-- configuration settings.
--
-- 'false' : This configuration was not defined by the user.
--
-- Constraint: You can remove only 'UserDefined' options from a configuration.
--
-- Valid Values: 'true' | 'false'
codUserDefined :: Lens' ConfigurationOptionDescription (Maybe Bool)
codUserDefined = lens _codUserDefined (\s a -> s { _codUserDefined = a })

-- | If specified, values for the configuration option are selected from this
-- list.
codValueOptions :: Lens' ConfigurationOptionDescription [Text]
codValueOptions = lens _codValueOptions (\s a -> s { _codValueOptions = a }) . _List

-- | An indication of which type of values this option has and whether it is
-- allowable to select one or more than one of the possible values:
--
-- 'Scalar' : Values for this option are a single selection from the possible
-- values, or an unformatted string or numeric value governed by the
-- 'MIN/MAX/Regex' constraints.
--
-- 'List' : Values for this option are multiple selections from the possible
-- values.
--
-- 'Boolean' : Values for this option are either 'true' or 'false' .
codValueType :: Lens' ConfigurationOptionDescription (Maybe ConfigurationOptionValueType)
codValueType = lens _codValueType (\s a -> s { _codValueType = a })

instance FromXML ConfigurationOptionDescription where
    -- Positional applicative parse; order matches the record constructor.
    parseXML x = ConfigurationOptionDescription
        <$> x .@? "ChangeSeverity"
        <*> x .@? "DefaultValue"
        <*> x .@? "MaxLength"
        <*> x .@? "MaxValue"
        <*> x .@? "MinValue"
        <*> x .@? "Name"
        <*> x .@? "Namespace"
        <*> x .@? "Regex"
        <*> x .@? "UserDefined"
        <*> x .@? "ValueOptions" .!@ mempty
        <*> x .@? "ValueType"

instance ToQuery ConfigurationOptionDescription where
    toQuery ConfigurationOptionDescription{..} = mconcat
        [ "ChangeSeverity" =? _codChangeSeverity
        , "DefaultValue"   =? _codDefaultValue
        , "MaxLength"      =? _codMaxLength
        , "MaxValue"       =? _codMaxValue
        , "MinValue"       =? _codMinValue
        , "Name"           =? _codName
        , "Namespace"      =? _codNamespace
        , "Regex"          =? _codRegex
        , "UserDefined"    =? _codUserDefined
        , "ValueOptions"   =? _codValueOptions
        , "ValueType"      =? _codValueType
        ]
data SourceConfiguration = SourceConfiguration
    { _scApplicationName :: Maybe Text
    , _scTemplateName    :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Construct a 'SourceConfiguration' with both fields unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'scApplicationName' @::@ 'Maybe' 'Text'
--
-- * 'scTemplateName' @::@ 'Maybe' 'Text'
--
sourceConfiguration :: SourceConfiguration
sourceConfiguration = SourceConfiguration
    { _scApplicationName = Nothing
    , _scTemplateName    = Nothing
    }

-- | The name of the application associated with the configuration.
scApplicationName :: Lens' SourceConfiguration (Maybe Text)
scApplicationName =
    lens _scApplicationName (\c v -> c { _scApplicationName = v })

-- | The name of the configuration template.
scTemplateName :: Lens' SourceConfiguration (Maybe Text)
scTemplateName = lens _scTemplateName (\c v -> c { _scTemplateName = v })

instance FromXML SourceConfiguration where
    parseXML x = SourceConfiguration
        <$> x .@? "ApplicationName"
        <*> x .@? "TemplateName"

instance ToQuery SourceConfiguration where
    toQuery SourceConfiguration{..} = mconcat
        [ "ApplicationName" =? _scApplicationName
        , "TemplateName"    =? _scTemplateName
        ]
data EnvironmentInfoDescription = EnvironmentInfoDescription
    { _eidEc2InstanceId   :: Maybe Text
    , _eidInfoType        :: Maybe EnvironmentInfoType
    , _eidMessage         :: Maybe Text
    , _eidSampleTimestamp :: Maybe ISO8601
    } deriving (Eq, Read, Show)

-- | Construct an 'EnvironmentInfoDescription' with every field unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'eidEc2InstanceId' @::@ 'Maybe' 'Text'
--
-- * 'eidInfoType' @::@ 'Maybe' 'EnvironmentInfoType'
--
-- * 'eidMessage' @::@ 'Maybe' 'Text'
--
-- * 'eidSampleTimestamp' @::@ 'Maybe' 'UTCTime'
--
environmentInfoDescription :: EnvironmentInfoDescription
environmentInfoDescription = EnvironmentInfoDescription
    { _eidEc2InstanceId   = Nothing
    , _eidInfoType        = Nothing
    , _eidMessage         = Nothing
    , _eidSampleTimestamp = Nothing
    }

-- | The Amazon EC2 Instance ID for this information.
eidEc2InstanceId :: Lens' EnvironmentInfoDescription (Maybe Text)
eidEc2InstanceId = lens _eidEc2InstanceId (\d v -> d { _eidEc2InstanceId = v })

-- | The type of information retrieved.
eidInfoType :: Lens' EnvironmentInfoDescription (Maybe EnvironmentInfoType)
eidInfoType = lens _eidInfoType (\d v -> d { _eidInfoType = v })

-- | The retrieved information.
eidMessage :: Lens' EnvironmentInfoDescription (Maybe Text)
eidMessage = lens _eidMessage (\d v -> d { _eidMessage = v })

-- | The time stamp when this information was retrieved.
eidSampleTimestamp :: Lens' EnvironmentInfoDescription (Maybe UTCTime)
eidSampleTimestamp =
    lens _eidSampleTimestamp (\d v -> d { _eidSampleTimestamp = v })
        . mapping _Time

instance FromXML EnvironmentInfoDescription where
    parseXML x = EnvironmentInfoDescription
        <$> x .@? "Ec2InstanceId"
        <*> x .@? "InfoType"
        <*> x .@? "Message"
        <*> x .@? "SampleTimestamp"

instance ToQuery EnvironmentInfoDescription where
    toQuery EnvironmentInfoDescription{..} = mconcat
        [ "Ec2InstanceId"   =? _eidEc2InstanceId
        , "InfoType"        =? _eidInfoType
        , "Message"         =? _eidMessage
        , "SampleTimestamp" =? _eidSampleTimestamp
        ]
data S3Location = S3Location
    { _slS3Bucket :: Maybe Text
    , _slS3Key    :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Construct an 'S3Location' with both fields unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'slS3Bucket' @::@ 'Maybe' 'Text'
--
-- * 'slS3Key' @::@ 'Maybe' 'Text'
--
s3Location :: S3Location
s3Location = S3Location { _slS3Bucket = Nothing, _slS3Key = Nothing }

-- | The Amazon S3 bucket where the data is located.
slS3Bucket :: Lens' S3Location (Maybe Text)
slS3Bucket = lens _slS3Bucket (\l v -> l { _slS3Bucket = v })

-- | The Amazon S3 key where the data is located.
slS3Key :: Lens' S3Location (Maybe Text)
slS3Key = lens _slS3Key (\l v -> l { _slS3Key = v })

instance FromXML S3Location where
    parseXML x = S3Location <$> x .@? "S3Bucket" <*> x .@? "S3Key"

instance ToQuery S3Location where
    toQuery S3Location{..} = mconcat
        [ "S3Bucket" =? _slS3Bucket
        , "S3Key"    =? _slS3Key
        ]
data ValidationMessage = ValidationMessage
    { _vmMessage    :: Maybe Text
    , _vmNamespace  :: Maybe Text
    , _vmOptionName :: Maybe Text
    , _vmSeverity   :: Maybe ValidationSeverity
    } deriving (Eq, Read, Show)

-- | 'ValidationMessage' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'vmMessage' @::@ 'Maybe' 'Text'
--
-- * 'vmNamespace' @::@ 'Maybe' 'Text'
--
-- * 'vmOptionName' @::@ 'Maybe' 'Text'
--
-- * 'vmSeverity' @::@ 'Maybe' 'ValidationSeverity'
--
validationMessage :: ValidationMessage
validationMessage = ValidationMessage
    { _vmMessage    = Nothing
    , _vmSeverity   = Nothing
    , _vmNamespace  = Nothing
    , _vmOptionName = Nothing
    }

-- | A message describing the error or warning.
vmMessage :: Lens' ValidationMessage (Maybe Text)
vmMessage = lens _vmMessage (\s a -> s { _vmMessage = a })

-- | The namespace of the configuration option this message concerns.
vmNamespace :: Lens' ValidationMessage (Maybe Text)
vmNamespace = lens _vmNamespace (\s a -> s { _vmNamespace = a })

-- | The name of the configuration option this message concerns.
vmOptionName :: Lens' ValidationMessage (Maybe Text)
vmOptionName = lens _vmOptionName (\s a -> s { _vmOptionName = a })

-- | An indication of the severity of this message:
--
-- error: This message indicates that this is not a valid setting for an
-- option.
--
-- warning: This message is providing information you should take into
-- account.
vmSeverity :: Lens' ValidationMessage (Maybe ValidationSeverity)
vmSeverity = lens _vmSeverity (\s a -> s { _vmSeverity = a })

instance FromXML ValidationMessage where
    -- Positional applicative parse; order matches the record constructor.
    parseXML x = ValidationMessage
        <$> x .@? "Message"
        <*> x .@? "Namespace"
        <*> x .@? "OptionName"
        <*> x .@? "Severity"

instance ToQuery ValidationMessage where
    toQuery ValidationMessage{..} = mconcat
        [ "Message"    =? _vmMessage
        , "Namespace"  =? _vmNamespace
        , "OptionName" =? _vmOptionName
        , "Severity"   =? _vmSeverity
        ]
-- | Severity level of a 'ValidationMessage'. Constructors carry a @VS@ prefix
-- because the wire values (@error@ / @warning@) would otherwise clash with
-- common identifiers.
data ValidationSeverity
    = VSError   -- ^ error
    | VSWarning -- ^ warning
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable ValidationSeverity

instance FromText ValidationSeverity where
    -- Parsing is case-insensitive: input is lower-cased before matching.
    parser = takeLowerText >>= \case
        "error"   -> pure VSError
        "warning" -> pure VSWarning
        e         -> fail $
            "Failure parsing ValidationSeverity from " ++ show e

instance ToText ValidationSeverity where
    toText = \case
        VSError   -> "error"
        VSWarning -> "warning"

instance ToByteString ValidationSeverity
instance ToHeader     ValidationSeverity
instance ToQuery      ValidationSeverity

instance FromXML ValidationSeverity where
    parseXML = parseXMLText "ValidationSeverity"
newtype Trigger = Trigger
    { _tName :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)

-- | Construct a 'Trigger' with its single field unset.
--
-- Fields (see the corresponding lenses):
--
-- * 'tName' @::@ 'Maybe' 'Text'
--
trigger :: Trigger
trigger = Trigger { _tName = Nothing }

-- | The name of the trigger.
tName :: Lens' Trigger (Maybe Text)
tName = lens _tName (\t v -> t { _tName = v })

instance FromXML Trigger where
    parseXML x = Trigger <$> x .@? "Name"

instance ToQuery Trigger where
    toQuery Trigger{..} = mconcat
        [ "Name" =? _tName
        ]
-- | The kind of environment information that can be retrieved.
-- NOTE(review): @Tail'@ is primed, presumably to avoid shadowing the
-- Prelude's 'tail' — confirm against the generator's naming rules.
data EnvironmentInfoType
    = Bundle -- ^ bundle
    | Tail'  -- ^ tail
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable EnvironmentInfoType

instance FromText EnvironmentInfoType where
    -- Parsing is case-insensitive: input is lower-cased before matching.
    parser = takeLowerText >>= \case
        "bundle" -> pure Bundle
        "tail"   -> pure Tail'
        e        -> fail $
            "Failure parsing EnvironmentInfoType from " ++ show e

instance ToText EnvironmentInfoType where
    toText = \case
        Bundle -> "bundle"
        Tail'  -> "tail"

instance ToByteString EnvironmentInfoType
instance ToHeader     EnvironmentInfoType
instance ToQuery      EnvironmentInfoType

instance FromXML EnvironmentInfoType where
    parseXML = parseXMLText "EnvironmentInfoType"
-- | Description of an AWS Elastic Beanstalk environment.  Every field is
-- optional on the wire, hence the 'Maybe' wrappers.
--
-- NOTE(review): keep the field order in sync with the positional '<*>' chain
-- in this type's 'FromXML' instance; reordering record fields would silently
-- rewire which XML element populates which field.
data EnvironmentDescription = EnvironmentDescription
    { _ed1AbortableOperationInProgress :: Maybe Bool
    , _ed1ApplicationName :: Maybe Text
    , _ed1CNAME :: Maybe Text
    , _ed1DateCreated :: Maybe ISO8601
    , _ed1DateUpdated :: Maybe ISO8601
    , _ed1Description :: Maybe Text
    , _ed1EndpointURL :: Maybe Text
    , _ed1EnvironmentId :: Maybe Text
    , _ed1EnvironmentName :: Maybe Text
    , _ed1Health :: Maybe EnvironmentHealth
    , _ed1Resources :: Maybe EnvironmentResourcesDescription
    , _ed1SolutionStackName :: Maybe Text
    , _ed1Status :: Maybe EnvironmentStatus
    , _ed1TemplateName :: Maybe Text
    , _ed1Tier :: Maybe EnvironmentTier
    , _ed1VersionLabel :: Maybe Text
    } deriving (Eq, Read, Show)
-- | 'EnvironmentDescription' constructor.
--
-- Every field defaults to 'Nothing'; populate values through the
-- corresponding lenses ('ed1AbortableOperationInProgress',
-- 'ed1ApplicationName', 'ed1CNAME', 'ed1DateCreated', 'ed1DateUpdated',
-- 'ed1Description', 'ed1EndpointURL', 'ed1EnvironmentId',
-- 'ed1EnvironmentName', 'ed1Health', 'ed1Resources',
-- 'ed1SolutionStackName', 'ed1Status', 'ed1TemplateName', 'ed1Tier',
-- 'ed1VersionLabel').
--
-- Fields are initialised in record-declaration order for readability; the
-- result is identical regardless of record-syntax ordering.
environmentDescription :: EnvironmentDescription
environmentDescription = EnvironmentDescription
    { _ed1AbortableOperationInProgress = Nothing
    , _ed1ApplicationName              = Nothing
    , _ed1CNAME                        = Nothing
    , _ed1DateCreated                  = Nothing
    , _ed1DateUpdated                  = Nothing
    , _ed1Description                  = Nothing
    , _ed1EndpointURL                  = Nothing
    , _ed1EnvironmentId                = Nothing
    , _ed1EnvironmentName              = Nothing
    , _ed1Health                       = Nothing
    , _ed1Resources                    = Nothing
    , _ed1SolutionStackName            = Nothing
    , _ed1Status                       = Nothing
    , _ed1TemplateName                 = Nothing
    , _ed1Tier                         = Nothing
    , _ed1VersionLabel                 = Nothing
    }
-- | Indicates if there is an in-progress environment configuration update or
-- application version deployment that you can cancel.
--
-- 'true:' There is an update in progress.
--
-- 'false:' There are no updates currently in progress.
ed1AbortableOperationInProgress :: Lens' EnvironmentDescription (Maybe Bool)
ed1AbortableOperationInProgress = lens _ed1AbortableOperationInProgress set'
  where
    set' s a = s { _ed1AbortableOperationInProgress = a }

-- | The name of the application associated with this environment.
ed1ApplicationName :: Lens' EnvironmentDescription (Maybe Text)
ed1ApplicationName = lens _ed1ApplicationName set'
  where
    set' s a = s { _ed1ApplicationName = a }

-- | The URL to the CNAME for this environment.
ed1CNAME :: Lens' EnvironmentDescription (Maybe Text)
ed1CNAME = lens _ed1CNAME set'
  where
    set' s a = s { _ed1CNAME = a }

-- | The creation date for this environment.  Stored as 'ISO8601' on the
-- record; 'mapping' '_Time' exposes it as 'UTCTime'.
ed1DateCreated :: Lens' EnvironmentDescription (Maybe UTCTime)
ed1DateCreated = lens _ed1DateCreated set' . mapping _Time
  where
    set' s a = s { _ed1DateCreated = a }

-- | The last modified date for this environment.
ed1DateUpdated :: Lens' EnvironmentDescription (Maybe UTCTime)
ed1DateUpdated = lens _ed1DateUpdated set' . mapping _Time
  where
    set' s a = s { _ed1DateUpdated = a }

-- | Describes this environment.
ed1Description :: Lens' EnvironmentDescription (Maybe Text)
ed1Description = lens _ed1Description set'
  where
    set' s a = s { _ed1Description = a }

-- | For load-balanced, autoscaling environments, the URL to the
-- LoadBalancer.  For single-instance environments, the IP address of the
-- instance.
ed1EndpointURL :: Lens' EnvironmentDescription (Maybe Text)
ed1EndpointURL = lens _ed1EndpointURL set'
  where
    set' s a = s { _ed1EndpointURL = a }

-- | The ID of this environment.
ed1EnvironmentId :: Lens' EnvironmentDescription (Maybe Text)
ed1EnvironmentId = lens _ed1EnvironmentId set'
  where
    set' s a = s { _ed1EnvironmentId = a }

-- | The name of this environment.
ed1EnvironmentName :: Lens' EnvironmentDescription (Maybe Text)
ed1EnvironmentName = lens _ed1EnvironmentName set'
  where
    set' s a = s { _ed1EnvironmentName = a }

-- | Describes the health status of the environment.  AWS Elastic Beanstalk
-- indicates the failure levels for a running environment:
--
-- 'Red': Indicates the environment is not responsive.  Occurs when three or
-- more consecutive failures occur for an environment.
--
-- 'Yellow': Indicates that something is wrong.  Occurs when two consecutive
-- failures occur for an environment.
--
-- 'Green': Indicates the environment is healthy and fully functional.
--
-- 'Grey': Default health for a new environment.  The environment is not
-- fully launched and health checks have not started, or health checks are
-- suspended during an 'UpdateEnvironment' or 'RestartEnvironment' request.
--
-- Default: 'Grey'
ed1Health :: Lens' EnvironmentDescription (Maybe EnvironmentHealth)
ed1Health = lens _ed1Health set'
  where
    set' s a = s { _ed1Health = a }

-- | The description of the AWS resources used by this environment.
ed1Resources :: Lens' EnvironmentDescription (Maybe EnvironmentResourcesDescription)
ed1Resources = lens _ed1Resources set'
  where
    set' s a = s { _ed1Resources = a }

-- | The name of the 'SolutionStack' deployed with this environment.
ed1SolutionStackName :: Lens' EnvironmentDescription (Maybe Text)
ed1SolutionStackName = lens _ed1SolutionStackName set'
  where
    set' s a = s { _ed1SolutionStackName = a }

-- | The current operational status of the environment:
--
-- 'Launching': Environment is in the process of initial deployment.
-- 'Updating': Environment is in the process of updating its configuration
-- settings or application version.  'Ready': Environment is available to
-- have an action performed on it, such as update or terminate.
-- 'Terminating': Environment is in the shut-down process.  'Terminated':
-- Environment is not running.
ed1Status :: Lens' EnvironmentDescription (Maybe EnvironmentStatus)
ed1Status = lens _ed1Status set'
  where
    set' s a = s { _ed1Status = a }

-- | The name of the configuration template used to originally launch this
-- environment.
ed1TemplateName :: Lens' EnvironmentDescription (Maybe Text)
ed1TemplateName = lens _ed1TemplateName set'
  where
    set' s a = s { _ed1TemplateName = a }

-- | Describes the current tier of this environment.
ed1Tier :: Lens' EnvironmentDescription (Maybe EnvironmentTier)
ed1Tier = lens _ed1Tier set'
  where
    set' s a = s { _ed1Tier = a }

-- | The application version deployed in this environment.
ed1VersionLabel :: Lens' EnvironmentDescription (Maybe Text)
ed1VersionLabel = lens _ed1VersionLabel set'
  where
    set' s a = s { _ed1VersionLabel = a }
-- NOTE(review): this parse is positional -- each '<*>' line feeds the record
-- fields of 'EnvironmentDescription' in declaration order, so the element
-- names below must stay aligned with the record's field order.
instance FromXML EnvironmentDescription where
    parseXML x = EnvironmentDescription
        <$> x .@? "AbortableOperationInProgress"
        <*> x .@? "ApplicationName"
        <*> x .@? "CNAME"
        <*> x .@? "DateCreated"
        <*> x .@? "DateUpdated"
        <*> x .@? "Description"
        <*> x .@? "EndpointURL"
        <*> x .@? "EnvironmentId"
        <*> x .@? "EnvironmentName"
        <*> x .@? "Health"
        <*> x .@? "Resources"
        <*> x .@? "SolutionStackName"
        <*> x .@? "Status"
        <*> x .@? "TemplateName"
        <*> x .@? "Tier"
        <*> x .@? "VersionLabel"
-- | Serialise every field of an 'EnvironmentDescription' into query
-- parameters, using explicit record accessors.
instance ToQuery EnvironmentDescription where
    toQuery ed = mconcat
        [ "AbortableOperationInProgress" =? _ed1AbortableOperationInProgress ed
        , "ApplicationName"              =? _ed1ApplicationName              ed
        , "CNAME"                        =? _ed1CNAME                        ed
        , "DateCreated"                  =? _ed1DateCreated                  ed
        , "DateUpdated"                  =? _ed1DateUpdated                  ed
        , "Description"                  =? _ed1Description                  ed
        , "EndpointURL"                  =? _ed1EndpointURL                  ed
        , "EnvironmentId"                =? _ed1EnvironmentId                ed
        , "EnvironmentName"              =? _ed1EnvironmentName              ed
        , "Health"                       =? _ed1Health                       ed
        , "Resources"                    =? _ed1Resources                    ed
        , "SolutionStackName"            =? _ed1SolutionStackName            ed
        , "Status"                       =? _ed1Status                       ed
        , "TemplateName"                 =? _ed1TemplateName                 ed
        , "Tier"                         =? _ed1Tier                         ed
        , "VersionLabel"                 =? _ed1VersionLabel                 ed
        ]
-- | A load balancer listener: a port together with a protocol.
data Listener = Listener
    { _lPort     :: Maybe Int
    , _lProtocol :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | 'Listener' constructor.  Both fields, accessible through the 'lPort'
-- ('Maybe' 'Int') and 'lProtocol' ('Maybe' 'Text') lenses, default to
-- 'Nothing'.
listener :: Listener
listener = Listener
    { _lPort     = Nothing
    , _lProtocol = Nothing
    }

-- | The port that is used by the Listener.
lPort :: Lens' Listener (Maybe Int)
lPort = lens _lPort set'
  where
    set' s a = s { _lPort = a }

-- | The protocol that is used by the Listener.
lProtocol :: Lens' Listener (Maybe Text)
lProtocol = lens _lProtocol set'
  where
    set' s a = s { _lProtocol = a }

instance FromXML Listener where
    parseXML x = Listener <$> x .@? "Port" <*> x .@? "Protocol"

instance ToQuery Listener where
    toQuery l = mconcat
        [ "Port"     =? _lPort     l
        , "Protocol" =? _lProtocol l
        ]
-- | Health status reported for a running environment.
--
-- Constructor order is significant for the derived 'Ord' and 'Enum'
-- instances and is kept as generated.  Note the asymmetric casing: values
-- are rendered capitalised on the wire but parsed case-insensitively via
-- 'takeLowerText'.
data EnvironmentHealth
    = Green  -- ^ Green
    | Grey   -- ^ Grey
    | Red    -- ^ Red
    | Yellow -- ^ Yellow
    deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable EnvironmentHealth

instance FromText EnvironmentHealth where
    parser = do
        t <- takeLowerText
        case t of
            "green"  -> pure Green
            "grey"   -> pure Grey
            "red"    -> pure Red
            "yellow" -> pure Yellow
            e        -> fail $
                "Failure parsing EnvironmentHealth from " ++ show e

instance ToText EnvironmentHealth where
    toText x = case x of
        Green  -> "Green"
        Grey   -> "Grey"
        Red    -> "Red"
        Yellow -> "Yellow"

instance ToByteString EnvironmentHealth
instance ToHeader     EnvironmentHealth
instance ToQuery      EnvironmentHealth

instance FromXML EnvironmentHealth where
    parseXML = parseXMLText "EnvironmentHealth"
-- | An Amazon EC2 instance participating in an environment.
newtype Instance = Instance
    { _iId :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)

-- | 'Instance' constructor.  The single field, accessible through the 'iId'
-- lens ('Maybe' 'Text'), defaults to 'Nothing'.
instance' :: Instance
instance' = Instance Nothing

-- | The ID of the Amazon EC2 instance.
iId :: Lens' Instance (Maybe Text)
iId = lens _iId set'
  where
    set' s a = s { _iId = a }

instance FromXML Instance where
    parseXML x = fmap Instance (x .@? "Id")

instance ToQuery Instance where
    toQuery i = mconcat
        [ "Id" =? _iId i
        ]
-- | A solution stack together with the application-archive file types it
-- accepts.  '_ssdPermittedFileTypes' is wrapped in @List "member"@ to drive
-- the flattened XML list encoding.
data SolutionStackDescription = SolutionStackDescription
    { _ssdPermittedFileTypes :: List "member" Text
    , _ssdSolutionStackName :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'SolutionStackDescription' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ssdPermittedFileTypes' @::@ ['Text']
--
-- * 'ssdSolutionStackName' @::@ 'Maybe' 'Text'
--
solutionStackDescription :: SolutionStackDescription
solutionStackDescription = SolutionStackDescription
    { _ssdSolutionStackName = Nothing
    , _ssdPermittedFileTypes = mempty
    }
-- | The permitted file types allowed for a solution stack.
ssdPermittedFileTypes :: Lens' SolutionStackDescription [Text]
ssdPermittedFileTypes =
    lens _ssdPermittedFileTypes (\s a -> s { _ssdPermittedFileTypes = a })
        . _List -- unwrap the @List "member"@ newtype so callers see a plain list
-- | The name of the solution stack.
ssdSolutionStackName :: Lens' SolutionStackDescription (Maybe Text)
ssdSolutionStackName =
    lens _ssdSolutionStackName (\s a -> s { _ssdSolutionStackName = a })
instance FromXML SolutionStackDescription where
    parseXML x = SolutionStackDescription
        <$> x .@? "PermittedFileTypes" .!@ mempty -- absent element => empty list
        <*> x .@? "SolutionStackName"
instance ToQuery SolutionStackDescription where
    toQuery SolutionStackDescription{..} = mconcat
        [ "PermittedFileTypes" =? _ssdPermittedFileTypes
        , "SolutionStackName" =? _ssdSolutionStackName
        ]
| romanb/amazonka | amazonka-elasticbeanstalk/gen/Network/AWS/ElasticBeanstalk/Types.hs | mpl-2.0 | 74,709 | 0 | 37 | 17,096 | 13,021 | 7,405 | 5,616 | -1 | -1 |
module Main where
import Criterion.Main
import Data.Either
import Data.String.Utils (replace)
import Parsing.Parse (parse)
import Parsing.ParseOptions (defaultParseOptions)
import Rendering.Render (toHtml)
import Rendering.RenderOptions (RenderOptions, defaultRenderOptions, inlineCSS, inlineJS)
parseAndRender :: RenderOptions -> String -> String
parseAndRender options = either show (toHtml options) . parse defaultParseOptions
main :: IO ()
main = do
readme <- readFile "Readme.md"
let readmeWithErrors = replace "](" "]" readme
defaultMain [bench "Readme.md" $ nf (parseAndRender defaultRenderOptions) readme,
bench "Readme.md with inlining" $ nf (parseAndRender defaultRenderOptions {inlineJS=True, inlineCSS=True}) readme,
bench "Readme.md with errors" $ nf (parseAndRender defaultRenderOptions) readmeWithErrors]
| alexbecker/blogdown | test/Benchmark.hs | agpl-3.0 | 870 | 0 | 14 | 138 | 237 | 125 | 112 | 17 | 1 |
{-
Author : shelarcy 2004
Advised by: Sean Seefried
Adapted from: BezCurve.hs
By: (c) Sven Panne 2003 <[email protected]>
"BezCurve.hs (adapted from fog.c which is (c) Silicon Graphics, Inc)
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE"
This program renders a lighted, filled Bezier surface, using two-dimensional
evaluators.
-}
-- ghci -package wx -package OpenGL
module Main
where
import Data.List ( transpose )
import Graphics.UI.WX
import Graphics.UI.WXCore
import Graphics.Rendering.OpenGL
-- Many code and Type are ambiguous, so we must qualify names.
import qualified Graphics.UI.WX as WX
import qualified Graphics.Rendering.OpenGL as GL
main :: IO()
main = start gui
defaultWidth = 320
defaultHeight = 200
gui = do
f <- frame [ text := "Simple OpenGL" ]
glCanvas <- glCanvasCreateEx f 0 (Rect 0 0 defaultWidth defaultHeight) 0 "GLCanvas" [GL_RGBA] nullPalette
glContext <- glContextCreateFromNull glCanvas
glCanvasSetCurrent glCanvas glContext
let glWidgetLayout = fill $ widget glCanvas
WX.set f [ layout := glWidgetLayout
-- you have to use the paintRaw event. Otherwise the OpenGL window won't
-- show anything!
, on paintRaw := paintGL glCanvas
]
repaint f
convWG (WX.Size w h) = (GL.Size (convInt32 w) (convInt32 h))
convInt32 = fromInteger . toInteger
-- This paint function gets the current glCanvas for knowing where to draw in.
-- It is possible to have multiple GL windows in your application.
paintGL :: GLCanvas a -> DC() -> WX.Rect -> [WX.Rect]-> IO ()
paintGL glWindow dc myrect _ = do
myInit
reshape $ convWG $ rectSize myrect
display
glCanvasSwapBuffers glWindow
return ()
ctrlPoints :: [[GL.Vertex3 GL.GLfloat]]
ctrlPoints = [
[ GL.Vertex3 (-1.5) (-1.5) 4.0, GL.Vertex3 (-0.5) (-1.5) 2.0,
GL.Vertex3 0.5 (-1.5) (-1.0), GL.Vertex3 1.5 (-1.5) 2.0 ],
[ GL.Vertex3 (-1.5) (-0.5) 1.0, GL.Vertex3 (-0.5) (-0.5) 3.0,
GL.Vertex3 0.5 (-0.5) 0.0, GL.Vertex3 1.5 (-0.5) (-1.0) ],
[ GL.Vertex3 (-1.5) 0.5 4.0, GL.Vertex3 (-0.5) 0.5 0.0,
GL.Vertex3 0.5 0.5 3.0, GL.Vertex3 1.5 0.5 4.0 ],
[ GL.Vertex3 (-1.5) 1.5 (-2.0), GL.Vertex3 (-0.5) 1.5 (-2.0),
GL.Vertex3 0.5 1.5 0.0, GL.Vertex3 1.5 1.5 (-1.0) ]]
initlights :: IO ()
initlights = do
GL.lighting GL.$= GL.Enabled
GL.light (GL.Light 0) GL.$= GL.Enabled
GL.ambient (GL.Light 0) GL.$= GL.Color4 0.2 0.2 0.2 1.0
GL.position (GL.Light 0) GL.$= GL.Vertex4 0 0 2 1
GL.materialDiffuse GL.Front GL.$= GL.Color4 0.6 0.6 0.6 1.0
GL.materialSpecular GL.Front GL.$= GL.Color4 1.0 1.0 1.0 1.0
GL.materialShininess GL.Front GL.$= 50
myInit :: IO ()
myInit = do
GL.clearColor GL.$= GL.Color4 0.1 0.1 0.6 0
GL.depthFunc GL.$= Just GL.Less
m <- GL.newMap2 (0, 1) (0, 1) (transpose ctrlPoints)
GL.map2 GL.$= Just (m :: GLmap2 GL.Vertex3 GL.GLfloat)
GL.autoNormal GL.$= GL.Enabled
mapGrid2 GL.$= ((20, (0, 1)), (20, (0, 1 :: GL.GLfloat)))
initlights -- for lighted version only
display = do
GL.clear [ GL.ColorBuffer, GL.DepthBuffer ]
GL.preservingMatrix $ do
GL.rotate (85 :: GL.GLfloat) (GL.Vector3 1 1 1)
evalMesh2 Fill (0, 20) (0, 20)
GL.flush
reshape mysize@(GL.Size w h) = do
GL.viewport GL.$= (GL.Position 0 0, mysize)
GL.matrixMode GL.$= GL.Projection
GL.loadIdentity
let wf = fromIntegral w
hf = fromIntegral h
if w <= h
then GL.ortho (-4.0) 4.0 (-4.0*hf/wf) (4.0*hf/wf) (-4.0) 4.0
else GL.ortho (-4.0*wf/hf) (4.0*wf/hf) (-4.0) 4.0 (-4.0) 4.0
GL.matrixMode GL.$= GL.Modelview 0
GL.loadIdentity
| ekmett/wxHaskell | samples/contrib/GLCanvas.hs | lgpl-2.1 | 3,862 | 0 | 13 | 963 | 1,355 | 693 | 662 | 74 | 2 |
-- Hoff -- A gatekeeper for your commits
-- Copyright 2016 Ruud van Asseldonk
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- A copy of the License has been included in the root of the repository.
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module WebInterface (renderPage, viewIndex, viewProject) where
import Control.Monad (forM_, unless, void)
import Data.FileEmbed (embedStringFile)
import Data.Text (Text)
import Data.Text.Format.Params (Params)
import Data.Text.Lazy (toStrict)
import Prelude hiding (id, div, head, span)
import Text.Blaze ((!), toValue)
import Text.Blaze.Html.Renderer.Utf8
import Text.Blaze.Html5 (Html, a, body, div, docTypeHtml, h1, h2, head, meta, p, span, style, title, toHtml)
import Text.Blaze.Html5.Attributes (class_, charset, content, href, id, name)
import qualified Data.ByteString.Lazy as LazyByteString
import qualified Data.Text as Text
import qualified Data.Text.Format as Text
import Project (ProjectInfo, ProjectState, PullRequest, PullRequestId (..))
import qualified Project
-- Conversion function because of Haskell string type madness. This is just
-- Text.format, but returning a strict Text instead of a lazy one.
format :: Params ps => Text.Format -> ps -> Text
format formatString params = toStrict $ Text.format formatString params
-- TODO: Minify this css at inclusion time.
stylesheet :: Text
stylesheet = $(embedStringFile "static/style.css")
-- Wraps the given body html in html for an actual page, and encodes the
-- resulting page in utf-8.
renderPage :: Text -> Html -> LazyByteString.ByteString
renderPage pageTitle bodyHtml = renderHtml $ docTypeHtml $ do
head $ do
meta ! charset "utf-8"
meta ! name "viewport" ! content "width=device-width, initial-scale=1"
meta ! name "robots" ! content "noindex, nofollow"
title $ toHtml pageTitle
style $ toHtml stylesheet
body $
div ! id "content" $
bodyHtml
-- Render an "owner/repo" link.
viewProjectInfo :: ProjectInfo -> Html
viewProjectInfo info =
let
owner = Project.owner info
repo = Project.repository info
repoUrl = format "/{}/{}" [owner, repo]
in
p $ do
a ! href (toValue repoUrl) $ do
toHtml owner
void "\x2009/\x2009" -- U+2009 is a thin space.
toHtml repo
-- Renders the body html for the index page.
viewIndex :: [ProjectInfo] -> Html
viewIndex infos =
let
in do
h1 "Hoff"
h2 "About"
p $ do
void "Hoff is a gatekeeper for your commits. See "
a ! href "https://github.com/ruuda/hoff" $ "github.com/ruuda/hoff"
void " for more information."
h2 "Tracked repositories"
mapM_ viewProjectInfo infos
-- Renders the body html for the status page of a project.
viewProject :: ProjectInfo -> ProjectState -> Html
viewProject info state =
let
owner = Project.owner info
repo = Project.repository info
ownerUrl = format "https://github.com/{}" [owner]
repoUrl = format "https://github.com/{}/{}" (owner, repo)
in do
h1 $ do
a ! href (toValue ownerUrl) $ toHtml owner
void "\x2009/\x2009" -- U+2009 is a thin space.
a ! href (toValue repoUrl) $ toHtml repo
viewProjectQueues info state
-- Render the html for the queues in a project, excluding the header and footer.
viewProjectQueues :: ProjectInfo -> ProjectState -> Html
viewProjectQueues info state = do
let
pullRequests = Project.classifyPullRequests state
filterPrs predicate = fmap fst $ filter (predicate . snd) pullRequests
let building = filterPrs (== Project.PrStatusBuildPending)
h2 "Building"
if null building
then p "There are no builds in progress at the moment."
else viewList viewPullRequestWithApproval info state building
let approved = filterPrs (== Project.PrStatusApproved)
unless (null approved) $ do
h2 "Approved"
viewList viewPullRequestWithApproval info state approved
let awaitingApproval = filterPrs (== Project.PrStatusAwaitingApproval)
unless (null awaitingApproval) $ do
h2 "Awaiting approval"
viewList viewPullRequest info state awaitingApproval
let failed = filterPrs $ \ st ->
(st == Project.PrStatusFailedConflict) || (st == Project.PrStatusFailedBuild)
unless (null failed) $ do
h2 "Failed"
-- TODO: Also render failure reason: conflicted or build failed.
viewList viewPullRequestWithApproval info state failed
-- TODO: Keep a list of the last n integrated pull requests, so they stay
-- around for a bit after they have been closed.
let integrated = filterPrs (== Project.PrStatusIntegrated)
unless (null integrated) $ do
h2 "Recently integrated"
viewList viewPullRequestWithApproval info state integrated
-- Renders the contents of a list item with a link for a pull request.
viewPullRequest :: ProjectInfo -> PullRequestId -> PullRequest -> Html
viewPullRequest info (PullRequestId n) pullRequest =
let
url = format "https://github.com/{}/{}/pull/{}"
(Project.owner info, Project.repository info, n)
in
a ! href (toValue url) $ toHtml $ Project.title pullRequest
viewPullRequestWithApproval :: ProjectInfo -> PullRequestId -> PullRequest -> Html
viewPullRequestWithApproval info prId pullRequest = do
viewPullRequest info prId pullRequest
case Project.approvedBy pullRequest of
Just username ->
span ! class_ "review" $ do
void "Approved by "
-- TODO: Link to approval comment, not just username.
let url = Text.append "https://github.com/" username
a ! href (toValue url) $ toHtml username
Nothing ->
fail $
"Tried to render approval link for pull request " ++ (show prId) ++
" which was not approved. This is a programming error."
-- Render all pull requests in the list with the given view function.
-- TODO: Use a safer abstraction, than a list of IDs for which it is not clear
-- from the types that lookup will not fail.
viewList :: (ProjectInfo -> PullRequestId -> PullRequest -> Html)
-> ProjectInfo
-> ProjectState
-> [PullRequestId]
-> Html
viewList view info state prIds = forM_ prIds $ \ prId ->
let
Just pr = Project.lookupPullRequest prId state
in
p $ view info prId pr
| ruuda/hoff | src/WebInterface.hs | apache-2.0 | 6,316 | 0 | 17 | 1,291 | 1,460 | 741 | 719 | 124 | 2 |
-- starman.hs
-- Jeremy Singer
-- based on a Functional Programming
-- exercise from Glasgow,
-- (inherited from John O'Donnell)
check :: String -> String -> Char -> (Bool, String)
-- check whether a single char is in the mystery word
check word display c
= (c `elem` word, [if x==c
then c
else y | (x,y) <- zip word display])
turn :: String -> String -> Int -> IO ()
-- single turn for user
turn word display n =
do if n==0
then putStrLn "You lose"
else if word==display
then putStrLn "You win!"
else mkguess word display n
mkguess :: String -> String -> Int -> IO ()
-- user inputs a single char (first on the line)
mkguess word display n =
do putStrLn (display ++ " " ++ take n (repeat '*'))
putStr " Enter your guess: "
q <- getLine
let (correct, display') = check word display (q!!0)
let n' = if correct then n else n-1
turn word display' n'
-- notice how turn and mkguess have the same signatures,
-- and are mutually recursive. Is this elegant?
starman :: String -> Int -> IO ()
-- top-level function. Usage: starman "WORD" NUM_TURNS
starman word n = turn word ['-' | x <- word] n
| P7h/FutureLearn__FP_in_Haskell | Week2_Code__starman.hs | apache-2.0 | 1,191 | 0 | 12 | 314 | 365 | 186 | 179 | 22 | 3 |
-- | Simple rate limiting combinator.
module NationStates.RateLimit (
RateLimit(),
newRateLimit,
rateLimit,
setDelay,
) where
import Control.Concurrent
import Control.Exception
import System.Clock
data RateLimit = RateLimit {
_rateLock :: !(MVar TimeSpec),
_rateDelay :: !TimeSpec
}
-- | Create a new rate limiter with the specified delay.
--
-- The rate limiter is thread-safe, and can be shared between threads.
newRateLimit
:: Rational
-- ^ Delay, in seconds
-> IO RateLimit
newRateLimit delay' = do
lock <- newMVar $! negate delay
return $ RateLimit lock delay
where
delay = fromSeconds delay'
-- | Run the given action, pausing as necessary to keep under the rate limit.
rateLimit :: RateLimit -> IO a -> IO a
rateLimit (RateLimit lock delay) action =
mask $ \restore -> do
prev <- takeMVar lock
now <- getTime Monotonic
threadDelay' (prev + delay - now) `onException` putMVar lock prev
restore action `finally` (putMVar lock =<< getTime Monotonic)
threadDelay' :: TimeSpec -> IO ()
threadDelay' t = threadDelay . fromInteger $ timeSpecAsNanoSecs t `div` 1000
-- | Create a new rate limiter with the same lock but a different delay.
setDelay :: Rational -> RateLimit -> RateLimit
setDelay delay' (RateLimit lock _) = RateLimit lock (fromSeconds delay')
fromSeconds :: Rational -> TimeSpec
fromSeconds n = fromInteger . ceiling $ n * 1000 * 1000 * 1000
| lfairy/nationstates | NationStates/RateLimit.hs | apache-2.0 | 1,470 | 0 | 13 | 331 | 368 | 191 | 177 | 37 | 1 |
import GHC.Conc (setNumCapabilities)
import LogicGrowsOnTrees.Parallel.Adapter.Threads
(RunOutcome(..)
,TerminationReason(..)
,exploreTree
,setNumberOfWorkers
)
import LogicGrowsOnTrees.Utils.WordSum (WordSum(..))
import LogicGrowsOnTrees.Examples.Queens (nqueensUsingBitsSolutions)
main = do
setNumCapabilities 2
RunOutcome statistics termination_reason <-
exploreTree (setNumberOfWorkers 2)
.
fmap (const $ WordSum 1)
.
nqueensUsingBitsSolutions
$
10
case termination_reason of
Aborted progress -> putStrLn "Count aborted."
Completed (WordSum count) -> putStrLn $ "Found " ++ show count ++ " solutions."
Failure progress message -> putStrLn $ "Failed: " ++ message | gcross/LogicGrowsOnTrees | LogicGrowsOnTrees/tutorial/tutorial-6.hs | bsd-2-clause | 780 | 3 | 15 | 184 | 203 | 100 | 103 | 22 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Text.XML.Cursor.FromXML
( FromXML
, fromXML
, (!<@>)
, (?<@>)
, (!<|>)
, cont
, (?<|>)
, (?<.>)
, (!<.>)
, (!<=>)
, (?<=>)
, (<//.>)
, (!<//.>)
, (<//=>)
, (!<//=>)
, (!<//|>)
, XmlException, xmlErrorMessage
) where
import Control.Applicative (Applicative)
import Control.Exception (Exception)
import Control.Failure (Failure)
import Control.Monad ((<=<))
import Data.Maybe (listToMaybe)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable (Typeable)
import Text.XML.Cursor ( Cursor, ($|), (&/), ($/), laxAttribute, laxElement
, content, force )
-- | FromXML inspired by Aeson's FromJSON typeclass.
class FromXML a where
fromXML :: (Functor m, Applicative m, Failure XmlException m) => Cursor -> m a
-- | Get the value of an attribute at the cursor, and throw an exception if the
-- attribute does not exist.
(!<@>) :: Failure XmlException m => Cursor -> Text -> m Text
el !<@> n = forceEx ("missing " ++ T.unpack n) $ el $| laxAttribute n
-- | Attempt to get the value of an attribute at the cursor, returning @Nothing@
-- if the attribute does not exist.
(?<@>) :: Cursor -> Text -> Maybe Text
el ?<@> n = listToMaybe $ el $| laxAttribute n
-- | Get the text content of an element, throwing an exception if the element
-- does not exist.
(!<|>) :: Failure XmlException m => Cursor -> Text -> m Text
el !<|> n = forceEx ("missing " ++ T.unpack n) $ el $/ (cont <=< laxElement n)
-- | Get text content from the current point of the cursor, throwing an
-- exception if there is no content.
cont :: Failure XmlException m => Cursor -> m Text
cont el = forceEx "no content" $ el $/ content
-- | Get content from an element, returning @Nothing@ if the element cannot be
-- found.
(?<|>) :: Cursor -> Text -> Maybe Text
el ?<|> n = listToMaybe $ el $/ (laxElement n &/ content)
-- | Attempt to move into an element, returning @Nothing@ if the element does
-- not exist.
(?<.>) :: Cursor -> Text -> Maybe Cursor
el ?<.> n = listToMaybe $ el $/ laxElement n
-- | Move the cursor into an element, throwing an exception if the element does
-- not exist.
(!<.>) :: (Failure XmlException m) => Cursor -> Text -> m Cursor
el !<.> n = forceEx ("missing " ++ T.unpack n) $ el $/ laxElement n
-- | Move the cursor into an element, and use the current cursor context to
-- deserialize XML into an "a". Throw an exception if the element does not
-- exist.
(!<=>) :: (Failure XmlException m, Applicative m, FromXML a)
=> Cursor -> Text -> m a
el !<=> n = fromXML =<< el !<.> n
-- | Attempt to move the cursor into an element, and deserialize it into an
-- @a@. If the element does not exist, @Nothing@ will be returned.
(?<=>) :: (FromXML a) => Cursor -> Text -> Maybe a
el ?<=> n = fromXML =<< el ?<.> n
-- REFACTOR: ! is inaccurate. It returns an array so there is no opportunity for
-- failure. Find leaf nodes given list of paths to walk down
(<//.>) :: Cursor -> [Text] -> [Cursor]
el <//.> path = el $/ (foldl1 (&/) $ map laxElement path)
(!<//.>) :: (Functor m, Applicative m, Failure XmlException m)
=> Cursor -> [Text] -> m Cursor
el !<//.> path =
forceEx ("missing " ++ pathStr) $ el $/ (foldl1 (&/) $ map laxElement path)
where pathStr = T.unpack . T.concat $ path
(<//=>) :: (FromXML a, Functor m, Applicative m, Failure XmlException m)
=> Cursor -> [Text] -> m [a]
el <//=> path = mapM fromXML $ el <//.> path
(!<//=>) :: (FromXML a, Functor m, Applicative m, Failure XmlException m)
=> Cursor -> [Text] -> m a
el !<//=> path = fromXML =<< el !<//.> path
(!<//|>) :: Cursor -> [Text] -> [Text]
el !<//|> path = concatMap ($/ content) $ el <//.> path
forceEx :: Failure XmlException m => String -> [a] -> m a
forceEx = force . XmlException
newtype XmlException = XmlException { xmlErrorMessage :: String }
deriving (Show, Typeable)
instance Exception XmlException
| ocharles/Web-MusicBrainz | Text/XML/Cursor/FromXML.hs | bsd-2-clause | 4,055 | 0 | 11 | 866 | 1,172 | 654 | 518 | 72 | 1 |
{- |
Module : Data.Conduit.EndOnQ
Copyright : (c) Nathan Ingle
Licence : BSD2
Maintainer : [email protected]
Stability : experimental
Portability : portable
Provides a conduit to watch standard input and terminate upon receipt of 'q'.
-}
module Data.Conduit.EndOnQ (endOnQ) where
import Data.Conduit
import Control.Monad.IO.Class (liftIO)
import System.IO
endOnQ :: Conduit a IO a
endOnQ = do
liftIO $ hSetBuffering stdin NoBuffering
liftIO $ hSetEcho stdin False
keyPressed <- liftIO $ hReady stdin
if keyPressed
then do
k <- liftIO getChar
case k of
'q' -> return ()
_ -> keepGoing
else keepGoing
where keepGoing = do
c <- await
case c of
Just c' -> yield c'
Nothing -> return ()
endOnQ
| nathaningle/hs-ifstat | src/Data/Conduit/EndOnQ.hs | bsd-2-clause | 751 | 5 | 14 | 169 | 187 | 93 | 94 | 22 | 4 |
-- http://www.codewars.com/kata/540c013634e6bac0350000a5
module Codewars.Kata.Unique where
import Control.Arrow
projectPartners :: Integer -> Integer
projectPartners = (`div` 2) . uncurry (*) . (id &&& pred) | Bodigrim/katas | src/haskell/7-Unique-Pairs.hs | bsd-2-clause | 210 | 0 | 7 | 25 | 53 | 33 | 20 | 4 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
-- |A Node that detects UFOs (white pixels on a black sky background)
-- in a video feed from a sensor.
module DetectUFO (detectUFO, main) where
import Ros.Node
import qualified Data.Vector.Storable as V
import Data.Word (Word8)
import Ros.Sensor_msgs.Image (Image(..), width, height, encoding, _data)
findPt :: Image -> IO ()
findPt (Image {width, height, encoding, _data})
| encoding == "mono8" = maybe noPt showPt p
| otherwise = putStrLn "Unsupported image format"
where p = V.elemIndex 255 _data
toTheta = iatan2 . translate . (`divMod` fi width)
iatan2 (y,x) = atan2 (fromIntegral y) (fromIntegral x) * 180 / pi
showPt index = putStrLn $ "UFO at angle: " ++ show (toTheta index)
noPt = putStrLn "Couldn't find UFO"
fi = fromIntegral
translate (y,x) = (y - (fi height `div` 2), x - (fi width `div` 2))
detectUFO :: Node ()
detectUFO = subscribe "video" >>= runHandler findPt >> return ()
main = runNode "Detect" detectUFO
| rgleichman/roshask | Examples/NodeCompose/src/DetectUFO.hs | bsd-3-clause | 1,017 | 0 | 11 | 216 | 344 | 189 | 155 | 20 | 1 |
{-# LANGUAGE OverlappingInstances, TypeSynonymInstances, FlexibleInstances #-}
-- | Define BERT terms their binary encoding & decoding and a typeclass
-- for converting Haskell values to BERT terms and back.
--
-- We define a number of convenient instances for 'BERT'. Users will
-- probably want to define their own instances for composite types.
module Data.BERT.Term
( BERT(..)
, showTerm
, parseTerm
) where
import Control.Monad
import Control.Applicative
import Data.Bits
import Data.Char
import Data.Int
import Data.Binary
import Data.Binary.Put
import Data.Binary.Get
import Data.List
import Data.Time
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString.Lazy.Char8 as C
import Data.Map (Map)
import qualified Data.Map as Map
import Text.Printf
import Data.BERT.Types
import Data.BERT.Parser
-- The 0th-hour as per the BERT spec.
zeroHour = UTCTime (read "1970-01-01") 0

-- | Split a 'UTCTime' into BERT's (megaseconds, seconds, microseconds)
-- triple, measured from 'zeroHour' (the Unix epoch).
decomposeTime :: UTCTime -> (Int, Int, Int)
decomposeTime t = (mS, s, uS)
  where
    d = diffUTCTime t zeroHour
    (mS, s) = (floor d) `divMod` 1000000
    uS = floor $ 1000000 * (snd $ properFraction d)

-- | Inverse of 'decomposeTime': rebuild a 'UTCTime' from the
-- (megaseconds, seconds, microseconds) triple.
composeTime :: (Int, Int, Int) -> UTCTime
composeTime (mS, s, uS) = addUTCTime seconds zeroHour
  where
    mS' = fromIntegral mS
    s' = fromIntegral s
    uS' = fromIntegral uS
    seconds = ((mS' * 1000000) + s' + (uS' / 1000000))
-- Another design would be to split the Term type into
-- SimpleTerm|CompositeTerm, and then do everything in one go, but
-- that complicates syntax and semantics for end users. Let's do this
-- one ugly thing instead, eh?
-- Wrap a composite term as {bert, <tag>, ...rest}.
ct b rest = TupleTerm $ [AtomTerm "bert", AtomTerm b] ++ rest

-- Lower a composite (sugared) term to its primitive tuple encoding,
-- so it can be printed/serialised like any other tuple.
compose NilTerm = ListTerm []
compose (BoolTerm True) = ct "true" []
compose (BoolTerm False) = ct "false" []
compose (DictionaryTerm kvs) =
  ct "dict" [ListTerm $ map (\(k, v) -> TupleTerm [k, v]) kvs]
compose (TimeTerm t) =
  ct "time" [IntTerm mS, IntTerm s, IntTerm uS]
  where
    (mS, s, uS) = decomposeTime t
compose (RegexTerm s os) =
  ct "regex" [BytelistTerm (C.pack s),
              TupleTerm [ListTerm $ map AtomTerm os]]
-- Primitive terms must not reach here; callers only pass composites.
compose _ = error "invalid composite term"
-- Render a term in Erlang-like concrete syntax (display/debugging).
showTerm (IntTerm x) = show x
showTerm (FloatTerm x) = printf "%15.15e" x
showTerm (AtomTerm "") = ""
showTerm (AtomTerm a@(x:xs))
  | isAsciiLower x = a               -- bare atom
  | otherwise = "'" ++ a ++ "'"      -- atom requiring quotes
showTerm (TupleTerm ts) =
  "{" ++ intercalate ", " (map showTerm ts) ++ "}"
showTerm (BytelistTerm bs) = show $ C.unpack bs
showTerm (ListTerm ts) =
  "[" ++ intercalate ", " (map showTerm ts) ++ "]"
showTerm (BinaryTerm b)
  -- All-ASCII binaries render as a string; others byte-by-byte.
  | all (isAscii . chr . fromIntegral) (B.unpack b) =
      wrap $ "\"" ++ C.unpack b ++ "\""
  | otherwise =
      wrap $ intercalate ", " $ map show $ B.unpack b
  where
    wrap x = "<<" ++ x ++ ">>"
showTerm (BigintTerm x) = show x
showTerm (BigbigintTerm x) = show x
-- All other terms are composite:
showTerm t = showTerm . compose $ t
-- | Conversion between Haskell values and BERT 'Term's.
class BERT a where
  -- | Introduce a 'Term' from a Haskell value.
  showBERT :: a -> Term
  -- | Attempt to read a haskell value from a 'Term'.
  readBERT :: Term -> (Either String a)
-- Herein are some instances for common Haskell data types. To do
-- anything more complicated, you should make your own instance.
instance BERT Term where
  showBERT = id
  readBERT = return . id

instance BERT Int where
  showBERT = IntTerm
  readBERT (IntTerm value) = return value
  readBERT _ = fail "Invalid integer type"

instance BERT Bool where
  showBERT = BoolTerm
  readBERT (BoolTerm x) = return x
  readBERT _ = fail "Invalid bool type"

-- Note: 'Integer' is always written as a big bignum, but either bignum
-- representation is accepted on read.
instance BERT Integer where
  showBERT = BigbigintTerm
  readBERT (BigintTerm x) = return x
  readBERT (BigbigintTerm x) = return x
  readBERT _ = fail "Invalid integer type"

instance BERT Float where
  showBERT = FloatTerm
  readBERT (FloatTerm value) = return value
  readBERT _ = fail "Invalid floating point type"

-- Strings may arrive as bytelists, binaries, atoms or char-code lists.
instance BERT String where
  showBERT = BytelistTerm . C.pack
  readBERT (BytelistTerm x) = return $ C.unpack x
  readBERT (BinaryTerm x) = return $ C.unpack x
  readBERT (AtomTerm x) = return x
  readBERT (ListTerm xs) = mapM readBERT xs >>= return . map chr
  readBERT _ = fail "Invalid string type"

instance BERT ByteString where
  showBERT = BytelistTerm
  readBERT (BytelistTerm value) = return value
  readBERT _ = fail "Invalid bytestring type"

instance (BERT a) => BERT [a] where
  showBERT xs = ListTerm $ map showBERT xs
  readBERT (ListTerm xs) = mapM readBERT xs
  readBERT _ = fail "Invalid list type"

instance (BERT a, BERT b) => BERT (a, b) where
  showBERT (a, b) = TupleTerm [showBERT a, showBERT b]
  readBERT (TupleTerm [a, b]) = liftM2 (,) (readBERT a) (readBERT b)
  readBERT _ = fail "Invalid tuple(2) type"

instance (BERT a, BERT b, BERT c) => BERT (a, b, c) where
  showBERT (a, b, c) = TupleTerm [showBERT a, showBERT b, showBERT c]
  readBERT (TupleTerm [a, b, c]) =
    liftM3 (,,) (readBERT a) (readBERT b) (readBERT c)
  readBERT _ = fail "Invalid tuple(3) type"

instance (BERT a, BERT b, BERT c, BERT d) => BERT (a, b, c, d) where
  showBERT (a, b, c, d) =
    TupleTerm [showBERT a, showBERT b, showBERT c, showBERT d]
  readBERT (TupleTerm [a, b, c, d]) =
    liftM4 (,,,) (readBERT a) (readBERT b) (readBERT c) (readBERT d)
  readBERT _ = fail "Invalid tuple(4) type"

instance (Ord k, BERT k, BERT v) => BERT (Map k v) where
  showBERT m = DictionaryTerm
    $ map (\(k, v) -> (showBERT k, showBERT v)) (Map.toList m)
  readBERT (DictionaryTerm kvs) =
    mapM (\(k, v) -> liftM2 (,) (readBERT k) (readBERT v)) kvs >>=
      return . Map.fromList
  readBERT _ = fail "Invalid map type"
-- Binary encoding & decoding.
-- Whole-term (de)serialisation, including the external-term-format
-- magic byte 131.
instance Binary Term where
  put term = putWord8 131 >> putTerm term
  get = getWord8 >>= \magic ->
        case magic of
          131 -> getTerm
          _ -> fail "bad magic"
-- | Binary encoding of a single term (without header).
-- Tag bytes follow the Erlang external term format.
putTerm :: Term -> PutM ()
putTerm (IntTerm value)
  -- tag 97: unsigned byte; tag 98: 32-bit signed integer.
  | 0 <= value && value < 256 = tag 97 >> put8u value
  | otherwise = tag 98 >> put32s value
putTerm (FloatTerm value) =
  -- tag 99: float, rendered as a 31-byte NUL-padded printf string.
  tag 99 >> (putL . C.pack . pad $ printf "%15.15e" value)
  where
    pad s = s ++ replicate (31 - (length s)) '\0'
putTerm (AtomTerm value)
  -- tag 100: atom; the format limits the name to 255 bytes.
  | len < 256 = tag 100 >> put16u len >> (putL $ C.pack value)
  | otherwise = fail "BERT atom too long (>= 256)"
  where
    len = length value
putTerm (TupleTerm value)
  -- tag 104: small tuple (8-bit arity); tag 105: large tuple (32-bit).
  | len < 256 = tag 104 >> put8u len >> forM_ value putTerm
  | otherwise = tag 105 >> put32u len >> forM_ value putTerm
  where
    len = length value
putTerm (BytelistTerm value)
  -- tag 107: string/bytelist; >= 64K bytes fall back to a list encoding.
  | len < 65536 = tag 107 >> put16u len >> putL value
  | otherwise = do -- too big: encode as a list.
      tag 108
      put32u len
      forM_ (B.unpack value) $ \v -> do
        tag 97
        putWord8 v
  where
    len = B.length value
putTerm (ListTerm value)
  -- tag 108: proper list, terminated by Nil (tag 106).
  | len == 0 = putNil -- this is mentioned in the BERT spec.
  | otherwise= do
      tag 108
      put32u $ length value
      forM_ value putTerm
      putNil
  where
    len = length value
    putNil = putWord8 106
putTerm (BinaryTerm value) = tag 109 >> (put32u $ B.length value) >> putL value
putTerm (BigintTerm value) = tag 110 >> putBigint put8u value
putTerm (BigbigintTerm value) = tag 111 >> putBigint put32u value
-- All other terms are composite:
putTerm t = putTerm . compose $ t
-- | Binary decoding of a single term (without header).
--
-- Fixes relative to the original:
--
--   * tag 111 (big bignum) is decoded as 'BigbigintTerm', mirroring
--     'putTerm', so that decode . encode round-trips the constructor
--     (previously both 110 and 111 produced 'BigintTerm');
--   * an unknown tag is rejected with 'fail' instead of crashing with
--     a pattern-match error inside the 'Get' monad.
getTerm :: Get Term
getTerm = do
  tag <- get8u
  case tag of
    97  -> IntTerm <$> get8u
    98  -> IntTerm <$> get32s
    99  -> getL 31 >>= return . FloatTerm . read . C.unpack
    100 -> get16u >>= getL >>= return . AtomTerm . C.unpack
    104 -> get8u >>= getN >>= tupleTerm
    105 -> get32u >>= getN >>= tupleTerm
    106 -> return $ ListTerm []
    107 -> get16u >>= getL >>= return . BytelistTerm
    108 -> get32u >>= \n -> getN n <* expectNil >>= return . ListTerm
    109 -> get32u >>= getL >>= return . BinaryTerm
    110 -> getBigint get8u >>= return . BigintTerm . fromIntegral
    111 -> getBigint get32u >>= return . BigbigintTerm . fromIntegral
    _   -> fail $ "unknown term tag: " ++ show tag
  where
    getN :: Int -> Get [Term]
    getN n = replicateM n getTerm
    -- A proper list must be closed by a Nil (tag 106).
    expectNil :: Get ()
    expectNil = do
      tag <- get8u
      case tag of
        106 -> return ()
        _   -> fail $ "invalid list - expected list ending with Nil"
    -- First try & decode composite terms.
    tupleTerm [AtomTerm "bert", AtomTerm "true"]  = return $ BoolTerm True
    tupleTerm [AtomTerm "bert", AtomTerm "false"] = return $ BoolTerm False
    tupleTerm [AtomTerm "bert", AtomTerm "dict", ListTerm kvs] =
      mapM toTuple kvs >>= return . DictionaryTerm
      where
        toTuple (TupleTerm [k, v]) = return $ (k, v)
        toTuple _ = fail "invalid dictionary"
    tupleTerm [AtomTerm "bert", AtomTerm "time",
               IntTerm mS, IntTerm s, IntTerm uS] =
      return $ TimeTerm $ composeTime (mS, s, uS)
    tupleTerm [AtomTerm "bert", AtomTerm "regex",
               BytelistTerm s, ListTerm os] =
      options os >>= return . RegexTerm (C.unpack s)
      where
        -- TODO: type-check the options values as well
        options [] = return []
        options ((AtomTerm o):os) = options os >>= return . (o:)
        options _ = fail "regex options must be atoms"
    -- All other tuples are just .. tuples
    tupleTerm xs = return $ TupleTerm xs
-- Encode a bignum: length (via @putter@), a sign byte (1 = negative,
-- 0 = non-negative), then the magnitude as little-endian base-256
-- digits.
-- NOTE(review): @len@ is computed with floating-point 'logBase'; for
-- very large magnitudes this could round incorrectly - confirm.
putBigint putter value = do
  putter len -- TODO: verify size?
  if value < 0
    then put8u 1
    else put8u 0
  putL $ B.pack $ map (fromIntegral . digit) [0..len-1]
  where
    value' = abs value
    len = ceiling $ logBase 256 (fromIntegral $ value' + 1)
    digit pos = (value' `shiftR` (8 * pos)) .&. 0xFF
-- Decode a bignum written by 'putBigint': length (via @getter@), a
-- sign byte (0 = positive, 1 = negative), then little-endian base-256
-- digits which are folded back into a single integer.
getBigint getter = do
  len <- fromIntegral <$> getter
  sign <- get8u
  bytes <- getL len
  multiplier <-
    case sign of
      0 -> return 1
      1 -> return (-1)
      _ -> fail "Invalid sign byte"
  return $ (*) multiplier
         $ foldl (\s (n, d) -> s + d*(256^n)) 0
         $ zip [0..len-1] (map fromIntegral $ B.unpack bytes)
-- Note about put32s/get32s:
--
-- When dealing with 32-bit signed ints, we first convert between Int and
-- Int32, and only then cast to Word32. This is to ensure put and get are
-- as close to inverse as possible. Coercing word types to and from
-- integer types using 'fromIntegral' is guaranteed to preserve
-- representation (see Notes in "Data.Int").
--
-- For an example of what can go wrong, see
-- https://github.com/feuerbach/bert/issues/6
-- Fixed-width big-endian writers.
put8u :: (Integral a) => a -> Put
put8u = putWord8 . fromIntegral

put16u :: (Integral a) => a -> Put
put16u = putWord16be . fromIntegral

put32u :: (Integral a) => a -> Put
put32u = putWord32be . fromIntegral

put32s :: (Integral a) => a -> Put
put32s = putWord32be . (fromIntegral :: Int32 -> Word32) . fromIntegral

-- Shorthand for emitting a lazy ByteString.
putL = putLazyByteString

-- Fixed-width big-endian readers (mirrors of the writers above).
get8u :: (Integral a) => Get a
get8u = fromIntegral <$> getWord8

get16u :: (Integral a) => Get a
get16u = fromIntegral <$> getWord16be

get32u :: (Integral a) => Get a
get32u = fromIntegral <$> getWord32be

get32s :: (Integral a) => Get a
get32s = fromIntegral . (fromIntegral :: Word32 -> Int32) <$> getWord32be

getL :: (Integral a) => a -> Get ByteString
getL = getLazyByteString . fromIntegral

-- Emit a term's type-tag byte.
tag :: Word8 -> Put
tag which = putWord8 which
| feuerbach/bert | src/Data/BERT/Term.hs | bsd-3-clause | 11,297 | 0 | 15 | 2,695 | 3,957 | 2,015 | 1,942 | 253 | 21 |
import Control.Monad.IO.Class
import Data.Conduit.Shell
import System.Directory
-- | Refresh a checkout of the fpco repository: if the directory
-- already exists, wipe its cached .hsenvs; otherwise clone it fresh.
-- Then run the project's update and build scripts and alert when done.
main =
  run (do exists <- liftIO (doesDirectoryExist "fpco")
          if exists
             then rm "fpco/.hsenvs" "-rf"
             else git "clone" "[email protected]:fpco/fpco.git"
          liftIO (setCurrentDirectory "fpco")
          shell "./dev-scripts/update-repo.sh"
          shell "./dev-scripts/build-all.sh"
          alertDone)
| chrisdone/shell-conduit | examples/Clone.hs | bsd-3-clause | 425 | 0 | 12 | 112 | 94 | 45 | 49 | 12 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Text.Pandoc.Readers.ScalarSpec (main, spec) where
import Test.Hspec
import Test.QuickCheck
import Text.Scalar.Types
import Text.Pandoc.Readers.Scalar
import Text.Pandoc
import Text.Pandoc.Builder
import Data.Either (isRight)
import qualified Data.Map as Map
import Text.ScalarSpec (singlePageScalar)
import Examples
-- `main` is here so that this module can be run from GHCi on its own. It is
-- not needed for automatic spec discovery.
main :: IO ()
main = hspec spec

-- Extract the first inline of a 'Many'. Partial on empty input, which
-- is acceptable for these fixed test fixtures.
inlineHead :: Many Inline -> Inline
inlineHead = head . toList

-- Fixture: a span marked up as a Scalar note reference.
noteSpan :: Inline
noteSpan = Span ("",["note"],[("rev","scalar:has_note"),("resource","note-on-notes")]) [Str "notes"]

-- Expected output: the note reference resolved into a Pandoc 'Note'.
processedNoteSpan :: Inline
processedNoteSpan = Span ("",[],[]) [Str "notes",Note [Para [Str "this"]]]

-- The page the note reference points at.
notePage :: Page
notePage = Page { pageTitle = "Note on notes"
                , pageContent = "this"
                }

-- A one-page Scalar book containing just 'notePage'.
noteScalar :: Scalar
noteScalar =
  Scalar { scalarPages = Map.singleton (mkVersionURI "/note-on-notes.1") notePage
         , scalarPaths = Map.empty
         , scalarOptions = def
         }
-- | Specs covering the Scalar -> Pandoc conversion pipeline, from
-- single-inline transforms up to parsing whole RDF/XML example books.
spec :: Spec
spec = do
  describe "notesTransform" $ do
    it "turns a span class='note' into a pandoc Note" $
      notesTransform noteScalar noteSpan `shouldBe` processedNoteSpan
  describe "pageToBlocks" $ do
    it "takes a 'Page' and returns Right '[Block]'" $
      pageToBlocks def singlePageScalarPage `shouldBeScalar` Right (singlePageTitle : singlePageContentPandoc)
  describe "scalarToPandoc" $ do
    it "takes a 'Scalar' book and returns 'Pandoc'" $ do
      scalarToPandoc def singlePageScalar { scalarOptions = def { orderPagesBy = None }} `shouldBeScalar` Right singlePagePandoc
  describe "readScalar" $ do
    it "parses a Scalar RDF/XML string into Right 'Pandoc'" $
      readScalar def def {orderPagesBy = None } (getExample "single_page.xml") `shouldBeScalar` Right singlePagePandoc
    it "parses a complex Scalar RDF/XML string into Right 'Pandoc'" $
      readScalar def def (getExample "full_book.xml") `shouldSatisfyScalar` isRight
| corajr/scalar-convert | test/Text/Pandoc/Readers/ScalarSpec.hs | bsd-3-clause | 2,084 | 0 | 18 | 389 | 509 | 279 | 230 | 44 | 1 |
module Rules.Gmp (gmpRules, gmpBuildPath, gmpObjects, gmpLibraryH) where
import Base
import Context
import Oracles.Setting
import Packages
import Target
import Utilities
-- | Build GMP library objects and return their paths.
-- Needing 'gmpLibraryH' is what triggers the unpack/build of the
-- object files in the first place.
gmpObjects :: Action [FilePath]
gmpObjects = do
    gmpPath <- gmpBuildPath
    need [gmpPath -/- gmpLibraryH]
    -- The line below causes a Shake Lint failure on Windows, which forced us to
    -- disable Lint by default. See more details here:
    -- https://gitlab.haskell.org/ghc/ghc/issues/15971.
    map (unifyPath . (gmpPath -/-)) <$>
        liftIO (getDirectoryFilesIO gmpPath [gmpObjectsDir -/- "*.o"])
-- | Location of GMP sources within the integer-gmp package.
gmpBase :: FilePath
gmpBase = pkgPath integerGmp -/- "gmp"

-- | In-tree GMP header, relative to the GMP build directory.
gmpLibraryInTreeH :: FilePath
gmpLibraryInTreeH = "include/gmp.h"

-- | Static library produced by the in-tree GMP build.
gmpLibrary :: FilePath
gmpLibrary = ".libs/libgmp.a"

-- | GMP is considered a Stage1 package. This determines GMP build directory.
gmpContext :: Context
gmpContext = vanillaContext Stage1 integerGmp

-- TODO: Location of 'gmpBuildPath' is important: it should be outside any
-- package build directory, as otherwise GMP's object files will match build
-- patterns of 'compilePackage' rules. We could make 'compilePackage' rules
-- more precise to avoid such spurious matching.

-- | Build directory for in-tree GMP library.
gmpBuildPath :: Action FilePath
gmpBuildPath = buildRoot <&> (-/- stageString (stage gmpContext) -/- "gmp")

-- | Like 'gmpBuildPath' but in the 'Rules' monad.
gmpBuildPathRules :: Rules FilePath
gmpBuildPathRules = buildRootRules <&> (-/- stageString (stage gmpContext) -/- "gmp")

-- | GMP library header, relative to 'gmpBuildPath'.
gmpLibraryH :: FilePath
gmpLibraryH = "include/ghc-gmp.h"

-- | Directory for GMP library object files, relative to 'gmpBuildPath'.
gmpObjectsDir :: FilePath
gmpObjectsDir = "objs"
-- | Toolchain environment (CC, AR, NM) passed to GMP's configure script.
configureEnvironment :: Action [CmdOption]
configureEnvironment = sequence [ builderEnvironment "CC" $ Cc CompileC Stage1
                                , builderEnvironment "AR" (Ar Unpack Stage1)
                                , builderEnvironment "NM" Nm ]
-- | Register all rules for building the in-tree GMP library: copying
-- the appropriate header, building/unpacking the static library,
-- running configure, and extracting & patching the GMP tarball.
gmpRules :: Rules ()
gmpRules = do
    -- Copy appropriate GMP header and object files
    gmpPath <- gmpBuildPathRules
    gmpPath -/- gmpLibraryH %> \header -> do
        configMk <- readFile' =<< (buildPath gmpContext <&> (-/- "config.mk"))
        if not windowsHost && -- TODO: We don't use system GMP on Windows. Fix?
           any (`isInfixOf` configMk) [ "HaveFrameworkGMP = YES", "HaveLibGmp = YES" ]
        then do
            putBuild "| GMP library/framework detected and will be used"
            copyFile (gmpBase -/- "ghc-gmp.h") header
        else do
            putBuild "| No GMP library/framework detected; in tree GMP will be built"
            need [gmpPath -/- gmpLibrary]
            createDirectory (gmpPath -/- gmpObjectsDir)
            top <- topDirectory
            build $ target gmpContext (Ar Unpack Stage1)
                [top -/- gmpPath -/- gmpLibrary] [gmpPath -/- gmpObjectsDir]
            objs <- liftIO $ getDirectoryFilesIO "." [gmpPath -/- gmpObjectsDir -/- "*"]
            produces objs
            copyFileUntracked (gmpPath -/- "gmp.h") header

    -- Build in-tree GMP library, prioritised so that it matches "before"
    -- the generic @.a@ library rule in 'Rules.Library'.
    priority 2.0 $ gmpPath -/- gmpLibrary %> \lib -> do
        build $ target gmpContext (Make gmpPath) [gmpPath -/- "Makefile"] [lib]
        putSuccess "| Successfully built custom library 'gmp'"

    gmpPath -/- gmpLibraryInTreeH %> copyFile (gmpPath -/- gmpLibraryH)

    root <- buildRootRules
    root -/- buildDir gmpContext -/- gmpLibraryH %>
        copyFile (gmpPath -/- gmpLibraryH)

    -- This file is created when 'integerGmp' is configured.
    gmpPath -/- "config.mk" %> \_ -> ensureConfigured gmpContext

    -- Run GMP's configure script
    gmpPath -/- "Makefile" %> \mk -> do
        env <- configureEnvironment
        need [mk <.> "in"]
        buildWithCmdOptions env $
            target gmpContext (Configure gmpPath) [mk <.> "in"] [mk]

    -- Extract in-tree GMP sources and apply patches
    fmap (gmpPath -/-) ["Makefile.in", "configure"] &%> \_ -> do
        top <- topDirectory
        removeDirectory gmpPath
        -- Note: We use a tarball like gmp-4.2.4-nodoc.tar.bz2, which is
        -- gmp-4.2.4.tar.bz2 repacked without the doc/ directory contents.
        -- That's because the doc/ directory contents are under the GFDL,
        -- which causes problems for Debian.
        tarball <- unifyPath . fromSingleton "Exactly one GMP tarball is expected"
               <$> getDirectoryFiles top [gmpBase -/- "gmp-tarballs/gmp*.tar.bz2"]

        withTempDir $ \dir -> do
            let tmp = unifyPath dir
            need [top -/- tarball]
            build $ target gmpContext (Tar Extract) [top -/- tarball] [tmp]

            let patch = gmpBase -/- "gmpsrc.patch"
                patchName = takeFileName patch
            copyFile patch $ tmp -/- patchName
            applyPatch tmp patchName

            let name = dropExtension . dropExtension $ takeFileName tarball
                unpack = fromMaybe . error $ "gmpRules: expected suffix "
                    ++ "-nodoc (found: " ++ name ++ ")."
                libName = unpack $ stripSuffix "-nodoc" name

            moveDirectory (tmp -/- libName) gmpPath
| sdiehl/ghc | hadrian/src/Rules/Gmp.hs | bsd-3-clause | 5,369 | 0 | 22 | 1,338 | 1,048 | 535 | 513 | 84 | 2 |
{-# LANGUAGE DeriveFunctor #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Types
-- Copyright : (c) David Himmelstrup 2005
-- Duncan Coutts 2011
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Various common data types for the entire cabal-install system
-----------------------------------------------------------------------------
module Distribution.Client.Types where
import Distribution.Package
( PackageName, PackageId, Package(..)
, mkPackageKey, PackageKey, InstalledPackageId(..)
, HasInstalledPackageId(..), PackageInstalled(..) )
import Distribution.InstalledPackageInfo
( InstalledPackageInfo )
import Distribution.PackageDescription
( Benchmark(..), GenericPackageDescription(..), FlagAssignment
, TestSuite(..) )
import Distribution.PackageDescription.Configuration
( mapTreeData )
import Distribution.Client.PackageIndex
( PackageIndex, PackageFixedDeps(..) )
import Distribution.Client.ComponentDeps
( ComponentDeps )
import qualified Distribution.Client.ComponentDeps as CD
import Distribution.Version
( VersionRange )
import Distribution.Simple.Compiler
( Compiler, packageKeySupported )
import Distribution.Text (display)
import qualified Distribution.InstalledPackageInfo as Info
import Data.Map (Map)
import Network.URI (URI, nullURI)
import Data.ByteString.Lazy (ByteString)
import Control.Exception
( SomeException )
-- Hackage login credentials (wrapped so they cannot be mixed up).
newtype Username = Username { unUsername :: String }
newtype Password = Password { unPassword :: String }

-- | This is the information we get from a @00-index.tar.gz@ hackage index.
--
data SourcePackageDb = SourcePackageDb {
  packageIndex       :: PackageIndex SourcePackage,
  packagePreferences :: Map PackageName VersionRange
}
-- ------------------------------------------------------------
-- * Various kinds of information about packages
-- ------------------------------------------------------------
-- | InstalledPackage caches its dependencies as source package IDs.
-- This is for the benefit of the top-down solver only.
data InstalledPackage = InstalledPackage
       InstalledPackageInfo
       [PackageId]     -- cached dependencies, as source package ids

instance Package InstalledPackage where
  packageId (InstalledPackage pkg _) = packageId pkg
instance PackageFixedDeps InstalledPackage where
  depends (InstalledPackage pkg _) = depends pkg
instance HasInstalledPackageId InstalledPackage where
  installedPackageId (InstalledPackage pkg _) = installedPackageId pkg
instance PackageInstalled InstalledPackage where
  installedDepends (InstalledPackage pkg _) = installedDepends pkg
-- | In order to reuse the implementation of PackageIndex which relies on
-- 'InstalledPackageId', we need to be able to synthesize these IDs prior
-- to installation. Eventually, we'll move to a representation of
-- 'InstalledPackageId' which can be properly computed before compilation
-- (of course, it's a bit of a misnomer since the packages are not actually
-- installed yet.) In any case, we'll synthesize temporary installed package
-- IDs to use as keys during install planning. These should never be written
-- out! Additionally, they need to be guaranteed unique within the install
-- plan.
-- The ".fake." prefix guarantees these synthesized ids can never
-- collide with an id of a genuinely installed package.
fakeInstalledPackageId :: PackageId -> InstalledPackageId
fakeInstalledPackageId = InstalledPackageId . (".fake."++) . display
-- | A 'ConfiguredPackage' is a not-yet-installed package along with the
-- total configuration information. The configuration information is total in
-- the sense that it provides all the configuration information and so the
-- final configure process will be independent of the environment.
--
data ConfiguredPackage = ConfiguredPackage
       SourcePackage      -- package info, including repo
       FlagAssignment     -- complete flag assignment for the package
       [OptionalStanza]   -- list of enabled optional stanzas for the package
       (ComponentDeps [ConfiguredId])
                          -- set of exact dependencies (installed or source).
                          -- These must be consistent with the 'buildDepends'
                          -- in the 'PackageDescription' that you'd get by
                          -- applying the flag assignment and optional stanzas.
  deriving Show

-- | A ConfiguredId is a package ID for a configured package.
--
-- Once we configure a source package we know it's InstalledPackageId
-- (at least, in principle, even if we have to fake it currently). It is still
-- however useful in lots of places to also know the source ID for the package.
-- We therefore bundle the two.
--
-- An already installed package of course is also "configured" (all it's
-- configuration parameters and dependencies have been specified).
--
-- TODO: I wonder if it would make sense to promote this datatype to Cabal
-- and use it consistently instead of InstalledPackageIds?
data ConfiguredId = ConfiguredId {
    confSrcId  :: PackageId
  , confInstId :: InstalledPackageId
  }

instance Show ConfiguredId where
  show = show . confSrcId

instance Package ConfiguredPackage where
  packageId (ConfiguredPackage pkg _ _ _) = packageId pkg

instance PackageFixedDeps ConfiguredPackage where
  depends (ConfiguredPackage _ _ _ deps) = fmap (map confInstId) deps

instance HasInstalledPackageId ConfiguredPackage where
  installedPackageId = fakeInstalledPackageId . packageId
-- | Like 'ConfiguredPackage', but with all dependencies guaranteed to be
-- installed already, hence itself ready to be installed.
data ReadyPackage = ReadyPackage
       SourcePackage                          -- see 'ConfiguredPackage'.
       FlagAssignment                         --
       [OptionalStanza]                       --
       (ComponentDeps [InstalledPackageInfo]) -- Installed dependencies.
  deriving Show

instance Package ReadyPackage where
  packageId (ReadyPackage pkg _ _ _) = packageId pkg

instance PackageFixedDeps ReadyPackage where
  depends (ReadyPackage _ _ _ deps) = fmap (map installedPackageId) deps

instance HasInstalledPackageId ReadyPackage where
  installedPackageId = fakeInstalledPackageId . packageId
-- | Extracts a package key from ReadyPackage, a common operation needed
-- to calculate build paths.
readyPackageKey :: Compiler -> ReadyPackage -> PackageKey
readyPackageKey comp (ReadyPackage pkg _ _ deps) =
    -- Setup dependencies are excluded from the key computation.
    mkPackageKey (packageKeySupported comp) (packageId pkg)
                 (map Info.packageKey (CD.nonSetupDeps deps)) []
-- | Sometimes we need to convert a 'ReadyPackage' back to a
-- 'ConfiguredPackage'. For example, a failed 'PlanPackage' can be *either*
-- Ready or Configured.
readyPackageToConfiguredPackage :: ReadyPackage -> ConfiguredPackage
readyPackageToConfiguredPackage (ReadyPackage srcpkg flags stanzas deps) =
    ConfiguredPackage srcpkg flags stanzas (fmap (map aux) deps)
  where
    -- Pair each installed dependency's source id with its installed id.
    aux :: InstalledPackageInfo -> ConfiguredId
    aux info = ConfiguredId {
        confSrcId  = Info.sourcePackageId info
      , confInstId = installedPackageId info
      }
-- | A package description along with the location of the package sources.
--
data SourcePackage = SourcePackage {
    packageInfoId        :: PackageId,
    packageDescription   :: GenericPackageDescription,
    packageSource        :: PackageLocation (Maybe FilePath),
    packageDescrOverride :: PackageDescriptionOverride
  }
  deriving Show

-- | We sometimes need to override the .cabal file in the tarball with
-- the newer one from the package index.
type PackageDescriptionOverride = Maybe ByteString

instance Package SourcePackage where packageId = packageInfoId

-- | Optional package components that can be switched on per package.
data OptionalStanza
    = TestStanzas
    | BenchStanzas
  deriving (Eq, Ord, Show)
-- | Enable exactly the test-suite and benchmark stanzas selected in
-- the given list (and disable all others) in a package description.
enableStanzas
    :: [OptionalStanza]
    -> GenericPackageDescription
    -> GenericPackageDescription
enableStanzas stanzas gpkg =
    gpkg { condTestSuites = adjust setTest  (condTestSuites gpkg)
         , condBenchmarks = adjust setBench (condBenchmarks gpkg)
         }
  where
    -- Apply a per-component update underneath each (name, tree) pair.
    adjust f = map (\(name, tree) -> (name, mapTreeData f tree))
    setTest  t  = t  { testEnabled      = TestStanzas  `elem` stanzas }
    setBench bm = bm { benchmarkEnabled = BenchStanzas `elem` stanzas }
-- ------------------------------------------------------------
-- * Package locations and repositories
-- ------------------------------------------------------------
data PackageLocation local =
    -- | An unpacked package in the given dir, or current dir
    LocalUnpackedPackage FilePath

    -- | A package as a tarball that's available as a local tarball
  | LocalTarballPackage FilePath

    -- | A package as a tarball from a remote URI
  | RemoteTarballPackage URI local

    -- | A package available as a tarball from a repository.
    --
    -- It may be from a local repository or from a remote repository, with a
    -- locally cached copy. ie a package available from hackage
  | RepoTarballPackage Repo PackageId local

--TODO:
--  * add support for darcs and other SCM style remote repos with a local cache
--  | ScmPackage
  deriving (Show, Functor)

data LocalRepo = LocalRepo
  deriving (Show,Eq)

data RemoteRepo =
    RemoteRepo {
      remoteRepoName     :: String,
      remoteRepoURI      :: URI,
      remoteRepoRootKeys :: ()
    }
  -- FIXME: discuss this type some more.
  deriving (Show,Eq,Ord)

-- | Construct a partial 'RemoteRepo' value to fold the field parser list over.
emptyRemoteRepo :: String -> RemoteRepo
emptyRemoteRepo name = RemoteRepo name nullURI ()

-- | A repository: its kind plus the local directory holding (a cache
-- of) its contents.
data Repo = Repo {
    repoKind     :: Either RemoteRepo LocalRepo,
    repoLocalDir :: FilePath
  }
  deriving (Show,Eq)
-- ------------------------------------------------------------
-- * Build results
-- ------------------------------------------------------------
type BuildResult = Either BuildFailure BuildSuccess

-- | Why a package build failed: each constructor names the phase that
-- went wrong, most carrying the underlying exception.
data BuildFailure = PlanningFailed
                  | DependentFailed PackageId
                  | DownloadFailed SomeException
                  | UnpackFailed SomeException
                  | ConfigureFailed SomeException
                  | BuildFailed SomeException
                  | TestsFailed SomeException
                  | InstallFailed SomeException

-- | Outcome of a successful build, including docs/tests status.
data BuildSuccess = BuildOk DocsResult TestsResult
                            (Maybe InstalledPackageInfo)

data DocsResult  = DocsNotTried  | DocsFailed  | DocsOk
data TestsResult = TestsNotTried | TestsOk
| ian-ross/cabal | cabal-install/Distribution/Client/Types.hs | bsd-3-clause | 10,643 | 0 | 11 | 2,183 | 1,481 | 871 | 610 | 143 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : RefacTypeSig
-- Copyright : (c) Christopher Brown 2006
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- This module contains a transformation for HaRe.
-- Add type signatures for top-level function definitions
-----------------------------------------------------------------------------
module RefacTypeSig where
import PrettyPrint
import PrettyPrint
import PosSyntax
import AbstractIO
import Maybe
import TypedIds
import UniqueNames hiding (srcLoc)
import PNT
import TiPNT
import List
import RefacUtils
import PFE0 (findFile)
import MUtils (( # ))
import RefacLocUtils
import System
import IO
-- | Entry point of the "add type signatures" refactoring: for every
-- named top-level definition in the file, obtain its type, insert the
-- corresponding signature, then (in a second pass over the re-parsed
-- file) comment out any pre-existing signatures for those names.
refacTypeSig args
  = do let fileName = args!!0
       AbstractIO.putStrLn "refacTypeSig"
       modName1 <- fileNameToModName fileName
       let modName = convertModName modName1
       -- Parse the input file.
       (inscps, exps, mod, tokList) <- parseSourceFile fileName
       let newRefactoredDecls = hsDecls mod
       -- One signature per named top-level declaration.
       sigs <- mapM (getSig fileName modName) (filter (/="") (map declToName newRefactoredDecls))
       res <- applyRefac (addTypes (dropWhile (\x -> x == defaultPN) (map (declToPName (map declToName newRefactoredDecls)) newRefactoredDecls)) sigs) (Just (inscps, exps, mod, tokList)) fileName
       -- ((_,m), (newToks, newMod)) <- applyRefac (addType ses modName ) (Just (inscps, exps, mod, tokList)) fileName
       -- res <- findDefsWithType ses mod
       -- AbstractIO.putStrLn $ show res
       writeRefactoredFiles True [res]
       -- Second pass: re-parse and comment out the old signatures.
       (inscps5, exps5, mod5, tokList5) <- parseSourceFile fileName
       (mod',((tokList'',modified),_))<-(doCommenting (dropWhile (\x -> x == defaultPN) (map (declToPName (map declToName newRefactoredDecls)) newRefactoredDecls))) fileName mod5 tokList5
       writeRefactoredFiles True [((fileName, True), (tokList'', mod'))]
       AbstractIO.putStrLn "Completed."
-- | Comment out the (old) type signatures for the given PNames among
-- the module's top-level declarations, threading the token stream
-- through the state monad.
-- NOTE(review): there is no equation for an empty PName list; confirm
-- callers guarantee the list is non-empty.
doCommenting (x:xs) fileName mod tokList
  = runStateT (applyTP ((once_tdTP (failTP `adhocTP` (rmInMod (x:xs) )
                                   ))) mod)
              ((tokList,unmodified),fileName)
  where
    --1. The definition to be removed is one of the module's top level declarations.
    rmInMod [] mod = return mod
    rmInMod (p:ps) (mod@(HsModule loc name exps imps ds):: HsModuleP)
      = do ds'<-commentOutTypeSig p ds
           res2 <- rmInMod ps (HsModule loc name exps imps ds')
           return res2
-- | Add one type signature per declaration, threading the updated
-- module through each 'addTypeSigDecl' call.
addTypes [] _ (_,_,mod) = return mod
addTypes _ [] (_,_,mod) = return mod
addTypes (x:xs) (y:ys) (a,b,mod) = do
  mod' <- addTypeSigDecl mod (Just x) ([y], Nothing) True
  res <- addTypes xs ys (a,b,mod')
  -- BUG FIX: previously this returned mod', discarding 'res' and with
  -- it every signature added by the recursive call after the first.
  return res
{- declToName :: HsDeclP -> String
declToName (Dec (HsFunBind _ ((HsMatch _ pnt _ _ _):xs)))
= pNTtoName pnt
declToName (Dec (HsPatBind _ pnt _ _)) = pNTtoName (patToPNT pnt)
declToName _ = "" -}
{- declToPName :: [ String ] -> HsDeclP -> PName
declToPName [] _ = defaultPN
declToPName (name: names) d@(Dec (HsFunBind _ ((HsMatch _ pnt _ _ _):xs)))
| name == pNTtoName pnt = pNTtoPN pnt
| otherwise = declToPName names d
declToPName (name:names) d@(Dec (HsPatBind _ pnt _ _)) -- = pNTtoPN (patToPNT pnt)
| name == pNTtoName (patToPNT pnt) = pNTtoPN (patToPNT pnt)
| otherwise = declToPName names d
declToPName _ _ = defaultPN
-}
-- Single-pass variant (referenced only from commented-out code above).
addType ses modName (inscps, exps, mod)
  = do
      res <- findDefsWithType ses mod modName
      return res

-- | Render a module name as a string; main modules use their path.
convertModName (PlainModule s) = s
convertModName m@(MainModule f) = modNameToStr m
-- | Traverse the module; for each function or pattern binding that
-- does not already have a type signature (checked via 'findType'),
-- query its type with 'getSig' and insert the new signature.
findDefsWithType ses t modName
  = applyTP (stop_tdTP (failTP `adhocTP` inMatch)) t
  where
    inMatch (mat@(Dec (HsFunBind _ ((HsMatch loc1 pnt pats (HsBody e) ds):xs)))::HsDeclP)
      = do
          res3 <- findType pnt t
          if res3 == [True]
            then do
              addTypeSigDecl t Nothing ([], Nothing) True
            else do
              -- create a type signature!
              res <- getSig ses modName (pNTtoName pnt)
              -- addTypeDecl mat (Just (pNTtoPN pnt)) ([res], Nothing) True
              addTypeSigDecl t (Just (declToPName [declToName mat] mat)) ([res], Nothing) True
              -- return [res2]
    -- inMatch _ = fail ""
    inMatch (dec@(Dec (HsPatBind _ pnt _ _))::HsDeclP)
      = do
          res3 <- findType (patToPNT pnt) t
          if res3 == [True]
            then
              return dec
            else do
              -- create a type signature!
              res <- getSig ses modName (pNTtoName (patToPNT pnt))
              -- addTypeDecl dec (Just (pNTtoPN (patToPNT pnt))) ([res], Nothing) True
              addTypeSigDecl dec (Just (declToPName [declToName dec] dec)) ([res], Nothing) True
              -- return [res2]
    inMatch _ = fail ""
{- getSig ses modName name
= do
let types = getTypes name ses modName
-- error $ show types
let types1 = cleanTypes (tail types)
let (context, l) = getContext (head types)
let types2 = l : types1
-- let context2 = init context
let types3 = map (filter (/= '\n')) types2
let newSig = createTypeSig name context types3
-- error $ show newSig
return newSig
getSig name ses modName
= do
let types = getTypes name ses modName
let types1 = cleanTypes (tail types) -- modName
let (context, l) = getContext (head types) -- modName
let types2 = l : types1
-- let context2 = init context
let newSig = createTypeSig name context types2
-- error $ show newSig
return newSig -}
--createTypeSig :: String -> [String] -> [String] -> HsDeclP
-- | Build a type-signature declaration @name :: context => t1 -> ... -> tn@.
-- An empty context yields an unqualified signature; a non-empty context
-- is wrapped as a single type-variable assertion.
createTypeSig name [] types
 = Dec (HsTypeSig loc0 [nameToPNT name] [] (createApplication types))
createTypeSig name context types
 = Dec (HsTypeSig loc0 [nameToPNT name] [(Typ (HsTyVar (nameToPNT context)))] (createApplication types))
-- (Typ (HsTyVar (nameToTypePNT (head types))) ) )
-- | Build a type-level 'PNT' (unqualified, located at 'loc0') from a
-- plain name.  The attached 'TypeInfo' is empty: nothing is known about
-- the type's definition, constructors or fields.
-- (Parameter renamed from @id@, which shadowed 'Prelude.id'.)
nameToTypePNT :: String -> PNT
nameToTypePNT name = (PNT (PN (UnQual name) (S loc0)) (Type (TypeInfo {defType = Nothing, constructors = [], fields = []})) (N (Just loc0)))
-- | Fold a non-empty list of type names into a right-nested function
-- type @t1 -> t2 -> ... -> tn@.
-- NOTE(review): partial — the empty list is not handled.
createApplication [var]
 = (Typ (HsTyVar (nameToTypePNT var)))
createApplication (var:vars)
 = createApplication' (Typ (HsTyVar (nameToTypePNT var))) vars
-- | Worker for 'createApplication': left-accumulate the arrow type over
-- the remaining (non-empty) list of type names.
createApplication' x [y]
 = (Typ (HsTyFun x (Typ (HsTyVar (nameToTypePNT y)))))
createApplication' x (y:ys)
 = (createApplication' (Typ (HsTyFun x (Typ (HsTyVar (nameToTypePNT y))))) ys)
-- | Succeed with @[True]@ iff @t@ already contains a type signature
-- covering the binding named by @pnt@; fail (monadically) otherwise.
findType pnt t
  = applyTU (stop_tdTU (failTU `adhocTU` inSig)) t
     where
      -- Stop at each type-signature declaration and test whether it
      -- defines a signature for the wanted name.
      inSig (dec@(Dec (HsTypeSig _ _ _ _))::HsDeclP)
       = if definesTypeSig (pNTtoPN pnt) dec
            then return [True]
            else fail ""
      inSig _ = fail ""
| forste/haReFork | refactorer/RefacTypeSig.hs | bsd-3-clause | 7,685 | 0 | 20 | 2,558 | 1,702 | 895 | 807 | -1 | -1 |
-- Copyright 2013 Kevin Backhouse.
{-|
An example of the use of the
'Control.Monad.MultiPass.Instrument.OrdCons.OrdCons' instrument.
-}
module Control.Monad.MultiPass.Example.OrdCons ( convertArray )
where
import Control.Monad.ST2
import Control.Monad.MultiPass
import Control.Monad.MultiPass.Instrument.CreateST2Array
import Control.Monad.MultiPass.Instrument.OrdCons
import Data.Ix
-- | Wrapper that names the pass function so it can be registered as a
-- 'MultiPassAlgorithm' instance.
newtype ConvertArray i a r w p1 p2 tc
  = ConvertArray (ConvertArrayType i a r w p1 p2 tc)
-- | The algorithm's shape: given the 'OrdCons' and 'CreateST2Array'
-- instruments, produce a multi-pass main yielding the converted array.
type ConvertArrayType i a r w p1 p2 tc
  = OrdCons a r w p1 p2 tc
  -> CreateST2Array r w p2 tc
  -> MultiPassMain r w tc (p2 (ST2Array r w i Int))
-- | Unwrapping is just removing the newtype constructor.
instance MultiPassAlgorithm
           (ConvertArray i a r w p1 p2 tc)
           (ConvertArrayType i a r w p1 p2 tc)
  where
  unwrapMultiPassAlgorithm (ConvertArray f) = f
-- | Map each element of the input array to a small 'Int' index using
-- the 'OrdCons' instrument, running the multi-pass framework with the
-- given number of threads.
convertArray
  :: (Ix i, Num i, Ord a)
  => NumThreads
  -> ST2Array r w i a
  -> ST2 r w (ST2Array r w i Int)
convertArray n xs =
  run $ PassS $ PassS $ PassZ $ ConvertArray $
  convertArrayMP n xs
-- | The pass body: parallel-map over the input array, replacing each
-- element by the index assigned to it by 'ordCons'.
convertArrayMP
  :: (Ix i, Num i, Ord a, Monad p1, Monad p2)
  => NumThreads
  -> ST2Array r w i a
  -> ConvertArrayType i a r w p1 p2 tc
convertArrayMP n xs oc cr =
  mkMultiPassMain
    (return ())
    (\() ->
       pmapST2ArrayMP cr n xs $ \x ->
       ordCons oc (return x))
    return
| kevinbackhouse/Control-Monad-MultiPass | src/Control/Monad/MultiPass/Example/OrdCons.hs | bsd-3-clause | 1,304 | 0 | 12 | 301 | 445 | 240 | 205 | -1 | -1 |
-- |
-- Module : Network.SimpleIRC.Core
-- Copyright : (c) Dominik Picheta 2010
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- For information on how to use this library please take a look at the readme file on github, <http://github.com/dom96/SimpleIRC#readme>.
{-# LANGUAGE OverloadedStrings, CPP #-}
module Network.SimpleIRC.Core
(
-- * Types
MIrc
, EventFunc
, IrcConfig(..)
, IrcEvent(..)
-- * Functions
, connect
, disconnect
, reconnect
, sendRaw
, sendMsg
, sendCmd
, addEvent
, changeEvents
, remEvent
, mkDefaultConfig
-- * Accessors
, getChannels
, getNickname
, getAddress
, getPort
, getUsername
, getRealname
#ifdef TEST
, IrcServer(..)
, listenLoop
#endif
) where
import Network
import System.IO
import Data.Maybe
import Data.List (delete)
import Data.Char (isNumber)
import Control.Monad
import Control.Concurrent
import Network.SimpleIRC.Messages
import Data.Unique
import Control.Exception (try)
import System.Timeout
import Data.Time
#if ! MIN_VERSION_time(1,5,0)
import System.Locale
#endif
import qualified Data.ByteString.Char8 as B
import qualified Data.Map as Map
import qualified Data.Foldable as Foldable
-- | State-updating handlers run on every message before user events
-- (channel join-on-welcome, PONG replies, channel/nick tracking).
internalEvents :: [IrcServer -> IrcMessage -> IO IrcServer]
internalEvents = [joinChans, pong, trackChanges]
-- | Built-in ordinary events; currently only the CTCP responder.
internalNormEvents :: [IrcEvent]
internalNormEvents = [Privmsg ctcpHandler]
-- | A thread-safe handle to one connected IRC server.
type MIrc = MVar IrcServer
-- | Everything 'connect' needs to reach and introduce itself to a server.
data IrcConfig = IrcConfig
  { cAddr :: String -- ^ Server address to connect to
  , cPort :: Int -- ^ Server port to connect to
  , cNick :: String -- ^ Nickname
  , cPass :: Maybe String -- ^ Optional server password
  , cUsername :: String -- ^ Username
  , cRealname :: String -- ^ Realname
  , cChannels :: [String] -- ^ List of channels to join on connect
  , cEvents :: [IrcEvent] -- ^ Events to bind
  , cCTCPVersion :: String -- ^ What to send on CTCP VERSION
  , cCTCPTime :: IO String -- ^ What to send on CTCP TIME
  , cPingTimeoutInterval :: Int -- ^ The time between server messages that causes ping timeout (passed to 'timeout', i.e. microseconds)
  }
-- | Commands queued on 'sCmdChan' and executed by 'execCmdsLoop' to
-- mutate the shared event table.
data SIrcCommand =
    SIrcAddEvent (Unique, IrcEvent)
  | SIrcChangeEvents (Map.Map Unique IrcEvent)
  | SIrcRemoveEvent Unique
-- | Mutable connection state kept inside the 'MIrc' MVar.
data IrcServer = IrcServer
  { sAddr :: B.ByteString
  , sPort :: Int
  , sNickname :: B.ByteString
  , sPassword :: Maybe B.ByteString
  , sUsername :: B.ByteString
  , sRealname :: B.ByteString
  , sChannels :: [B.ByteString] -- ^ Channels pending before 001; joined channels afterwards (see 'joinChans'/'trackChanges')
  , sEvents :: Map.Map Unique IrcEvent -- ^ Registered handlers, keyed for removal
  , sSock :: Maybe Handle -- ^ 'Nothing' once disconnected
  , sListenThread :: Maybe ThreadId
  , sCmdThread :: Maybe ThreadId
  , sCmdChan :: Chan SIrcCommand -- ^ Queue drained by 'execCmdsLoop'
  , sDebug :: Bool
  -- Other info
  , sCTCPVersion :: String
  , sCTCPTime :: IO String
  , sPingTimeoutInterval :: Int
  , sFloodControlTimestamp :: UTCTime -- ^ Used by 'sendMsg' flood control
  }
-- When adding events here, remember add them in callEvents and in eventFunc
-- AND also in the Show instance and Eq instance
-- | An event selector paired with its handler.  Matching in 'events'
-- looks only at the constructor, never at the wrapped function.
data IrcEvent =
    Privmsg EventFunc -- ^ PRIVMSG
  | Numeric EventFunc -- ^ Numeric, 001, 002, 372 etc.
  | Ping EventFunc -- ^ PING
  | Join EventFunc -- ^ JOIN
  | Part EventFunc -- ^ PART
  | Mode EventFunc -- ^ MODE
  | Topic EventFunc -- ^ TOPIC
  | Invite EventFunc -- ^ INVITE
  | Kick EventFunc -- ^ KICK
  | Quit EventFunc -- ^ QUIT
  | Nick EventFunc -- ^ NICK
  | Notice EventFunc -- ^ NOTICE
  | RawMsg EventFunc -- ^ This event gets called on every message received
  | Disconnect (MIrc -> IO ()) -- ^ This event gets called whenever the
                               -- connection with the server is dropped
-- | Render only the constructor tag; handler functions have no 'Show'.
instance Show IrcEvent where
  show e = "IrcEvent - " ++ tagName e
    where
      tagName :: IrcEvent -> String
      tagName (Privmsg _)    = "Privmsg"
      tagName (Numeric _)    = "Numeric"
      tagName (Ping _)       = "Ping"
      tagName (Join _)       = "Join"
      tagName (Part _)       = "Part"
      tagName (Mode _)       = "Mode"
      tagName (Topic _)      = "Topic"
      tagName (Invite _)     = "Invite"
      tagName (Kick _)       = "Kick"
      tagName (Quit _)       = "Quit"
      tagName (Nick _)       = "Nick"
      tagName (Notice _)     = "Notice"
      tagName (RawMsg _)     = "RawMsg"
      tagName (Disconnect _) = "Disconnect"
-- | A handler: receives the server handle and the parsed message.
type EventFunc = (MIrc -> IrcMessage -> IO ())
-- | Connect to a server, perform the initial handshake and start the
-- listen/command loops.  With @threaded = True@ the loops run on new
-- threads and the handle is returned immediately; otherwise the listen
-- loop runs on the calling thread.  IO failures are caught by 'try'.
connect :: IrcConfig -- ^ Configuration
        -> Bool      -- ^ Run in a new thread
        -> Bool      -- ^ Print debug messages
        -> IO (Either IOError MIrc) -- ^ IrcServer instance
connect config threaded debug = try $ do
  (when debug $
    B.putStrLn $ "Connecting to " `B.append` B.pack (cAddr config))
  h <- connectTo (cAddr config) (PortNumber $ fromIntegral $ cPort config)
  hSetBuffering h NoBuffering
  cmdChan <- newChan
  server <- toServer config h cmdChan debug
  -- Initialize connection with the server
  _ <- greetServer server
  -- Create a new MVar
  res <- newMVar server
  -- Start the loops, listen and exec cmds
  if threaded
    then do listenId <- forkIO (listenLoop res)
            _ <- forkIO (execCmdsLoop res)
            modifyMVar_ res (\srv -> return $ srv {sListenThread = Just listenId})
            return res
    else do listenLoop res
            return res
-- | Ask the server to close the session by sending QUIT with the given
-- farewell message.
disconnect :: MIrc
           -> B.ByteString -- ^ Quit message
           -> IO ()
disconnect server quitMsg =
    readMVar server >>= \s ->
        write s ("QUIT :" `B.append` quitMsg)
-- | Re-open the socket recorded in the handle, redo the handshake and
-- restart both loops.  IO failures are caught by 'try'.
reconnect :: MIrc -> IO (Either IOError MIrc)
reconnect mIrc = try $ do
  server <- readMVar mIrc
  h <- connectTo (B.unpack $ sAddr server) (PortNumber $ fromIntegral $ sPort server)
  hSetBuffering h NoBuffering
  modifyMVar_ mIrc (\s -> return $ s {sSock = Just h})
  -- Initialize connection with the server
  _ <- withMVar mIrc greetServer
  -- Restart the listen loop.
  listenId <- forkIO (listenLoop mIrc)
  cmdId <- forkIO (execCmdsLoop mIrc)
  modifyMVar_ mIrc (\s -> return $ s {sListenThread = Just listenId,
                                      sCmdThread = Just cmdId})
  return mIrc
{-
-- |Reconnects to the server.
reconnect :: MIrc -> IO (Either IOError MIrc)
reconnect server = do
s <- readMVar server
let conf = IrcConfig (B.unpack $ sAddr s) (sPort s)
(B.unpack $ sNickname s) (B.unpack $ sUsername s)
(B.unpack $ sRealname s) (map (B.unpack) (sChannels s))
(elems $ sEvents s) (sCTCPVersion s) (sCTCPTime s)
connect conf True (sDebug s)
-}
-- | Pair an event with a freshly generated key.
genUnique :: IrcEvent -> IO (Unique, IrcEvent)
genUnique evt = fmap (\key -> (key, evt)) newUnique
-- | Key every event in the list with a fresh 'Unique'.
genUniqueMap :: [IrcEvent] -> IO (Map.Map Unique IrcEvent)
genUniqueMap = fmap Map.fromList . mapM genUnique
-- | Build the initial 'IrcServer' record from a config, merging the
-- built-in CTCP handler with the user's events and stamping the flood
-- control clock with the current time.
toServer :: IrcConfig -> Handle -> Chan SIrcCommand -> Bool -> IO IrcServer
toServer config h cmdChan debug = do
  uniqueEvents <- genUniqueMap $ internalNormEvents ++ cEvents config
  now <- getCurrentTime
  return $ IrcServer (B.pack $ cAddr config) (cPort config)
             (B.pack $ cNick config) (B.pack `fmap` cPass config) (B.pack $ cUsername config)
             (B.pack $ cRealname config) (map B.pack $ cChannels config)
             uniqueEvents (Just h) Nothing Nothing cmdChan debug
             (cCTCPVersion config) (cCTCPTime config) (cPingTimeoutInterval config) now
-- | Perform the initial registration: optional PASS, then NICK and USER.
greetServer :: IrcServer -> IO IrcServer
greetServer server = do
  case mpass of
    Nothing -> return ()
    Just pass -> write server $ "PASS " `B.append` pass
  write server $ "NICK " `B.append` nick
  write server $ "USER " `B.append` user `B.append` " " `B.append`
      user `B.append` " " `B.append` addr `B.append` " :" `B.append` real
  return server
  where nick = sNickname server
        mpass = sPassword server
        user = sUsername server
        real = sRealname server
        addr = sAddr server
-- | Forever drain 'sCmdChan', applying each command to the shared
-- event table.
-- NOTE(review): readMVar followed by swapMVar is not atomic; an update
-- racing between the two could be lost — verify intended.
execCmdsLoop :: MIrc -> IO ()
execCmdsLoop mIrc = do
  server <- readMVar mIrc
  cmd <- readChan $ sCmdChan server
  case cmd of (SIrcAddEvent uEvent) -> do
                  _ <- swapMVar mIrc (server {sEvents =
                        (uncurry Map.insert uEvent) (sEvents server)})
                  execCmdsLoop mIrc
              (SIrcChangeEvents evts) -> do
                  _ <- swapMVar mIrc (server {sEvents = evts})
                  execCmdsLoop mIrc
              (SIrcRemoveEvent key) -> do
                  _ <- swapMVar mIrc (server {sEvents =
                        Map.delete key (sEvents server)})
                  execCmdsLoop mIrc
-- | Read lines from the socket until EOF or ping-timeout.  Each line is
-- parsed once for the internal state-updating handlers and once for the
-- user events (including 'RawMsg').  On disconnect the socket is
-- cleared and every 'Disconnect' handler fires.
listenLoop :: MIrc -> IO ()
listenLoop s = do
  server <- readMVar s
  let h = fromJust $ sSock server
  eof <- timeout (sPingTimeoutInterval server) $ hIsEOF h
  -- If EOF then we are disconnected
  if (eof /= Just False)
    then do
      modifyMVar_ s (\serv -> return $ serv {sSock = Nothing})
      Foldable.mapM_ (callDisconnectFunction s) (sEvents server)
    else do
      line <- B.hGetLine h
      server1 <- takeMVar s
      -- Print the received line.
      debugWrite server1 $ (B.pack ">> ") `B.append` line
      -- Call the internal events
      newServ <- foldM (\sr f -> f sr (parse line)) server1 internalEvents
      putMVar s newServ -- Put the MVar back.
      let parsed = (parse line)
      -- Call the events
      callEvents s parsed
      -- Call the RawMsg Events.
      events s (RawMsg undefined) parsed
      listenLoop s
  where
    callDisconnectFunction mIrc (Disconnect f) = f mIrc
    callDisconnectFunction _ _ = return ()
-- Internal Events - They can edit the server
-- | Once the welcome numeric (001) arrives, join every configured
-- channel and clear the pending list; otherwise leave state untouched.
joinChans :: IrcServer -> IrcMessage -> IO IrcServer
joinChans server msg
  | mCode msg == "001" = do
      mapM_ joinOne (sChannels server)
      return server { sChannels = [] }
  | otherwise = return server
  where joinOne chan = write server ("JOIN " `B.append` chan)
-- | Answer a server PING with the matching PONG; state is unchanged.
pong :: IrcServer -> IrcMessage -> IO IrcServer
pong server msg
  | mCode msg == "PING" = do
      write server ("PONG :" `B.append` mMsg msg)
      return server
  | otherwise = return server
-- | Keep the local view of our own nick and channel membership in sync:
-- JOIN/PART/KICK affecting us update 'sChannels', NICK affecting us
-- updates 'sNickname'.  Messages about other users are ignored.
trackChanges :: IrcServer -> IrcMessage -> IO IrcServer
trackChanges server msg
  | code == "JOIN" = do
    let nick = fromJust $ mNick msg
        chan = mMsg msg
    if nick == sNickname server
      then return server { sChannels = chan:(sChannels server) }
      else return server
  | code == "NICK" = do
    let nick = fromJust $ mNick msg
        newNick = mMsg msg
    if nick == sNickname server
      then return server { sNickname = newNick }
      else return server
  | code == "KICK" = do
    -- For KICK the kicked nick is the first "other" parameter.
    let nick = (fromJust $ mOther msg) !! 0
        chan = fromJust $ mChan msg
    if nick == sNickname server
      then return server { sChannels = delete chan (sChannels server) }
      else return server
  | code == "PART" = do
    let nick = fromJust $ mNick msg
        chan = mMsg msg
    if nick == sNickname server
      then return server { sChannels = delete chan (sChannels server) }
      else return server
  | otherwise = return server
  where code = mCode msg
-- Internal normal events
-- | Built-in CTCP responder: replies to VERSION and TIME with the
-- configured strings and echoes PING payloads back, all via NOTICE to
-- the message origin.  Other PRIVMSGs are ignored.
ctcpHandler :: EventFunc
ctcpHandler mServ iMsg
  | msg == "\x01VERSION\x01" = do
    server <- readMVar mServ
    sendCmd mServ
      (MNotice origin ("\x01VERSION " `B.append`
        B.pack (sCTCPVersion server) `B.append` "\x01"))
  | msg == "\x01TIME\x01" = do
    server <- readMVar mServ
    time <- sCTCPTime server
    sendCmd mServ
      (MNotice origin ("\x01TIME " `B.append`
        (B.pack time) `B.append` "\x01"))
  | "\x01PING " `B.isPrefixOf` msg = do
    -- Echo the whole PING payload back unchanged.
    sendCmd mServ
      (MNotice origin msg)
  | otherwise = return ()
  where msg = mMsg iMsg
        origin = fromJust $ mOrigin iMsg
-- Event code
-- | Run every registered handler whose constructor matches @event@,
-- passing it the server handle and the message.
events :: MIrc -> IrcEvent -> IrcMessage -> IO ()
events mServ event msg = do
    server <- readMVar mServ
    let matching = Map.elems (Map.filter (`eqEvent` event) (sEvents server))
    mapM_ (\handler -> eventFunc handler mServ msg) matching
-- | Dispatch a parsed message to the user events matching its command;
-- all-numeric commands fire 'Numeric', anything else is dropped.
callEvents :: MIrc -> IrcMessage -> IO ()
callEvents mServ msg
  | Just evt <- lookup code dispatch = events mServ evt msg
  | B.all isNumber code              = events mServ (Numeric undefined) msg
  | otherwise                        = return ()
  where
    code = mCode msg
    -- The handler slot is never inspected, so 'undefined' is safe here.
    dispatch =
      [ ("PRIVMSG", Privmsg undefined)
      , ("PING",    Ping undefined)
      , ("JOIN",    Join undefined)
      , ("PART",    Part undefined)
      , ("MODE",    Mode undefined)
      , ("TOPIC",   Topic undefined)
      , ("INVITE",  Invite undefined)
      , ("KICK",    Kick undefined)
      , ("QUIT",    Quit undefined)
      , ("NICK",    Nick undefined)
      , ("NOTICE",  Notice undefined)
      ]
-- | True iff both events were built with the same constructor; the
-- wrapped handler functions are never compared.
eqEvent :: IrcEvent -> IrcEvent -> Bool
eqEvent a b = tag a == tag b
  where
    tag :: IrcEvent -> Int
    tag (Privmsg _)    = 0
    tag (Numeric _)    = 1
    tag (Ping _)       = 2
    tag (Join _)       = 3
    tag (Part _)       = 4
    tag (Mode _)       = 5
    tag (Topic _)      = 6
    tag (Invite _)     = 7
    tag (Kick _)       = 8
    tag (Quit _)       = 9
    tag (Nick _)       = 10
    tag (Notice _)     = 11
    tag (RawMsg _)     = 12
    tag (Disconnect _) = 13
-- | Extract the handler from any event except 'Disconnect', whose
-- callback has a different type and cannot be returned here.
eventFunc :: IrcEvent -> EventFunc
eventFunc evt =
  case evt of
    Privmsg f    -> f
    Numeric f    -> f
    Ping f       -> f
    Join f       -> f
    Part f       -> f
    Mode f       -> f
    Topic f      -> f
    Invite f     -> f
    Kick f       -> f
    Quit f       -> f
    Nick f       -> f
    Notice f     -> f
    RawMsg f     -> f
    Disconnect _ -> error "SimpleIRC: unexpected event"
-- | Send a raw line to the server exactly as given.
sendRaw :: MIrc -> B.ByteString -> IO ()
sendRaw mServ msg = readMVar mServ >>= \server -> write server msg
-- |Sends a message to a channel
-- |Implements flood control according to RFC 2813, chapter 5.8
sendMsg :: MIrc
        -> B.ByteString -- ^ Channel
        -> B.ByteString -- ^ Message
        -> IO ()
sendMsg mServ chan msg =
  mapM_ s lins
  where lins = B.lines msg
        -- Each line charges the flood clock 2 seconds; once the clock
        -- runs more than 10 seconds ahead of real time, sending is
        -- delayed by the excess.
        s m = do
          now <- getCurrentTime
          stamp <- (getFloodControlTimestamp mServ)
          let latest = addUTCTime 2 $ max now stamp
              diff = diffUTCTime latest now
          setFloodControlTimestamp mServ latest
          when (diff > 10) (threadDelay $ 1000000 * (round diff - 10))
          sendCmd mServ (MPrivmsg chan m)
-- | Render a structured command and send it as a raw line.
sendCmd :: MIrc
        -> Command -- ^ Command to send
        -> IO ()
sendCmd mServ = sendRaw mServ . showCommand
-- | Register a new event handler; the returned key can later be passed
-- to 'remEvent'.
addEvent :: MIrc -> IrcEvent -> IO Unique
addEvent mIrc event = do
    chan <- fmap sCmdChan (readMVar mIrc)
    key  <- newUnique
    writeChan chan (SIrcAddEvent (key, event))
    return key
-- | Replace the whole event table with freshly keyed handlers.
changeEvents :: MIrc -> [IrcEvent] -> IO ()
changeEvents mIrc evts = do
    chan  <- fmap sCmdChan (readMVar mIrc)
    keyed <- genUniqueMap evts
    writeChan chan (SIrcChangeEvents keyed)
-- | Unregister the handler previously added under this key.
remEvent :: MIrc -> Unique -> IO ()
remEvent mIrc key =
    readMVar mIrc >>= \s -> writeChan (sCmdChan s) (SIrcRemoveEvent key)
-- | Emit a debug line on stdout, but only when debugging is enabled.
debugWrite :: IrcServer -> B.ByteString -> IO ()
debugWrite s msg = when (sDebug s) (B.putStrLn msg)
-- | Send one CRLF-terminated raw line to the server socket, echoing it
-- (with a literal backslash-r-backslash-n marker) when debugging.
write :: IrcServer -> B.ByteString -> IO ()
write s msg = do
    debugWrite s ("<< " `B.append` msg `B.append` "\\r\\n")
    B.hPutStr (fromJust (sSock s)) (msg `B.append` "\r\n")
-- | Sensible defaults: port 6667, no password, no channels, and a
-- 350-second ping timeout (stored in microseconds for 'timeout').
mkDefaultConfig :: String -> String -> IrcConfig
mkDefaultConfig addr nick = IrcConfig
  { cAddr = addr
  , cPort = 6667
  , cNick = nick
  , cPass = Nothing
  , cUsername = "simpleirc"
  , cRealname = "SimpleIRC Bot"
  , cChannels = []
  , cEvents = []
  , cCTCPVersion = "SimpleIRC v0.3"
  , cCTCPTime = fmap (formatTime defaultTimeLocale "%c") getZonedTime
  , cPingTimeoutInterval = 350 * 10^(6::Int)
  }
-- MIrc Accessors
-- |Returns a list of channels currently joined.
getChannels :: MIrc -> IO [B.ByteString]
getChannels = fmap sChannels . readMVar
-- |Returns the current nickname.
getNickname :: MIrc -> IO B.ByteString
getNickname = fmap sNickname . readMVar
-- |Returns the address
getAddress :: MIrc -> IO B.ByteString
getAddress = fmap sAddr . readMVar
-- |Returns the address
getPort :: MIrc -> IO Int
getPort = fmap sPort . readMVar
-- |Returns the User name
getUsername :: MIrc -> IO B.ByteString
getUsername = fmap sUsername . readMVar
-- |Returns the Real name
getRealname :: MIrc -> IO B.ByteString
getRealname = fmap sRealname . readMVar
-- |Returns the timestamp of the last sent message, possibly with flood control penalty
getFloodControlTimestamp :: MIrc -> IO UTCTime
getFloodControlTimestamp = fmap sFloodControlTimestamp . readMVar
-- |Updates the value of the flood control timestamp
setFloodControlTimestamp :: MIrc -> UTCTime -> IO ()
setFloodControlTimestamp mIrc stamp =
    modifyMVar_ mIrc (\s -> return s { sFloodControlTimestamp = stamp })
| MagneticDuck/simpleirc | Network/SimpleIRC/Core.hs | bsd-3-clause | 17,535 | 0 | 20 | 4,651 | 5,436 | 2,764 | 2,672 | 433 | 5 |
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
#include "ghcconfig.h"
-----------------------------------------------------------------------------
--
-- (c) The University of Glasgow, 2004
--
-- runghc program, for invoking from a #! line in a script. For example:
--
-- script.lhs:
-- #!/usr/bin/env /usr/bin/runghc
-- > main = putStrLn "hello!"
--
-- runghc accepts one flag:
--
-- -f <path> specify the path
--
-- -----------------------------------------------------------------------------
module Main (main) where
import Control.Exception
import Data.Monoid
import System.Cmd
import System.Directory
import System.Environment
import System.Exit
import System.FilePath
import System.IO
#if defined(mingw32_HOST_OS)
import Foreign
import Foreign.C.String
#endif
#if defined(mingw32_HOST_OS)
# if defined(i386_HOST_ARCH)
# define WINDOWS_CCONV stdcall
# elif defined(x86_64_HOST_ARCH)
# define WINDOWS_CCONV ccall
# else
# error Unknown mingw32 arch
# endif
#endif
-- | Entry point: parse the runghc-specific flags, then either show
-- help/version or locate GHC (explicit -f path, else a "ghc" binary
-- next to this executable) and hand over to 'doIt'.
main :: IO ()
main = do
    args <- getArgs
    case parseRunGhcFlags args of
        (Help, _) -> printUsage
        (ShowVersion, _) -> printVersion
        (RunGhcFlags (Just ghc), args') -> doIt ghc args'
        (RunGhcFlags Nothing, args') -> do
            mbPath <- getExecPath
            case mbPath of
                Nothing -> dieProg ("cannot find ghc")
                Just path ->
                    let ghc = takeDirectory (normalise path) </> "ghc"
                    in doIt ghc args'
-- | Result of parsing the runghc-specific flags.
data RunGhcFlags = RunGhcFlags (Maybe FilePath) -- GHC location
                 | Help -- Print help text
                 | ShowVersion -- Print version info
-- | Flag accumulation: 'Help' and 'ShowVersion' dominate everything,
-- and a later explicit GHC path overrides an earlier absent one.
-- NOTE(review): modern GHC also requires a 'Semigroup' instance; this
-- file predates that split.
instance Monoid RunGhcFlags where
    mempty = RunGhcFlags Nothing
    Help `mappend` _ = Help
    _ `mappend` Help = Help
    ShowVersion `mappend` _ = ShowVersion
    _ `mappend` ShowVersion = ShowVersion
    RunGhcFlags _ `mappend` right@(RunGhcFlags (Just _)) = right
    left@(RunGhcFlags _) `mappend` RunGhcFlags Nothing = left
-- | Peel runghc's own flags (-f PATH / -fPATH / --help / --version)
-- off the front of the argument list; everything after them (or after
-- an explicit "--") is returned untouched for GHC and the program.
parseRunGhcFlags :: [String] -> (RunGhcFlags, [String])
parseRunGhcFlags = f mempty
    where f flags ("-f" : ghc : args)
              = f (flags `mappend` RunGhcFlags (Just ghc)) args
          f flags (('-' : 'f' : ghc) : args)
              = f (flags `mappend` RunGhcFlags (Just ghc)) args
          f flags ("--help" : args) = f (flags `mappend` Help) args
          f flags ("--version" : args) = f (flags `mappend` ShowVersion) args
          -- If you need the first GHC flag to be a -f flag then
          -- you can pass -- first
          f flags ("--" : args) = (flags, args)
          f flags args = (flags, args)
-- | Print the runghc version; VERSION is substituted by CPP at build time.
printVersion :: IO ()
printVersion = do
    putStrLn ("runghc " ++ VERSION)
-- | Print the runghc command-line help text.
printUsage :: IO ()
printUsage = mapM_ putStrLn
    [ "Usage: runghc [runghc flags] [GHC flags] module [program args]"
    , ""
    , "The runghc flags are"
    , "   -f /path/to/ghc   Tell runghc where GHC is"
    , "   --help            Print this usage information"
    , "   --version         Print version number"
    ]
-- | Run the script through GHC's "-e" interface, wiring up ":set prog"
-- and ":main" so the script sees its own name and arguments.  With no
-- file argument, stdin is spooled to a temp file first (interpreter
-- style), which 'bracket' guarantees is removed afterwards.
doIt :: String -> [String] -> IO ()
doIt ghc args = do
    let (ghc_args, rest) = getGhcArgs args
    case rest of
        [] -> do
            -- behave like typical perl, python, ruby interpreters:
            -- read from stdin
            tmpdir <- getTemporaryDirectory
            bracket
                (openTempFile tmpdir "runghcXXXX.hs")
                (\(filename,h) -> do hClose h; removeFile filename)
                $ \(filename,h) -> do
                    getContents >>= hPutStr h
                    hClose h
                    doIt ghc (ghc_args ++ [filename])
        filename : prog_args -> do
            -- If the file exists, and is not a .lhs file, then we
            -- want to treat it as a .hs file.
            --
            -- If the file doesn't exist then GHC is going to look for
            -- filename.hs and filename.lhs, and use the appropriate
            -- type.
            exists <- doesFileExist filename
            let xflag = if exists && (takeExtension filename /= ".lhs")
                        then ["-x", "hs"]
                        else []
                c1 = ":set prog " ++ show filename
                c2 = ":main " ++ show prog_args
            res <- rawSystem ghc (["-ignore-dot-ghci"] ++
                                  xflag ++
                                  ghc_args ++
                                  [ "-e", c1, "-e", c2, filename])
            exitWith res
-- | Split the command line at the first argument past the flags (see
-- 'pastArgs') into GHC-bound flags and the rest; a bare "--" separator
-- is dropped, and "--ghc-arg=X" wrappers in the flag part are unwrapped.
getGhcArgs :: [String] -> ([String], [String])
getGhcArgs args = (map unescape ghcArgs, otherArgs)
    where
      (ghcArgs, otherArgs) =
          case break pastArgs args of
              (xs, "--" : ys) -> (xs, ys)
              splitAsIs       -> splitAsIs
      unescape ('-':'-':'g':'h':'c':'-':'a':'r':'g':'=':arg) = arg
      unescape arg = arg
-- | True once we are past the flag section of the command line: "--"
-- explicitly ends the flags, any other dash-prefixed word is still a
-- flag, and anything else (e.g. a file name) ends them too.
pastArgs :: String -> Bool
pastArgs arg = case arg of
    "--"    -> True
    '-' : _ -> False
    _       -> True
-- | Print @prog: msg@ on stderr and exit with status 1.
dieProg :: String -> IO a
dieProg msg =
    getProgName >>= \prog ->
    hPutStrLn stderr (prog ++ ": " ++ msg) >>
    exitWith (ExitFailure 1)
-- usage :: String
-- usage = "syntax: runghc [-f GHC-PATH | --] [GHC-ARGS] [--] FILE ARG..."
-- | Best-effort path of the running executable.  Only implemented on
-- Windows (via GetModuleFileNameW, growing the buffer until the result
-- fits); on other platforms it always returns 'Nothing'.
getExecPath :: IO (Maybe String)
#if defined(mingw32_HOST_OS)
getExecPath = try_size 2048 -- plenty, PATH_MAX is 512 under Win32.
  where
    try_size size = allocaArray (fromIntegral size) $ \buf -> do
        -- Returns 0 on failure; a value >= size means the buffer was
        -- too small, so retry with double the size.
        ret <- c_GetModuleFileName nullPtr buf size
        case ret of
          0 -> return Nothing
          _ | ret < size -> fmap Just $ peekCWString buf
            | otherwise  -> try_size (size * 2)
foreign import WINDOWS_CCONV unsafe "windows.h GetModuleFileNameW"
  c_GetModuleFileName :: Ptr () -> CWString -> Word32 -> IO Word32
#else
getExecPath = return Nothing
#endif
| nomeata/ghc | utils/runghc/runghc.hs | bsd-3-clause | 5,945 | 9 | 25 | 1,819 | 1,451 | 764 | 687 | 100 | 6 |
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE OverloadedStrings #-}
module Views.UserSplices where
import Control.Arrow (second)
import Control.Monad.Trans
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import Data.Time
import Heist
import qualified Heist.Interpreted as I
import Snap.Snaplet.Auth
import Application
import Models.Exception
import Models.User
import Models.Utils
import Views.Types
import Views.Utils
------------------------------------------------------------------------------
-- | A 'User' renders through 'renderUser'.
instance SpliceRenderable User where
   toSplice = renderUser
------------------------------------------------------------------------------
-- | Splices used at user Detail page.
-- Display either a user or error msg.
--
userDetailSplices :: Either UserException User -> Splices (I.Splice AppHandler)
userDetailSplices = eitherToSplices  -- delegates to the generic either-to-splices renderer
------------------------------------------------------------------------------
-- | Single user to Splice.
--
renderUser :: User -> I.Splice AppHandler
renderUser user = I.runChildrenWith $ foldSplices $
    -- Structural splices (permission check, auth-account timestamps)...
    [ ("userEditable", hasEditPermissionSplice user)
    --, ("userLastLoginAt", userLastLoginAtSplice $ _authUser user)
    , ("isAuthUser", isAuthUserRetrieved $ _authUser user)
    ]
    ++
    -- ...followed by plain-text fields; optional ones fall back to "".
    map (second I.textSplice)
    [ ("userLogin", maybe "" userLogin $ _authUser user)
    , ("userEmail", _userEmail user)
    , ("userDisplayName", _userDisplayName user)
    , ("userSite", fromMaybe "" $ _userSite user)
    , ("userId", maybe "error-no-user-id-found" sToText $ getUserId' user)
    ]
-- | Format a time value, rendering 'Nothing' as the empty text.
formatUTCTimeMaybe :: Maybe UTCTime -> T.Text
formatUTCTimeMaybe = maybe "" formatUTCTime
-- | Splice the auth account's last-login and creation timestamps;
-- renders nothing when the backing 'AuthUser' is absent.
isAuthUserRetrieved :: Maybe AuthUser
                       -> I.Splice AppHandler
isAuthUserRetrieved Nothing = return []
isAuthUserRetrieved (Just authusr) =
    I.runChildrenWithText $ do
        "lastLoginTime" ## formatUTCTimeMaybe $ userLastLoginAt authusr
        "createdAt" ## formatUTCTimeMaybe $ userCreatedAt authusr
----------------------------------------------------------------------------
-- | Has Edit premission when either current user is Admin or Author.
--
hasEditPermissionSplice :: User -- ^ Author of the content
                           -> I.Splice AppHandler
hasEditPermissionSplice author = do
    -- Render the children only when the current user may update the
    -- author's content.
    has <- lift $ hasUpdatePermission author
    if has then I.runChildren else return []
----------------------------------------------------------------------------
| HaskellCNOrg/snap-web | src/Views/UserSplices.hs | bsd-3-clause | 2,837 | 0 | 10 | 750 | 482 | 263 | 219 | 47 | 2 |
import Prelude
{-import Data.AltComposition-}
-- | Feed a binary function's result into the first slot of another
-- binary function: @(f %*.** g) x y z == f (g x y) z@.
(%*.**) :: (c -> d -> e) -> (a -> b -> c) -> a -> b -> d -> e
(%*.**) f g x y z = f (g x y) z
infixr 9 %*.**
-- | Uncurried sibling of (%*.**): take all three inputs as a triple.
(%*-**) :: (c -> d -> e) -> (a -> b -> c) -> (a,b,d) -> e
(%*-**) f g (x, y, z) = f (g x y) z
infixr 9 %*-**
-- | Integer addition ('(+)' specialised to 'Int').
add :: Int -> Int -> Int
add = (+)
-- | Integer multiplication ('(*)' specialised to 'Int').
mult :: Int -> Int -> Int
mult = (*)
-- | Function application at precedence 8 (a tighter-binding '$').
(§) :: (a -> b) -> a -> b
(§) f x = f x
infixr 8 §
-- | Feed a ternary function's result into the SECOND slot of a binary
-- function: @(f *%.*** g) x y w z == f z (g x y w)@.
(*%.***) :: (d -> e -> f) -> (a -> b -> c -> e) -> a -> b -> c -> d -> f
(*%.***) f g x y w z = f z (g x y w)
infixr 9 *%.***
{-(*§) :: ((a,b,c) -> d) -> a -> b -> c -> d-}
{-f *§ x y z = curry3 f-}
{-infixr 0 *§-}
{-xpto = uncurry3 *%.** ((,,))-}
{-xpto = uncurry *%.** (,)-}
-- | Ternary application: @f *§ x y z == f x y z@ (low precedence, left
-- associative, so it plays the role of '$' for three arguments).
(*§) :: (a -> b -> c -> d) -> a -> b -> c -> d
(*§) f x y z = f x y z
infixl 5 *§
-- | Identity on curried binary functions (curry after uncurry).
(§§) f = curry (uncurry f)
infixr 8 §§
{-# INLINE curry3 #-}
-- | Turn a triple-consuming function into a curried ternary one.
curry3 :: ((a, b, c) -> d) -> a -> b -> c -> d
curry3 f x y z = f (x, y, z)
{-# INLINE uncurry3 #-}
-- | Turn a curried ternary function into one taking a triple.  The lazy
-- 'where' binding keeps the original irrefutable-pattern semantics.
uncurry3 :: (a -> b -> c -> d) -> ((a, b, c) -> d)
uncurry3 f triple = f x y z
  where (x, y, z) = triple
-- | Identity on curried ternary functions (curry3 after uncurry3).
(§§§) f = curry3 (uncurry3 f)
infixr 9 §§§
-- | Smoke test for the composition operators: prints 38 (= 5*7 + 3).
-- Note: r' recomputes the same value via *§ but is never printed.
main :: IO ()
main = do
  let a = 5 :: Int
      b = 7 :: Int
      fr = add %*.** mult -- \x y z -> (x * y) + z
      r = fr a b 3 -- 38
      -- r' = fr $ a b 3 -- invalid
      r' = add %*.** mult *§ a b 3
  print r
| jcristovao/altcomposition | test/test0.hs | bsd-3-clause | 1,291 | 0 | 14 | 406 | 701 | 380 | 321 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses,TemplateHaskell, OverloadedStrings #-}
module Handler.Caption (postCaptionR) where
import Foundation
import Forms.Caption
import Helpers.Document
import qualified Data.Text as T
-- | Handle the caption form POST for an image: on success persist the
-- submitted caption, then (success or not) redirect back to the image
-- page.  (The binding @id@ shadows 'Prelude.id'; widget/enctype unused.)
postCaptionR :: ImagesId ->Handler RepHtml
postCaptionR id = do
    ((result, widget), enctype) <- runFormPost captionForm
    case result of
        FormSuccess cap -> do
            runDB (update id [ImagesCaption =. T.unpack cap])
            redirect RedirectTemporary (ImageR id)
        _ -> redirect RedirectTemporary $ ImageR id
| pankajmore/Imghost | Handler/Caption.hs | bsd-3-clause | 632 | 0 | 18 | 146 | 148 | 77 | 71 | 15 | 2 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.PGI.VertexHints
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.PGI.VertexHints (
-- * Extension Support
glGetPGIVertexHints,
gl_PGI_vertex_hints,
-- * Enums
pattern GL_COLOR3_BIT_PGI,
pattern GL_COLOR4_BIT_PGI,
pattern GL_EDGEFLAG_BIT_PGI,
pattern GL_INDEX_BIT_PGI,
pattern GL_MATERIAL_SIDE_HINT_PGI,
pattern GL_MAT_AMBIENT_AND_DIFFUSE_BIT_PGI,
pattern GL_MAT_AMBIENT_BIT_PGI,
pattern GL_MAT_COLOR_INDEXES_BIT_PGI,
pattern GL_MAT_DIFFUSE_BIT_PGI,
pattern GL_MAT_EMISSION_BIT_PGI,
pattern GL_MAT_SHININESS_BIT_PGI,
pattern GL_MAT_SPECULAR_BIT_PGI,
pattern GL_MAX_VERTEX_HINT_PGI,
pattern GL_NORMAL_BIT_PGI,
pattern GL_TEXCOORD1_BIT_PGI,
pattern GL_TEXCOORD2_BIT_PGI,
pattern GL_TEXCOORD3_BIT_PGI,
pattern GL_TEXCOORD4_BIT_PGI,
pattern GL_VERTEX23_BIT_PGI,
pattern GL_VERTEX4_BIT_PGI,
pattern GL_VERTEX_CONSISTENT_HINT_PGI,
pattern GL_VERTEX_DATA_HINT_PGI
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/PGI/VertexHints.hs | bsd-3-clause | 1,353 | 0 | 5 | 175 | 152 | 99 | 53 | 28 | 0 |
Subsets and Splits