| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5..1.03M | stringlengths 5..90 | stringlengths 4..158 | stringclasses 15 values | int64 5..1.03M | int64 0..53.9k | int64 2..4.17k | int64 0..365k | int64 3..317k | int64 1..171k | int64 1..146k | int64 -1..37.3k | int64 -1..1.31k |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# OPTIONS_GHC -Wno-missing-export-lists #-}
-- TODO: try to make it more type-safe somehow?
-- | Javascript methods used for the site.
--
-- Some Javascript can also be found in .widget files. Hopefully, in the
-- future this whole module will be removed.
module Guide.JS where
-- Shared imports
import Imports
import NeatInterpolation
import Guide.Uid
import qualified Data.Text as T
import qualified Data.Text.Lazy.Builder as B
-- | Javascript code.
newtype JS = JS {fromJS :: Text}
deriving (Show, Buildable, Semigroup, Monoid)
-- | A concatenation of all Javascript functions defined in this module.
allJSFunctions :: JS
allJSFunctions = JS . T.unlines . map fromJS $ [
-- Utilities
replaceWithData, prependData, appendData,
moveNodeUp, moveNodeDown,
switchSection, switchSectionsEverywhere,
fadeIn, fadeOutAndRemove,
focusOn,
-- Misc
createAjaxIndicator,
autosizeTextarea,
expandHash,
expandItemNotes,
showDiffPopup,
-- Creating parts of interface
makeItemNotesEditor,
-- Add methods
addCategoryAndRedirect, addItem,
addPro, addCon,
-- Set methods
submitCategoryInfo, submitCategoryNotes,
submitItemDescription,
submitItemNotes, submitItemEcosystem,
-- Other things
deleteCategoryAndRedirect,
-- Admin things
acceptEdit, undoEdit,
acceptBlock, undoBlock,
createCheckpoint,
saveToArchiveOrg]
-- | A class for things that can be converted to Javascript syntax.
class ToJS a where toJS :: a -> JS
instance ToJS Bool where
toJS True = JS "true"
toJS False = JS "false"
instance ToJS JS where
toJS = id
instance ToJS Text where
toJS = JS . escapeJSString
instance ToJS Integer where
toJS = JS . toText . show
instance ToJS Int where
toJS = JS . toText . show
instance ToJS (Uid a) where
toJS = toJS . uidToText
-- | A helper class for calling Javascript functions.
class JSParams a where
jsParams :: a -> [JS]
instance JSParams () where
jsParams () = []
instance ToJS a => JSParams [a] where
jsParams = map toJS
instance (ToJS a,ToJS b) => JSParams (a,b) where
jsParams (a,b) = [toJS a, toJS b]
instance (ToJS a,ToJS b,ToJS c) => JSParams (a,b,c) where
jsParams (a,b,c) = [toJS a, toJS b, toJS c]
instance (ToJS a,ToJS b,ToJS c,ToJS d) => JSParams (a,b,c,d) where
jsParams (a,b,c,d) = [toJS a, toJS b, toJS c, toJS d]
instance (ToJS a,ToJS b,ToJS c,ToJS d,ToJS e) => JSParams (a,b,c,d,e) where
jsParams (a,b,c,d,e) = [toJS a, toJS b, toJS c, toJS d, toJS e]
instance (ToJS a,ToJS b,ToJS c,ToJS d,ToJS e,ToJS f) => JSParams (a,b,c,d,e,f) where
jsParams (a,b,c,d,e,f) = [toJS a, toJS b, toJS c, toJS d, toJS e, toJS f]
-- | This hacky class lets you construct and use Javascript functions; you
-- give 'makeJSFunction' function name, function parameters, and function
-- body, and you get a polymorphic value of type @JSFunction a => a@, which
-- you can use either as a complete function definition (if you set @a@ to
-- be @JS@), or as a function that you can give some parameters and it would
-- return a Javascript call:
--
-- > plus = makeJSFunction "plus" ["a", "b"] "return a+b;"
--
-- >>> plus :: JS
-- JS "function plus(a,b) {\nreturn a+b;}\n"
-- >>> plus (3, 5) :: JS
-- JS "plus(3,5);"
class JSFunction a where
makeJSFunction
:: Text -- ^ Name
-> [Text] -- ^ Parameter names
-> Text -- ^ Definition
-> a
-- | Generates a function definition
instance JSFunction JS where
makeJSFunction fName fParams fDef =
let paramList = T.intercalate "," fParams
in JS $ format "function "+|fName|+"("+|paramList|+") {\n"
+|indentF 2 (build fDef)|+
"}\n"
-- | Generates a function that takes arguments and produces a Javascript
-- function call
instance JSParams a => JSFunction (a -> JS) where
makeJSFunction fName _fParams _fDef = \args ->
let paramList = T.intercalate "," (map fromJS (jsParams args))
in JS $ format "{}({});" fName paramList
-- | Also produces a Javascript function call, but prefixes the function
-- with @this.@; this is needed for event handlers in Vue for some reason
newtype WithThis a = WithThis { withThis :: a }
instance JSFunction a => JSFunction (WithThis a) where
makeJSFunction fName fParams fDef = WithThis $
makeJSFunction ("this." <> fName) fParams fDef
-- | NB: this isn't a standalone function and so it doesn't have to be
-- listed in 'allJSFunctions'.
assign :: ToJS x => JS -> x -> JS
assign v x = JS $ format "{} = {};" v (toJS x)
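-- For example, @assign (JS "node.value") ("hi" :: Text)@ renders the
-- statement @node.value = "hi";@; the right-hand side is escaped and quoted
-- by 'toJS'. (Illustrative note, not part of the original module.)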
-- TODO: all links here shouldn't be absolute [absolute-links]
replaceWithData :: JSFunction a => a
replaceWithData =
makeJSFunction "replaceWithData" ["node"]
[text|
return function(data) {$(node).replaceWith(data);};
|]
prependData :: JSFunction a => a
prependData =
makeJSFunction "prependData" ["node"]
[text|
return function(data) {$(node).prepend(data);};
|]
appendData :: JSFunction a => a
appendData =
makeJSFunction "appendData" ["node"]
[text|
return function(data) {$(node).append(data);};
|]
-- | Move node up (in a list of sibling nodes), ignoring anchor elements
-- inserted by 'thisNode'.
moveNodeUp :: JSFunction a => a
moveNodeUp =
makeJSFunction "moveNodeUp" ["node"]
[text|
var el = $(node);
while (el.prev().is(".dummy"))
el.prev().before(el);
if (el.not(':first-child'))
el.prev().before(el);
|]
-- | Move node down (in a list of sibling nodes), ignoring anchor elements
-- inserted by 'thisNode'.
moveNodeDown :: JSFunction a => a
moveNodeDown =
makeJSFunction "moveNodeDown" ["node"]
[text|
var el = $(node);
while (el.next().is(".dummy"))
el.next().after(el);
if (el.not(':last-child'))
el.next().after(el);
|]
-- | Given something that contains section divs (or spans), show one and
-- hide the rest. The div/span with the given @class@ will be chosen.
--
-- See Note [show-hide]
switchSection :: JSFunction a => a
switchSection =
makeJSFunction "switchSection" ["node", "section"]
[text|
$(node).children(".section").removeClass("shown");
$(node).children(".section."+section).addClass("shown");
// See Note [autosize]
autosize($('textarea'));
autosize.update($('textarea'));
|]
-- | Switch sections /everywhere/ inside the container.
--
-- See Note [show-hide]
switchSectionsEverywhere :: JSFunction a => a
switchSectionsEverywhere =
makeJSFunction "switchSectionsEverywhere" ["node", "section"]
[text|
$(node).find(".section").removeClass("shown");
$(node).find(".section."+section).addClass("shown");
// See Note [autosize]
autosize($('textarea'));
autosize.update($('textarea'));
|]
-- | This function makes the node half-transparent and then animates it to
-- full opaqueness. It's useful when e.g. something has been moved and you
-- want to “flash” the item to draw the user's attention to it.
fadeIn :: JSFunction a => a
fadeIn =
makeJSFunction "fadeIn" ["node"]
[text|
$(node).fadeTo(0,0.2).fadeTo(600,1);
|]
-- | This function animates the node to half-transparency and then removes it
-- completely. It's useful when you're removing something and you want to
-- draw the user's attention to the fact that it's being removed.
--
-- The reason there isn't a simple @fadeOut@ utility function here is that
-- removal has to be done by passing a callback to @fadeTo@. In jQuery you
-- can't simply wait until the animation has stopped.
fadeOutAndRemove :: JSFunction a => a
fadeOutAndRemove =
makeJSFunction "fadeOutAndRemove" ["node"]
[text|
$(node).fadeTo(400,0.2,function(){$(node).remove()});
|]
focusOn :: JSFunction a => a
focusOn =
makeJSFunction "focusOn" ["node"]
[text|
$(node).focus();
|]
createAjaxIndicator :: JSFunction a => a
createAjaxIndicator =
makeJSFunction "createAjaxIndicator" []
[text|
$("body").prepend('<div id="ajax-indicator"></div>');
$(document).ajaxStart(function() {
$("#ajax-indicator").show();
});
$(document).ajaxStop(function() {
$("#ajax-indicator").hide();
});
$("#ajax-indicator").hide();
|]
autosizeTextarea :: JSFunction a => a
autosizeTextarea =
makeJSFunction "autosizeTextarea" ["textareaNode"]
[text|
autosize(textareaNode);
autosize.update(textareaNode);
|]
-- | Read the anchor from the address bar (i.e. the thing after #) and use it
-- to expand something (e.g. notes). It's needed to implement linking
-- properly – e.g. notes are usually unexpanded, but when you're giving
-- someone a direct link to notes, it makes sense to expand them. If you call
-- 'expandHash' after the page has loaded, it will do just that.
expandHash :: JSFunction a => a
expandHash =
makeJSFunction "expandHash" []
[text|
hash = $(location).attr('hash');
if (hash.slice(0,12) == "#item-notes-") {
if (hash.indexOf('-', 12) != (-1))
// For links to sections of items' notes (from the TOC)
itemId = hash.slice(12, hash.indexOf('-', 12))
else
// For links to items' notes
itemId = hash.slice(12);
expandItemNotes(itemId);
} else
if (hash.slice(0,6) == "#item-") {
itemId = hash.slice(6);
expandItemNotes(itemId);
}
|]
expandItemNotes :: JSFunction a => a
expandItemNotes =
makeJSFunction "expandItemNotes" ["itemId"]
[text|
switchSection("#item-notes-"+itemId, "expanded");
|]
showDiffPopup :: JSFunction a => a
showDiffPopup =
makeJSFunction "showDiffPopup" ["ours", "modified", "merged", "send"]
[text|
dialog = $("<div>", {
"class" : "diff-popup"
})[0];
choices = $("<div>", {
"class" : "diff-choices"
})[0];
// our version
choiceOurs = $("<div>", {
"class" : "var-a" })[0];
textOurs = $("<div>", {
"class" : "text",
"text" : ours })[0];
headerOurs = $("<strong>", {
"text" : "Your version" })[0];
buttonOurs = $("<button>", {
"text" : "Submit this version, disregard changes on the server" })[0];
$(buttonOurs).click(function() {
send(ours); });
$(choiceOurs).append(headerOurs, textOurs, buttonOurs);
// modified version
choiceMod = $("<div>", {
"class" : "var-b" })[0];
textMod = $("<div>", {
"class" : "text",
"text" : modified })[0];
headerMod = $("<strong>", {
"text" : "Version on the server" })[0];
buttonMod = $("<button>", {
"text" : "Accept this version, disregard my changes" })[0];
$(buttonMod).click(function() {
send(modified); });
$(choiceMod).append(headerMod, textMod, buttonMod);
// building merged
choiceMerged = $("<div>", {
"class" : "var-merged" })[0];
areaMerged = $("<textarea>", {
"autocomplete" : "off",
"text" : merged })[0];
headerMerged = $("<strong>", {
"text" : "Merged version (edit if needed)" })[0];
buttonMerged = $("<button>", {
"text" : "Submit the merged version" })[0];
$(buttonMerged).click(function () {
send(areaMerged.value); });
$(choiceMerged).append(headerMerged, areaMerged, buttonMerged);
$(choices).append(choiceOurs, choiceMod, choiceMerged);
$(dialog).append(choices);
$.magnificPopup.open({
modal: true,
items: {
src: dialog,
type: 'inline' }
});
autosizeTextarea(areaMerged);
|]
{- Note [blurb diffing]
~~~~~~~~~~~~~~~~~~~~~~~
A note on why we need the 'wasEmpty' parameter in 'makeItemNotesEditor'.
Assume that the notes are empty. The text in the area, therefore, will be
some default blurb (“# Links, #Imports, #Usage”, etc). Suppose the user edits
it. What will be sent to the server?
* original: blurb
* our version: modified blurb
What will happen next? The server will compare it to the value currently at
the server (i.e. an empty string), and think that the blurb *was* on the
server but got deleted while the client was editing. This is wrong, and
will result in a diff popup comparing an edited blurb to an empty string. To
prevent this, we pass 'wasEmpty' to 'makeItemNotesEditor' – if we're using a
blurb, we'll pass an empty string as the original.
-}
-- | Dynamically creates a 'View.markdownEditor' (but specifically for item
-- notes). See Note [dynamic interface].
makeItemNotesEditor :: JSFunction a => a
makeItemNotesEditor =
-- See Note [blurb diffing]
makeJSFunction "makeItemNotesEditor"
["notesNode", "sectionNode", "textareaUid",
"wasEmpty", "content", "itemId"]
[text|
$(sectionNode).html("");
area = $("<textarea>", {
"autocomplete" : "off",
"rows" : "10",
"id" : textareaUid,
"class" : "big fullwidth",
"text" : content })[0];
saveBtn = $("<input>", {
"value" : "Save",
"type" : "button" })[0];
save = function () {
submitItemNotes(notesNode,
itemId,
wasEmpty ? "" : content,
area.value); };
saveBtn.onclick = save;
cancelBtn = $("<input>", {
"value" : "Cancel",
"type" : "button" })[0];
cancel = function () {
$(sectionNode).html("");
switchSection(notesNode, "expanded"); };
cancelBtn.onclick = cancel;
area.onkeydown = function (event) {
if ((event.keyCode == 13 || event.keyCode == 10) &&
(event.metaKey || event.ctrlKey)) {
save();
return false; }
if (event.keyCode == 27) {
cancel();
return false; }
};
// Can't use $()-generation here because then the <span> would have
// to be cloned (since we're inserting it multiple times) and I don't
// know how to do that.
space = "<span style='margin-left:6px'></span>";
enter = $("<span>", {
"class": "edit-field-instruction",
"text" : "or press Ctrl+Enter to save" })[0];
markdownSupported = $("<img>", {
"src": "/markdown.svg",
"class": " markdown-supported "
})[0];
markdown = $("<a>", {
"href" : "/markdown",
"target" : "_blank"})[0];
$(sectionNode).append(
area, saveBtn, $(space), cancelBtn, $(space), enter, $(markdown).append(markdownSupported));
|]
-- | Create a new category and redirect to it (or redirect to an old category
-- if it exists already).
addCategoryAndRedirect :: JSFunction a => a
addCategoryAndRedirect =
makeJSFunction "addCategoryAndRedirect" ["s"]
[text|
$.post("/haskell/add/category", {content: s})
.done(function (url) {
window.location.href = url;
});
|]
-- | Add a new item to some category.
addItem :: JSFunction a => a
addItem =
makeJSFunction "addItem" ["node", "catId", "s"]
[text|
$.post("/haskell/add/category/"+catId+"/item", {name: s})
.done(appendData(node));
|]
submitCategoryInfo :: JSFunction a => a
submitCategoryInfo =
makeJSFunction "submitCategoryInfo" ["infoNode", "catId", "form"]
[text|
$.post("/haskell/set/category/"+catId+"/info", $(form).serialize())
.done(function (data) {
$(infoNode).replaceWith(data);
// If pros-cons-enabled and other *enabled properties were changed, we
// have to show/hide relevant sections in all items of the category.
// See Note [enabled sections] for details.
if ($(form)[0]["pros-cons-enabled"].checked)
$(".pros-cons-wrapper").show();
else $(".pros-cons-wrapper").hide();
if ($(form)[0]["ecosystem-enabled"].checked)
$(".ecosystem-wrapper").show();
else $(".ecosystem-wrapper").hide();
if ($(form)[0]["notes-enabled"].checked)
$(".notes-wrapper").show();
else $(".notes-wrapper").hide();
});
|]
submitCategoryNotes :: JSFunction a => a
submitCategoryNotes =
makeJSFunction "submitCategoryNotes"
["node", "catId", "original, ours"]
[text|
$.post({
url: "/haskell/set/category/"+catId+"/notes",
data: {
original: original,
content: ours },
success: function (data) {
$.magnificPopup.close();
$(node).replaceWith(data); },
statusCode: {
409: function (xhr, st, err) {
modified = xhr.responseJSON["modified"];
merged = xhr.responseJSON["merged"];
showDiffPopup(ours, modified, merged, function (x) {
submitCategoryNotes(node, catId, modified, x) }); } }
});
|]
submitItemDescription :: JSFunction a => a
submitItemDescription =
makeJSFunction "submitItemDescription"
["node", "itemId", "original", "ours"]
[text|
$.post({
url: "/haskell/set/item/"+itemId+"/description",
data: {
original: original,
content: ours },
success: function (data) {
$.magnificPopup.close();
$(node).replaceWith(data); },
statusCode: {
409: function (xhr, st, err) {
modified = xhr.responseJSON["modified"];
merged = xhr.responseJSON["merged"];
showDiffPopup(ours, modified, merged, function (x) {
submitItemDescription(node, itemId, modified, x) }); } }
});
|]
submitItemEcosystem :: JSFunction a => a
submitItemEcosystem =
makeJSFunction "submitItemEcosystem"
["node", "itemId", "original", "ours"]
[text|
$.post({
url: "/haskell/set/item/"+itemId+"/ecosystem",
data: {
original: original,
content: ours },
success: function (data) {
$.magnificPopup.close();
$(node).replaceWith(data); },
statusCode: {
409: function (xhr, st, err) {
modified = xhr.responseJSON["modified"];
merged = xhr.responseJSON["merged"];
showDiffPopup(ours, modified, merged, function (x) {
submitItemEcosystem(node, itemId, modified, x) }); } }
});
|]
submitItemNotes :: JSFunction a => a
submitItemNotes =
makeJSFunction "submitItemNotes"
["node", "itemId", "original", "ours"]
[text|
$.post({
url: "/haskell/set/item/"+itemId+"/notes",
data: {
original: original,
content: ours },
success: function (data) {
$.magnificPopup.close();
$(node).replaceWith(data);
// Switching has to be done here and not in 'Main.renderItemNotes'
// because $.post is asynchronous and will be done *after*
// switchSection has worked.
switchSection(node, "expanded"); },
statusCode: {
409: function (xhr, st, err) {
modified = xhr.responseJSON["modified"];
merged = xhr.responseJSON["merged"];
showDiffPopup(ours, modified, merged, function (x) {
submitItemNotes(node, itemId, modified, x) }); } }
});
|]
-- | Add a pro to some item.
addPro :: JSFunction a => a
addPro =
makeJSFunction "addPro" ["node", "itemId", "s"]
[text|
$.post("/haskell/add/item/"+itemId+"/pro", {content: s})
.done(function (data) {
var jData = $(data);
jData.appendTo(node);
switchSection(jData, "editable");
});
|]
-- | Add a con to some item.
addCon :: JSFunction a => a
addCon =
makeJSFunction "addCon" ["node", "itemId", "s"]
[text|
$.post("/haskell/add/item/"+itemId+"/con", {content: s})
.done(function (data) {
var jData = $(data);
jData.appendTo(node);
switchSection(jData, "editable");
});
|]
deleteCategoryAndRedirect :: JSFunction a => a
deleteCategoryAndRedirect =
makeJSFunction "deleteCategoryAndRedirect" ["catId"]
[text|
if (confirm("Confirm deletion?")) {
$.post("/haskell/delete/category/"+catId)
.done(function () {
window.location.href = "/haskell";
});
}
|]
acceptEdit :: JSFunction a => a
acceptEdit =
makeJSFunction "acceptEdit" ["editId", "editNode"]
[text|
$.post("/admin/edit/"+editId+"/accept")
.done(function () {
fadeOutAndRemove(editNode);
});
|]
undoEdit :: JSFunction a => a
undoEdit =
makeJSFunction "undoEdit" ["editId", "editNode"]
[text|
$.post("/admin/edit/"+editId+"/undo")
.done(function (data) {
if (data == "")
fadeOutAndRemove(editNode);
else
alert("couldn't undo edit: " + data);
});
|]
acceptBlock :: JSFunction a => a
acceptBlock =
makeJSFunction "acceptBlock" ["editLatest", "editEarliest", "blockNode"]
[text|
$.post("/admin/edits/"+editLatest+"/"+editEarliest+"/accept")
.done(function () {
fadeOutAndRemove(blockNode);
});
|]
undoBlock :: JSFunction a => a
undoBlock =
makeJSFunction "undoBlock" ["editLatest", "editEarliest", "blockNode"]
[text|
$.post("/admin/edits/"+editLatest+"/"+editEarliest+"/undo")
.done(function (data) {
if (data == "")
fadeOutAndRemove(blockNode);
else
$(blockNode).replaceWith(data);
});
|]
createCheckpoint :: JSFunction a => a
createCheckpoint =
makeJSFunction "createCheckpoint" ["buttonNode"]
[text|
$.post("/admin/create-checkpoint")
.done(function () {
fadeIn(buttonNode);
});
|]
saveToArchiveOrg :: JSFunction a => a
saveToArchiveOrg =
makeJSFunction "saveToArchiveOrg" ["link"]
[text|
$.post('http://web.archive.org/save/' + link)
.done(function () {
console.log(link + " saved to archive.org")
});
|]
-- When adding a function, don't forget to add it to 'allJSFunctions'!
escapeJSString :: Text -> Text
escapeJSString s =
toText $
B.singleton '"' <> quote s <> B.singleton '"'
where
quote q = case T.uncons t of
Nothing -> toTextBuilder h
Just (!c, t') -> toTextBuilder h <> escape c <> quote t'
where
(h, t) = T.break isEscape q
-- 'isEscape' doesn't mention \n, \r and \t because they are handled by
-- the “< '\x20'” case; yes, later 'escape' escapes them differently,
-- but it's irrelevant
isEscape c = c == '\"' || c == '\\' ||
c == '\x2028' || c == '\x2029' ||
c < '\x20'
escape '\"' = "\\\""
escape '\\' = "\\\\"
escape '\n' = "\\n"
escape '\r' = "\\r"
escape '\t' = "\\t"
escape c
| c < '\x20' || c == '\x2028' || c == '\x2029' =
"\\u" <> padLeftF 4 '0' (hexF (fromEnum c))
| otherwise =
B.singleton c
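-- A quick illustration of the escaping above (the result is shown as a
-- Haskell string literal; this example is not part of the original module):
--
-- > escapeJSString "say \"hi\"\n"  ==  "\"say \\\"hi\\\"\\n\""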
newtype JQuerySelector = JQuerySelector Text
deriving (ToJS, Buildable)
selectId :: Text -> JQuerySelector
selectId x = JQuerySelector $ format "#{}" x
selectUid :: Uid a -> JQuerySelector
selectUid x = JQuerySelector $ format "#{}" x
selectClass :: Text -> JQuerySelector
selectClass x = JQuerySelector $ format ".{}" x
selectParent :: JQuerySelector -> JQuerySelector
selectParent x = JQuerySelector $ format ":has(> {})" x
selectChildren :: JQuerySelector -> JQuerySelector -> JQuerySelector
selectChildren a b = JQuerySelector $ format "{} > {}" a b
selectSection :: JQuerySelector -> Text -> JQuerySelector
selectSection a b = JQuerySelector $ format "{} > .section.{}" a b
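-- A small sketch of how these selectors compose (not part of the original
-- module): @selectSection (selectId "item-notes-12") "expanded"@ builds the
-- selector @#item-notes-12 > .section.expanded@, and passing it to a
-- 'JSFunction' call quotes it as a Javascript string literal via 'ToJS'.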
| aelve/guide | back/src/Guide/JS.hs | bsd-3-clause | 23,170 | 0 | 15 | 5,362 | 2,812 | 1,591 | 1,221 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
module Visor where
import Types
import Vector
import Network
import Network.Label
import qualified Network.Runners as R
import Util
import Static as S
import qualified Static.Image as I
import Data.Proxy
import Data.Singletons.Prelude
import Data.Singletons.TypeLits
import Data.Singletons.Prelude.List
import Data.Array.Repa hiding (extract, (++))
feedImage :: (WVector (Widgets a), Monad m) => Visor a -> Screenshot a -> m (LabelVec a)
feedImage (Visor visor) img = do xs <- extract img
LabelVec <$> forward visor xs
trainImage :: (WVector (Widgets a), Monad m)
=> Visor a
-> Screenshot a
-> LabelVec a
-> m (Visor a, Loss)
trainImage (Visor v) shot (LabelVec ys) =
do xs <- extract shot
(v', l) <- trainOnce v xs ys
return (Visor v', l)
class WVector ws where
extract :: Monad m => Screenshot a -> m (Vec WInput ws)
forward :: Monad m => Vec WNetwork ws -> Vec WInput ws -> m (Vec WLabel ws)
dumpCrops :: Int -> FilePath -> Vec WInput ws -> IO ()
trainOnce :: Monad m
=> Vec WNetwork ws
-> Vec WInput ws
-> Vec WLabel ws
-> m (Vec WNetwork ws, Loss)
instance WVector '[]
where extract _ = return $! Nil
forward _ _ = return $! Nil
dumpCrops _ _ _ = return ()
trainOnce _ _ _ = return $! (Nil, ((0,0),0))
instance (Widget a, WVector ts) => WVector (a ': ts) where
extract shot =
do crop <- extractWidget shot
crops <- extract shot
return$ crop :- crops
forward (WNetwork n :- ns) (WInput x :- xs) =
do y <- R.forward n x
ls <- forward ns xs
let l = WLabel $ fromArray y
return$ l:-ls
trainOnce (WNetwork n :- ns) (WInput x :- xs) (WLabel l :- ls) =
do let Params lparams = params :: Params a
(n', ((c,t), l')) <- R.trainOnce n lparams x (toArray l)
(ns', ((cs, ts), ls')) <- trainOnce ns xs ls
return$! (WNetwork n' :- ns', ((c+cs, t+ts), l' + ls'))
dumpCrops i p (WInput x :- xs) = do x' <- I.sHcat x
I.saveImg (p ++ "/" ++ show i) x'
dumpCrops (i + 1) p xs
extractWidget :: forall w s m.
( Widget w, Monad m
) => Screenshot s -> m (WInput w)
extractWidget (Screenshot img) = WInput <$> sComputeP (sFromFunction fn)
where
ww = fromInteger$ natVal (Proxy :: Proxy (Width w))
wh = fromInteger$ natVal (Proxy :: Proxy (Height w))
iw = fromInteger$ natVal (Proxy :: Proxy (ScreenWidth (Parent w)))
ih = fromInteger$ natVal (Proxy :: Proxy (ScreenHeight (Parent w)))
wps = fromSing (sing :: Sing (Positions w))
regions = (\ (x,y) -> Rect (fromInteger x/iw) (fromInteger y/ih) (ww/iw) (wh/ih)) <$> wps
delayedCrops = (\ !r -> (I.extract img r :: SArray D (ZZ ::. 3 ::. SampleHeight w ::. SampleWidth w))) <$> regions
fn (Z :. n :. d :. y :. x) = let SArray crop = delayedCrops !! n in crop ! (Z :. d :. y :. x)
class WVector ws => Stack n ws where
stack :: [(Vec WInput ws, Vec WLabel ws)] -> Maybe (Vec (WBatch n) ws)
trainBatch :: Monad m
=> Vec WNetwork ws
-> Vec (WBatch n) ws
-> m (Vec WNetwork ws, Loss)
instance Stack n '[] where
stack _ = Just Nil
trainBatch _ _ = return $! (Nil, ((0,0),0))
instance ( Stack n ws
, Widget a
, KnownNat n
, KnownNat (n :* Length (Positions a))
, NOutput (Network (BatchInputShape a n) (NetConfig a)) ~ (BatchOutputShape a n)
, Cast (Network (InputShape a) (NetConfig a)) (Network (BatchInputShape a n) (NetConfig a))
, Cast (Network (BatchInputShape a n) (NetConfig a)) (Network (InputShape a) (NetConfig a))
) => Stack n (a ': ws) where
stack ps
| length ps /= n = Nothing
| otherwise = (WBatch (S.sConcat xs, S.sConcat ys) :-) <$> stack ts
where
unvec :: (Vec WInput (a ': ws), Vec WLabel (a ': ws))
-> ( SArray U (InputShape a)
, SArray U (BatchOutputShape a 1)
, (Vec WInput ws, Vec WLabel ws)
)
unvec (WInput sarr :- xs, WLabel l :- ls) = (sarr, toArray l, (xs, ls))
(xs, ys, ts) = unzip3 . fmap unvec $ ps
n = fromInteger$ natVal (Proxy :: Proxy n)
trainBatch (WNetwork n :- ns) (WBatch (x, y) :- ts) =
do let Params lparams = params :: Params a
cn = cast n :: Network (BatchInputShape a n) (NetConfig a)
(n', ((c,t), l')) <- R.trainOnce cn lparams x y
(ns', ((cs, ts), ls')) <- trainBatch ns ts
return$! (WNetwork (cast n') :- ns', ((c+cs, t+ts), l' + ls'))
| jonascarpay/visor | src/Visor.hs | bsd-3-clause | 5,057 | 0 | 16 | 1,471 | 2,193 | 1,126 | 1,067 | 115 | 1 |
{-# LANGUAGE BangPatterns, CPP #-}
-- | A CSV parser. The parser defined here is RFC 4180 compliant, with
-- the following extensions:
--
-- * Empty lines are ignored.
--
-- * Non-escaped fields may contain any characters except
-- double-quotes, commas, carriage returns, and newlines.
--
-- * Escaped fields may contain any characters (but double-quotes
-- need to be escaped).
--
-- The functions in this module can be used to implement e.g. a
-- resumable parser that is fed input incrementally.
module Data.Csv.Parser
( DecodeOptions(..)
, defaultDecodeOptions
, csv
, csvWithHeader
, header
, record
, name
, field
) where
import Blaze.ByteString.Builder (fromByteString, toByteString)
import Blaze.ByteString.Builder.Char.Utf8 (fromChar)
import Control.Applicative
import Data.Attoparsec.Char8 hiding (Parser, Result, parse)
import qualified Data.Attoparsec as A
import qualified Data.Attoparsec.Lazy as AL
import Data.Attoparsec.Types (Parser)
import qualified Data.Attoparsec.Zepto as Z
import qualified Data.ByteString as S
import qualified Data.ByteString.Unsafe as S
import qualified Data.HashMap.Strict as HM
import Data.Monoid
import qualified Data.Vector as V
import Data.Word
import Data.Csv.Types
import Data.Csv.Util ((<$!>))
-- | Options that control how data is decoded. These options can be
-- used to e.g. decode tab-separated data instead of comma-separated
-- data.
--
-- To avoid having your program stop compiling when new fields are
-- added to 'DecodeOptions', create option records by overriding
-- values in 'defaultDecodeOptions'. Example:
--
-- > myOptions = defaultDecodeOptions {
-- > decDelimiter = fromIntegral (ord '\t')
-- > }
data DecodeOptions = DecodeOptions
{ -- | Field delimiter.
decDelimiter :: {-# UNPACK #-} !Word8
} deriving (Eq, Show)
-- | Decoding options for parsing CSV files.
defaultDecodeOptions :: DecodeOptions
defaultDecodeOptions = DecodeOptions
{ decDelimiter = 44 -- comma
}
-- | Parse a CSV file that does not include a header.
csv :: DecodeOptions -> AL.Parser Csv
csv !opts = do
vals <- record (decDelimiter opts) `sepBy1'` endOfLine
_ <- optional endOfLine
endOfInput
let nonEmpty = removeBlankLines vals
return $! V.fromList nonEmpty
{-# INLINE csv #-}
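-- A minimal usage sketch for this parser (assumes @OverloadedStrings@; @AL@
-- is the lazy attoparsec module imported above; not part of the original
-- module):
--
-- > AL.eitherResult (AL.parse (csv defaultDecodeOptions) "john,27\r\njane,28\r\n")
--
-- yields @Right@ of two records, @["john","27"]@ and @["jane","28"]@, each a
-- 'V.Vector' of fields.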
-- | @sepBy1' p sep@ applies /one/ or more occurrences of @p@,
-- separated by @sep@. Returns a list of the values returned by @p@.
-- The value returned by @p@ is forced to WHNF.
--
-- > commaSep p = p `sepBy1'` (symbol ",")
sepBy1' :: (Alternative f, Monad f) => f a -> f s -> f [a]
sepBy1' p s = go
where
go = do
!a <- p
as <- (s *> go) <|> pure []
return (a : as)
#if __GLASGOW_HASKELL__ >= 700
{-# SPECIALIZE sepBy1' :: Parser S.ByteString a -> Parser S.ByteString s
-> Parser S.ByteString [a] #-}
#endif
-- | Parse a CSV file that includes a header.
csvWithHeader :: DecodeOptions -> AL.Parser (Header, V.Vector NamedRecord)
csvWithHeader !opts = do
!hdr <- header (decDelimiter opts)
vals <- map (toNamedRecord hdr) . removeBlankLines <$>
(record (decDelimiter opts)) `sepBy1'` endOfLine
_ <- optional endOfLine
endOfInput
let !v = V.fromList vals
return (hdr, v)
toNamedRecord :: Header -> Record -> NamedRecord
toNamedRecord hdr v = HM.fromList . V.toList $ V.zip hdr v
-- | Parse a header, including the terminating line separator.
header :: Word8 -- ^ Field delimiter
-> AL.Parser Header
header !delim = V.fromList <$!> name delim `sepBy1'` (A.word8 delim) <* endOfLine
-- | Parse a header name. Header names have the same format as regular
-- 'field's.
name :: Word8 -> AL.Parser Name
name !delim = field delim
removeBlankLines :: [Record] -> [Record]
removeBlankLines = filter (not . blankLine)
where blankLine v = V.length v == 1 && (S.null (V.head v))
-- | Parse a record, not including the terminating line separator. The
-- terminating line separator is not included because the last record in a
-- CSV file is allowed to not have a terminating line separator. You
-- most likely want to use the 'endOfLine' parser in combination with
-- this parser.
record :: Word8 -- ^ Field delimiter
-> AL.Parser Record
record !delim = do
fs <- field delim `sepBy1'` (A.word8 delim)
return $! V.fromList fs
{-# INLINE record #-}
-- | Parse a field. The field may be in either the escaped or
-- non-escaped format. The return value is unescaped.
field :: Word8 -> AL.Parser Field
field !delim = do
mb <- A.peekWord8
-- We purposely don't use <|> as we want to commit to the first
-- choice if we see a double quote.
case mb of
Just b | b == doubleQuote -> escapedField
_ -> unescapedField delim
{-# INLINE field #-}
escapedField :: AL.Parser S.ByteString
escapedField = do
_ <- dquote
-- The scan state is 'True' if the previous character was a double
-- quote. We need to drop a trailing double quote left by scan.
s <- S.init <$> (A.scan False $ \s c -> if c == doubleQuote
then Just (not s)
else if s then Nothing
else Just False)
if doubleQuote `S.elem` s
then case Z.parse unescape s of
Right r -> return r
Left err -> fail err
else return s
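-- For instance, the escaped field @"aaa ""b"" ccc"@ (outer quotes included)
-- parses to the byte string @aaa "b" ccc@. (Illustrative note, not part of
-- the original module.)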
unescapedField :: Word8 -> AL.Parser S.ByteString
unescapedField !delim = A.takeWhile (\ c -> c /= doubleQuote &&
c /= newline &&
c /= delim &&
c /= cr)
dquote :: AL.Parser Char
dquote = char '"'
unescape :: Z.Parser S.ByteString
unescape = toByteString <$!> go mempty where
go acc = do
h <- Z.takeWhile (/= doubleQuote)
let rest = do
start <- Z.take 2
if (S.unsafeHead start == doubleQuote &&
S.unsafeIndex start 1 == doubleQuote)
then go (acc `mappend` fromByteString h `mappend` fromChar '"')
else fail "invalid CSV escape sequence"
done <- Z.atEnd
if done
then return (acc `mappend` fromByteString h)
else rest
doubleQuote, newline, cr :: Word8
doubleQuote = 34
newline = 10
cr = 13
| solidsnack/cassava | Data/Csv/Parser.hs | bsd-3-clause | 6,364 | 0 | 20 | 1,654 | 1,344 | 729 | 615 | 115 | 5 |
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Control.Variable where
import Control.Monad
import Control.Monad.ST
import Control.Monad.Trans.State
import Data.Tuple ( swap )
import Data.IORef
import Data.STRef
import qualified Data.StateVar as StateVar
import Data.StateVar hiding ( get )
import Control.Concurrent.MVar
import Control.Concurrent.STM
-- | The class @Variable m v@ defines operations for reading and writing to
-- a variable of type @v@ in the monad @m@.
class (Monad m) => Variable m v | v -> m where
-- | @writeVar v s@ stores the value @s@ in @v@.
writeVar :: v s -> s -> m ()
writeVar v s = modifyVar v $ const ((), s)
-- | @readVar v@ retrieves the value stored in @v@.
readVar :: v s -> m s
readVar v = modifyVar v $ \s -> (s, s)
-- | @modifyVar v f@ applies @f@ to the value stored in @v@, storing
-- the new value in @v@.
modifyVar :: v s -> (s -> (a, s)) -> m a
modifyVar v f = do
(a, s) <- f `liftM` readVar v
writeVar v s
return a
-- | A version of `modifyVar` that applies the function strictly.
modifyVar' :: v s -> (s -> (a, s)) -> m a
modifyVar' v f = modifyVar v $ \s0 ->
let (a, s) = f s0
in s `seq` (a, s)
{-# MINIMAL writeVar, readVar | modifyVar #-}
-- | The class @Locked m v@ is defined for instances of @Variable m v@ that
-- admit a proper definition of @atomicVar@. An instance of @Locked IO v@
-- is only correct if @v@ implements proper locking behavior, e.g. @MVar@.
--
-- This class is trivially satisfied by any instance @Variable STM v@ or
-- @Variable ST v@.
--
class (Variable m v) => Locked m v | v -> m where
atomicVar :: v s -> (s -> m (a, s)) -> m a
atomicVar v f = do
(a, s) <- f =<< readVar v
writeVar v s
return a
atomicVar' :: v s -> (s -> m (a, s)) -> m a
atomicVar' v f = atomicVar v $ \s0 -> do
(a, s) <- f s0
return $! seq s (a, s)
-- | A default definition of @modifyVar@ for instances of @Locked m v@.
modifyVarDefault :: (Locked m v) => v s -> (s -> (a, s)) -> m a
modifyVarDefault v f = atomicVar v (return . f)
-- | defined with atomicModifyIORef
instance Variable IO IORef where
modifyVar v f = atomicModifyIORef v $ swap . f
instance Variable IO StateVar where
readVar = StateVar.get
writeVar = (StateVar.$=)
instance Variable IO MVar where
modifyVar = modifyVarDefault
instance Locked IO MVar where
atomicVar v f = modifyMVar v $ liftM swap . f
instance Variable (ST s) (STRef s) where
readVar = readSTRef
writeVar = writeSTRef
-- | trivially atomic due to ST
instance Locked (ST s) (STRef s) where
instance Variable STM TVar where
readVar = readTVar
writeVar = writeTVar
-- | trivially atomic due to STM
instance Locked STM TVar where
instance Variable STM TMVar where
readVar = takeTMVar
writeVar = putTMVar
-- | trivially atomic due to STM
instance Locked STM TMVar where
-- | @varState v m@ runs @m@ over the value stored in @v@, storing the resulting
-- value in @v@.
varState :: (Variable m v) => v s -> State s a -> m a
varState v = modifyVar v . runState
-- | A strict version of `varState`.
varState' :: (Variable m v) => v s -> State s a -> m a
varState' v = modifyVar' v . runState
-- | @varStateT v m@ runs @m@ over the value stored in @v@, storing the
-- resulting value in @v@.
--
-- Note that @m@ may perform arbitrary computations in the base monad of
-- the variable. Therefore, the variable has the constraint @Locked m v@
-- which should only be defined for variables with the proper locking
-- semantics.
--
varStateT :: (Locked m v) => v s -> StateT s m a -> m a
varStateT v = atomicVar v . runStateT
-- | An infix version of `varState`.
(@:) :: (Variable m v) => v s -> State s a -> m a
(@:) = varState
infix 2 @:
-- | An infix version of `varState'`.
(@!) :: (Variable m v) => v s -> State s a -> m a
(@!) = varState'
infix 2 @!
-- | An infix version of `varStateT`.
(@@) :: (Locked m v) => v s -> StateT s m a -> m a
(@@) = varStateT
infix 2 @@
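-- A small usage sketch (the helper below is illustrative and not part of
-- this module):
--
-- > bump :: IORef Int -> IO Int
-- > bump ref = ref @: (modify (+ 1) >> get)
--
-- This runs the 'State' action (using 'modify' and 'get' from
-- "Control.Monad.Trans.State") over the value stored in the 'IORef',
-- incrementing it and returning the new value.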
| cdxr/haskell-variables | Control/Variable.hs | bsd-3-clause | 4,120 | 0 | 13 | 1,019 | 1,176 | 631 | 545 | 76 | 1 |
-- |
-- Module : Basement.String.Encoding.ISO_8859_1
-- License : BSD-style
-- Maintainer : Foundation
-- Stability : experimental
-- Portability : portable
--
{-# LANGUAGE MagicHash #-}
module Basement.String.Encoding.ISO_8859_1
( ISO_8859_1(..)
, ISO_8859_1_Invalid(..)
) where
import Basement.Compat.Base
import Basement.Types.OffsetSize
import Basement.Numerical.Additive
import Basement.Monad
import GHC.Prim
import GHC.Word
import GHC.Types
import Basement.UArray
import Basement.UArray.Mutable (MUArray)
import Basement.MutableBuilder
import Basement.String.Encoding.Encoding
-- offset of size one
aone :: Offset Word8
aone = Offset 1
data ISO_8859_1_Invalid
= NotISO_8859_1 Char
deriving (Typeable, Show, Eq)
instance Exception ISO_8859_1_Invalid
data ISO_8859_1 = ISO_8859_1
instance Encoding ISO_8859_1 where
type Unit ISO_8859_1 = Word8
type Error ISO_8859_1 = ISO_8859_1_Invalid
encodingNext _ = next
encodingWrite _ = write
next :: (Offset Word8 -> Word8)
-> Offset Word8
-> Either ISO_8859_1_Invalid (Char, Offset Word8)
next getter off = Right (toChar w, off + aone)
where
!(W8# w) = getter off
toChar :: Word# -> Char
toChar a = C# (chr# (word2Int# a))
write :: (PrimMonad st, Monad st)
=> Char
-> Builder (UArray Word8) (MUArray Word8) Word8 st err ()
write c@(C# ch)
| c <= toEnum 0xFF = builderAppend (W8# x)
| otherwise = throw $ NotISO_8859_1 c
where
x :: Word#
!x = int2Word# (ord# ch)
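-- As an illustration of the behaviour above (not part of the original
-- module): 'write' appends the single byte 0xE9 for @'é'@, while a character
-- outside Latin-1 such as @'€'@ (U+20AC) throws 'NotISO_8859_1'.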
| vincenthz/hs-foundation | basement/Basement/String/Encoding/ISO_8859_1.hs | bsd-3-clause | 1,527 | 0 | 11 | 317 | 440 | 237 | 203 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings, DeriveGeneric, TypeFamilies, FlexibleInstances, MultiParamTypeClasses, OverloadedLabels, Arrows, TypeApplications, UndecidableInstances, FlexibleContexts #-}
module Api.Post where
import Prelude hiding (id)
import Data.Aeson
import Data.Profunctor
import Data.Profunctor.Product.Default hiding (def)
import qualified Data.Profunctor.Product.Default as P (def)
import Opaleye
import Data.Text as T
import Data.Time.Clock
import GHC.Generics
import Control.Arrow
import Control.Monad.Reader
import Web.HttpApiData
import Database
import Database.PostgreSQL.Simple.FromField
newtype PostId = PostId Int
deriving (Generic, FromJSON, ToJSON, FromHttpApiData, FromField)
instance Default Constant PostId (Column PGInt4) where
def = Constant (pgInt4 . unPost)
where unPost (PostId i) = i
instance QueryRunnerColumnDefault PGInt4 PostId where
queryRunnerColumnDefault = fieldQueryRunnerColumn
data Post f = Post
{ id :: TableField f PostId PGInt4 NN Opt
, title :: TableField f Text PGText NN Req
, body :: TableField f Text PGText NN Req
, created_at :: TableField f UTCTime PGTimestamptz NN Req
, updated_at :: TableField f UTCTime PGTimestamptz NN Req
} deriving (Generic)
instance ( Profunctor p
, Applicative (p (Post f))
, Default p (TableField f PostId PGInt4 NN Opt) (TableField g PostId PGInt4 NN Opt)
, Default p (TableField f Text PGText NN Req) (TableField g Text PGText NN Req)
, Default p (TableField f Text PGText NN Req) (TableField g Text PGText NN Req)
, Default p (TableField f UTCTime PGTimestamptz NN Req) (TableField g UTCTime PGTimestamptz NN Req)
, Default p (TableField f UTCTime PGTimestamptz NN Req) (TableField g UTCTime PGTimestamptz NN Req)
) => Default p (Post f) (Post g) where
def = Post <$> lmap id P.def
<*> lmap title P.def
<*> lmap body P.def
<*> lmap created_at P.def
<*> lmap updated_at P.def
instance FromJSON (Post Hask) where
parseJSON (Object o)
= Post <$>
o .: "id" <*>
o .: "title" <*>
o .: "body" <*>
o .: "created_at" <*>
o .: "updated_at"
parseJSON _ = mzero
instance ToJSON (Post Hask) where
toJSON (Post id title body c_at u_at)
= object [ "id" .= id
, "title" .= title
, "body" .= body
, "created_at" .= c_at
, "updated_at" .= u_at
]
findById :: PostId -> Query (Post Op)
findById postId = proc () -> do
post <- queryTable (postsTable) -< ()
restrict -< (id post) .== constant postId
returnA -< post
postsTable :: Table (Post W) (Post Op)
postsTable = Table "posts_table" (Post <$> lmap id (optional "id")
<*> lmap title (required "title")
<*> lmap body (required "body")
<*> lmap created_at (required "created_at")
<*> lmap updated_at (required "updated_at")
)
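-- A usage sketch (assuming a PostgreSQL 'Connection' named @conn@ and
-- Opaleye's @runQuery@; both are assumptions and neither is defined in this
-- module):
--
-- > fetchPost :: Connection -> PostId -> IO [Post Hask]
-- > fetchPost conn postId = runQuery conn (findById postId)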
| xldenis/xavio | src/Api/Post.hs | bsd-3-clause | 3,139 | 1 | 15 | 873 | 940 | 490 | 450 | 70 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module QCommon.QFiles.MD2.DTriangleT where
import Control.Applicative ((<*>))
import Control.Lens (makeLenses)
import Data.Functor ((<$>))
import Data.Int (Int16)
import Linear (V3)
import qualified Data.ByteString.Lazy as BL
import Types
import Util.Binary
makeLenses ''DTriangleT
newDTriangleT :: BL.ByteString -> DTriangleT
newDTriangleT = runGet getDTriangleT
getDTriangleT :: Get DTriangleT
getDTriangleT = DTriangleT <$> getV3Int16
<*> getV3Int16
| ksaveljev/hake-2 | src/QCommon/QFiles/MD2/DTriangleT.hs | bsd-3-clause | 518 | 0 | 6 | 85 | 126 | 76 | 50 | 16 | 1 |
module Principal
( someFunc
) where
someFunc :: IO ()
someFunc = putStrLn "Hola"
| EnriqueNB/Exercitium | src/Principal.hs | bsd-3-clause | 90 | 0 | 6 | 23 | 27 | 15 | 12 | 4 | 1 |
module ETests.Pretty.Expression
( prettyExpressionSpecs
) where
import Text.Parsec
import Text.PrettyPrint.HughesPJ
import Test.Hspec
import Test.Hspec.HUnit()
import Language.TheExperiment.Parser
import Language.TheExperiment.Pretty.Expression
import ETests.Pretty.Literal
import ETests.Parser.Expression
import ETests.Utils
prettyExpressionTestCases :: (String -> String -> IO ()) -> [Specs]
prettyExpressionTestCases prettyFrom =
prettyLiteralTestCases prettyFrom ++
[ it "pretty prints a variable" $
"a" `prettyFrom` "a"
, it "pretty prints basic call" $
"foo(a, b)" `prettyFrom` "foo(a, b)"
, it "pretty prints numeric literal" $
"1234" `prettyFrom` "1234"
, it "pretty prints operators (with correct precedence)" $
"a + b * c" `prettyFrom` "a + (b * c)"
, it "pretty prints operators adding parens as needed" $
"(a + b) * c" `prettyFrom` "(a + b) * c"
, it "pretty prints left assoc operator adding parens as needed" $
"a + (b + c)" `prettyFrom` "a + (b + c)"
, it "pretty prints infix left assoc operator with parens" $
"(a + b) + c" `prettyFrom` "a + b + c"
, it "pretty prints left assoc operator without parens if not needed" $
pending "not sure if we really need this"
, it "pretty prints prefix with infix" $
"a + -b" `prettyFrom` "a + (-b)"
]
prettyExpressionSpecs :: Specs
prettyExpressionSpecs = describe "prettyExpression" $ prettyExpressionTestCases prettyFrom
where
prettyFrom expected input =
case runEParser "tests" (opDefs ++ input) (parseLex $ many anOpDef >>= putState . Operators >> anExpr) of
Right result -> eTestAssertEqual "prettyType"
expected
(render $ prettyExpression result)
Left e -> error $ "fix your stupid test: " ++ show e
| jvranish/TheExperiment | test/ETests/Pretty/Expression.hs | bsd-3-clause | 1,842 | 0 | 14 | 431 | 369 | 201 | 168 | 40 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Ircbrowse.Types where
import Ircbrowse.Data
import Ircbrowse.Types.Import
import Ircbrowse.Monads
import Control.Applicative
import Data.Text
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromRow
import Network.Mail.Mime (Address)
import Snap.App.Cache
import Snap.App.Types
import Data.Time
-- | Site-wide configuration.
data Config = Config
{ configPostgres :: !ConnectInfo
, configDomain :: !String
, configAdmin :: !Address
, configSiteAddy :: !Address
, configCacheDir :: !FilePath
, configLogDir :: !FilePath
}
instance AppConfig Config where
getConfigDomain = configDomain
instance CacheDir Config where
getCacheDir = configCacheDir
data PState = PState
-- | Statistics.
data Stats = Stats
{ stEventCount :: !Integer
, stMsgCount :: !Integer
, stNickCount :: !Integer
, stActiveTimes :: ![(Integer,Integer)]
, stDailyActivity :: ![(Integer,Integer)]
, stActiveNicks :: ![(String,Integer)]
, stNetworks :: ![(String,String)]
, stChannels :: ![(String,String)]
, stActivityByYear :: ![(Integer,Integer)]
, stConversationByYear :: ![(Integer,Integer)]
} deriving Show
instance Default Stats where
def = Stats
{ stEventCount = 0
, stMsgCount = 0
, stNickCount = 0
, stActiveNicks = []
, stActiveTimes = []
, stDailyActivity = []
, stNetworks = []
, stChannels = []
, stConversationByYear = []
, stActivityByYear = []
}
instance AppLiftModel Config PState where
liftModel action = do
conn <- env controllerStateConn
anns <- env controllerState
conf <- env controllerStateConfig
let st = ModelState conn anns conf
io $ runReaderT (runModel action) st
data Range = Range
{ rangeFrom :: !Day, rangeTo :: !Day }
deriving (Eq,Show)
data CacheKey
= StatsOverview Channel
| Overview
| NickCloud Channel
| Social (Maybe Channel)
| BrowseDay Channel Day Text
| BrowseToday Channel Text
| Profile Text Bool
| AllNicks Channel Text
| UniquePDFs Channel
| Calendar Channel
| Channel Channel
instance Key CacheKey where
keyToString (Calendar channel) = norm $ "calendar-" ++ showChan channel ++ ".html"
keyToString (BrowseToday channel mode) = norm $ "browse-today-" ++ showChan channel ++ "-" ++ unpack mode ++ ".html"
keyToString (BrowseDay channel day mode) = norm $ "browse-day-" ++ showDay day ++ "-" ++ showChan channel ++ "-" ++ unpack mode ++ ".html"
keyToString (UniquePDFs channel) = norm $ "unique-pdfs-" ++ showChan channel ++ ".html"
keyToString (StatsOverview channel) = norm $ contexted "overview" (Just channel)
keyToString Overview = norm $ "overview.html"
keyToString (NickCloud channel) = norm $ contexted "nick-cloud" (Just channel)
keyToString (Social channel) = norm $ contexted "social" channel
keyToString (Profile nick recent) = norm $
"profile-" ++ unpack nick ++ "-" ++ (if recent then "recent" else "all") ++
".html"
keyToString (AllNicks channel mode) = norm $
"nicks-" ++ showChan channel ++ "-" ++ unpack mode ++
".html"
keyToString (Channel channel) = norm $ "channel-" ++ showChan channel ++ ".html"
norm :: [Char] -> [Char]
norm = go where
go (x:xs) | isDigit x || isLetter x || x == '.' || x == '-' = x : go xs
| otherwise = show (fromEnum x) ++ go xs
go [] = []
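-- For example, @norm "social-#haskell.html"@ produces @"social-35haskell.html"@:
-- letters, digits, dots and dashes are kept, and every other character is
-- replaced by its numeric character code. (Illustrative note, not part of the
-- original module.)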
contexted :: [Char] -> Maybe Channel -> [Char]
contexted name channel =
name ++ "-" ++ opt (fmap showChan channel) ++ ".html"
where opt Nothing = "_"
opt (Just x) = x
showDay :: Day -> String
showDay = formatTime defaultTimeLocale "%Y-%m-%d"
data Event = Event
{ eventId :: !Int
, eventTimestamp :: !ZonedTime
, eventNetwork :: !Int
, eventChannel :: !Int
, eventType :: !Text
, eventNick :: !(Maybe Text)
, eventText :: !Text
} deriving (Show)
instance FromRow Event where
fromRow = Event <$> field <*> field <*> field <*> field <*> field <*> field <*> field
| chrisdone/ircbrowse | src/Ircbrowse/Types.hs | bsd-3-clause | 4,138 | 0 | 15 | 956 | 1,284 | 688 | 596 | 161 | 2 |
module Main
( main
) where
import Control.Concurrent
import Control.Concurrent.STM
import Control.Monad (when)
import Data.List (find)
import Data.Maybe (isJust)
import System.Environment (getArgs)
import System.Console.GetOpt
import System.Log.Logger
import System.Log.Handler.Simple
import System.Log.Handler (setFormatter)
import System.Log.Formatter
import System.IO
import System.Random
import Process.Console as Console
import Process.Status as Status
import Process.TorrentManager as TorrentManager
import Process.TorrentManagerChan (TorrentManagerMessage(AddTorrent))
import Torrent
import Version (version)
main :: IO ()
main = do
args <- getArgs
opts <- handleArgs args
run opts
printVersion :: IO ()
printVersion = putStrLn $ "PROGRAM version " ++ version ++ "\n"
data Option
= Version
| Debug
| Help
deriving (Show, Eq)
options :: [OptDescr Option]
options =
[ Option ['h', '?'] ["help"] (NoArg Help) "Выводит это сообщение"
, Option ['d'] ["debug"] (NoArg Debug) "Печатает дополнительную информацию"
, Option ['v'] ["version"] (NoArg Version) "Показывает версию программы"
]
getOption :: Option -> [Option] -> Maybe Option
getOption x = find (x ~=)
where
(~=) :: Option -> Option -> Bool
Version ~= Version = True
Debug ~= Debug = True
Help ~= Help = True
_ ~= _ = False
handleArgs :: [String] -> IO ([Option], [String])
handleArgs args = case getOpt Permute options args of
(o, n, [] ) -> return (o, n)
(_, _, err) -> error (concat err ++ usageMessage)
usageMessage = usageInfo header options
where
header = "Usage: PROGRAM [option...] FILE"
run :: ([Option], [String]) -> IO ()
run (opts, files) =
if showHelp then putStrLn usageMessage
else if showVersion then printVersion
else if null files then putStrLn "No torrent file"
else download opts files
where
showHelp = Help `elem` opts
showVersion = Version `elem` opts
setupLogging :: [Option] -> IO ()
setupLogging opts = do
logStream <- streamHandler stdout DEBUG >>= \l -> return $
setFormatter l (tfLogFormatter "%F %X" "[$time] $prio $loggername: $msg")
when (Debug `elem` opts) $
updateGlobalLogger rootLoggerName $
(setHandlers [logStream]) . (setLevel DEBUG)
download :: [Option] -> [String] -> IO ()
download opts files = do
    setupLogging opts
    debugM "Main" "Initialization"
    peerId <- newStdGen >>= (return . mkPeerId)
    debugM "Main" $ "Assigned peer_id: " ++ peerId
    statusTV <- newTVarIO []
    statusChan <- newTChanIO
    torrentChan <- newTChanIO
    waitMutex <- newEmptyTMVarIO
    _ <- Status.start statusTV statusChan
    _ <- Console.start waitMutex statusChan
    _ <- TorrentManager.start peerId statusTV statusChan torrentChan
    atomically $ writeTChan torrentChan (map AddTorrent files)
    atomically $ takeTMVar waitMutex
    infoM "Main" "Shutting down"
    return ()
| artems/htorr | server/Main.hs | bsd-3-clause | 3,101 | 0 | 13 | 657 | 959 | 508 | 451 | 83 | 4 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module TAPL.Meow (
exec
, exec_
, meow
, Meow
) where
import Control.Monad.Writer hiding (Writer)
newtype Meow a = Meow (WriterT [String] IO a)
deriving (Functor, Applicative, Monad)
exec :: Meow a -> IO (a, [String])
exec (Meow m) = runWriterT m
exec_ :: Meow a -> IO [String]
exec_ (Meow m) = execWriterT m
meow :: String -> Meow ()
meow x = Meow $ tell [x]
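-- A small usage sketch (not part of the original module):
--
-- > exec_ (meow "hello" >> meow "world")
--
-- evaluates to @["hello", "world"]@ in IO; 'exec' additionally returns the
-- computation's result alongside the log.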
| foreverbell/unlimited-plt-toys | tapl/tapl-base/TAPL/Meow.hs | bsd-3-clause | 414 | 0 | 8 | 83 | 178 | 97 | 81 | 15 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
-- Create a source distribution tarball
module Stack.SDist
( getSDistTarball
, checkSDistTarball
, checkSDistTarball'
, SDistOpts (..)
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Applicative
import Control.Concurrent.Execute (ActionContext(..))
import Control.Monad (unless, liftM, filterM, when)
import Control.Monad.IO.Unlift
import Control.Monad.Logger
import Control.Monad.Reader.Class (local)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as L
import Data.Char (toLower)
import Data.Data (Data, Typeable, cast, gmapT)
import Data.Either (partitionEithers)
import Data.IORef (newIORef, readIORef, writeIORef)
import Data.List
import Data.List.Extra (nubOrd)
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import qualified Data.Map.Strict as Map
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import Data.Time.Clock.POSIX
import Distribution.Package (Dependency (..))
import qualified Distribution.PackageDescription as Cabal
import qualified Distribution.PackageDescription.Check as Check
import Distribution.PackageDescription.PrettyPrint (showGenericPackageDescription)
import Distribution.Text (display)
import Distribution.Version (simplifyVersionRange, orLaterVersion, earlierVersion, hasUpperBound, hasLowerBound)
import Lens.Micro (set)
import Path
import Path.IO hiding (getModificationTime, getPermissions)
import Prelude -- Fix redundant import warnings
import Stack.Build (mkBaseConfigOpts, build)
import Stack.Build.Execute
import Stack.Build.Installed
import Stack.Build.Source (loadSourceMap, getDefaultPackageConfig)
import Stack.Build.Target hiding (PackageType (..))
import Stack.PackageLocation (resolveMultiPackageLocation)
import Stack.Constants
import Stack.Package
import Stack.Types.Build
import Stack.Types.BuildPlan
import Stack.Types.Config
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.StackT
import Stack.Types.StringError
import Stack.Types.Version
import System.Directory (getModificationTime, getPermissions)
import qualified System.FilePath as FP
-- | Special exception to throw when you want to fail because of bad results
-- of package check.
data SDistOpts = SDistOpts
{ sdoptsDirsToWorkWith :: [String]
-- ^ Directories to package
, sdoptsPvpBounds :: Maybe PvpBounds
-- ^ PVP Bounds overrides
, sdoptsIgnoreCheck :: Bool
-- ^ Whether to ignore check of the package for common errors
, sdoptsSign :: Bool
-- ^ Whether to sign the package
, sdoptsSignServerUrl :: String
-- ^ The URL of the signature server
, sdoptsBuildTarball :: Bool
-- ^ Whether to build the tarball
}
newtype CheckException
= CheckException (NonEmpty Check.PackageCheck)
deriving (Typeable)
instance Exception CheckException
instance Show CheckException where
show (CheckException xs) =
"Package check reported the following errors:\n" ++
(intercalate "\n" . fmap show . NE.toList $ xs)
-- | Given the path to a local package, creates its source
-- distribution tarball.
--
-- While this yields a 'FilePath', the name of the tarball, this
-- tarball is not written to the disk and is instead yielded as a lazy
-- bytestring.
getSDistTarball
:: (StackM env m, HasEnvConfig env)
=> Maybe PvpBounds -- ^ Override Config value
-> Path Abs Dir -- ^ Path to local package
-> m (FilePath, L.ByteString, Maybe (PackageIdentifier, L.ByteString))
   -- ^ Filename, tarball contents, and optional cabal file revision to upload
getSDistTarball mpvpBounds pkgDir = do
config <- view configL
let PvpBounds pvpBounds asRevision = fromMaybe (configPvpBounds config) mpvpBounds
tweakCabal = pvpBounds /= PvpBoundsNone
pkgFp = toFilePath pkgDir
lp <- readLocalPackage pkgDir
$logInfo $ "Getting file list for " <> T.pack pkgFp
(fileList, cabalfp) <- getSDistFileList lp
$logInfo $ "Building sdist tarball for " <> T.pack pkgFp
files <- normalizeTarballPaths (lines fileList)
-- We're going to loop below and eventually find the cabal
-- file. When we do, we'll upload this reference, if the
-- mpvpBounds value indicates that we should be uploading a cabal
-- file revision.
cabalFileRevisionRef <- liftIO (newIORef Nothing)
-- NOTE: Could make this use lazy I/O to only read files as needed
-- for upload (both GZip.compress and Tar.write are lazy).
-- However, it seems less error prone and more predictable to read
-- everything in at once, so that's what we're doing for now:
let tarPath isDir fp = either throwString return
(Tar.toTarPath isDir (forceUtf8Enc (pkgId FP.</> fp)))
-- convert a String of proper characters to a String of bytes
-- in UTF8 encoding masquerading as characters. This is
-- necessary for tricking the tar package into proper
-- character encoding.
forceUtf8Enc = S8.unpack . T.encodeUtf8 . T.pack
packWith f isDir fp = liftIO $ f (pkgFp FP.</> fp) =<< tarPath isDir fp
packDir = packWith Tar.packDirectoryEntry True
packFile fp
-- This is a cabal file, we're going to tweak it, but only
-- tweak it as a revision.
| tweakCabal && isCabalFp fp && asRevision = do
lbsIdent <- getCabalLbs pvpBounds (Just 1) $ toFilePath cabalfp
liftIO (writeIORef cabalFileRevisionRef (Just lbsIdent))
packWith packFileEntry False fp
-- Same, except we'll include the cabal file in the
-- original tarball upload.
| tweakCabal && isCabalFp fp = do
(_ident, lbs) <- getCabalLbs pvpBounds Nothing $ toFilePath cabalfp
currTime <- liftIO getPOSIXTime -- Seconds from UNIX epoch
tp <- liftIO $ tarPath False fp
return $ (Tar.fileEntry tp lbs) { Tar.entryTime = floor currTime }
| otherwise = packWith packFileEntry False fp
isCabalFp fp = toFilePath pkgDir FP.</> fp == toFilePath cabalfp
tarName = pkgId FP.<.> "tar.gz"
pkgId = packageIdentifierString (packageIdentifier (lpPackage lp))
dirEntries <- mapM packDir (dirsFromFiles files)
fileEntries <- mapM packFile files
mcabalFileRevision <- liftIO (readIORef cabalFileRevisionRef)
return (tarName, GZip.compress (Tar.write (dirEntries ++ fileEntries)), mcabalFileRevision)
-- | Get the PVP bounds-enabled version of the given cabal file
getCabalLbs :: (StackM env m, HasEnvConfig env)
=> PvpBoundsType
-> Maybe Int -- ^ optional revision
-> FilePath
-> m (PackageIdentifier, L.ByteString)
getCabalLbs pvpBounds mrev fp = do
bs <- liftIO $ S.readFile fp
(_warnings, gpd) <- readPackageUnresolvedBS Nothing bs
(_, sourceMap) <- loadSourceMap AllowNoTargets defaultBuildOptsCLI
menv <- getMinimalEnvOverride
(installedMap, _, _, _) <- getInstalled menv GetInstalledOpts
{ getInstalledProfiling = False
, getInstalledHaddock = False
, getInstalledSymbols = False
}
sourceMap
let gpd' = gtraverseT (addBounds sourceMap installedMap) gpd
gpd'' =
case mrev of
Nothing -> gpd'
Just rev -> gpd'
{ Cabal.packageDescription
= (Cabal.packageDescription gpd')
{ Cabal.customFieldsPD
= (("x-revision", show rev):)
$ filter (\(x, _) -> map toLower x /= "x-revision")
$ Cabal.customFieldsPD
$ Cabal.packageDescription gpd'
}
}
ident <- parsePackageIdentifierFromString $ display $ Cabal.package $ Cabal.packageDescription gpd''
return
( ident
, TLE.encodeUtf8 $ TL.pack $ showGenericPackageDescription gpd''
)
where
addBounds :: SourceMap -> InstalledMap -> Dependency -> Dependency
addBounds sourceMap installedMap dep@(Dependency cname range) =
case lookupVersion (fromCabalPackageName cname) of
Nothing -> dep
Just version -> Dependency cname $ simplifyVersionRange
$ (if toAddUpper && not (hasUpperBound range) then addUpper version else id)
$ (if toAddLower && not (hasLowerBound range) then addLower version else id)
range
where
lookupVersion name =
case Map.lookup name sourceMap of
Just (PSLocal lp) -> Just $ packageVersion $ lpPackage lp
Just (PSUpstream version _ _ _ _) -> Just version
Nothing ->
case Map.lookup name installedMap of
Just (_, installed) -> Just (installedVersion installed)
Nothing -> Nothing
addUpper version = intersectVersionRanges
(earlierVersion $ toCabalVersion $ nextMajorVersion version)
addLower version = intersectVersionRanges
(orLaterVersion (toCabalVersion version))
(toAddLower, toAddUpper) =
case pvpBounds of
PvpBoundsNone -> (False, False)
PvpBoundsUpper -> (False, True)
PvpBoundsLower -> (True, False)
PvpBoundsBoth -> (True, True)
-- | Traverse a data type.
gtraverseT :: (Data a,Typeable b) => (Typeable b => b -> b) -> a -> a
gtraverseT f =
gmapT (\x -> case cast x of
Nothing -> gtraverseT f x
Just b -> fromMaybe x (cast (f b)))
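-- In 'getCabalLbs' above it is used to rewrite every 'Dependency' found
-- anywhere inside the 'GenericPackageDescription', however deeply nested.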
-- | Read in a 'LocalPackage' config. This makes some default decisions
-- about 'LocalPackage' fields that might not be appropriate for other
-- use-cases.
readLocalPackage :: (StackM env m, HasEnvConfig env) => Path Abs Dir -> m LocalPackage
readLocalPackage pkgDir = do
cabalfp <- findOrGenerateCabalFile pkgDir
config <- getDefaultPackageConfig
(warnings,package) <- readPackage config cabalfp
mapM_ (printCabalFileWarning cabalfp) warnings
return LocalPackage
{ lpPackage = package
, lpWanted = False -- HACK: makes it so that sdist output goes to a log instead of a file.
, lpDir = pkgDir
, lpCabalFile = cabalfp
        -- NOTE: these aren't the 'correct' values, but they aren't used by
        -- the callers of this function in this module.
, lpTestDeps = Map.empty
, lpBenchDeps = Map.empty
, lpTestBench = Nothing
, lpForceDirty = False
, lpDirtyFiles = Nothing
, lpNewBuildCache = Map.empty
, lpFiles = Set.empty
, lpComponents = Set.empty
, lpUnbuildable = Set.empty
}
-- | Returns a newline-separated list of paths, and the absolute path to the .cabal file.
getSDistFileList :: (StackM env m, HasEnvConfig env) => LocalPackage -> m (String, Path Abs File)
getSDistFileList lp =
withRunIO $ \run -> withSystemTempDir (stackProgName <> "-sdist") $ \tmpdir -> run $ do
menv <- getMinimalEnvOverride
let bopts = defaultBuildOpts
let boptsCli = defaultBuildOptsCLI
baseConfigOpts <- mkBaseConfigOpts boptsCli
(locals, _) <- loadSourceMap NeedTargets boptsCli
runInBase <- askRunIO
withExecuteEnv menv bopts boptsCli baseConfigOpts locals
[] [] [] -- provide empty list of globals. This is a hack around custom Setup.hs files
$ \ee ->
withSingleContext runInBase ac ee task Nothing (Just "sdist") $ \_package cabalfp _pkgDir cabal _announce _console _mlogFile -> do
let outFile = toFilePath tmpdir FP.</> "source-files-list"
cabal KeepTHLoading ["sdist", "--list-sources", outFile]
contents <- liftIO (readFile outFile)
return (contents, cabalfp)
where
package = lpPackage lp
ac = ActionContext Set.empty []
task = Task
{ taskProvides = PackageIdentifier (packageName package) (packageVersion package)
, taskType = TTLocal lp
, taskConfigOpts = TaskConfigOpts
{ tcoMissing = Set.empty
, tcoOpts = \_ -> ConfigureOpts [] []
}
, taskPresent = Map.empty
, taskAllInOne = True
, taskCachePkgSrc = CacheSrcLocal (toFilePath (lpDir lp))
}
normalizeTarballPaths :: (StackM env m) => [FilePath] -> m [FilePath]
normalizeTarballPaths fps = do
-- TODO: consider whether erroring out is better - otherwise the
-- user might upload an incomplete tar?
unless (null outsideDir) $
$logWarn $ T.concat
[ "Warning: These files are outside of the package directory, and will be omitted from the tarball: "
, T.pack (show outsideDir)]
return (nubOrd files)
where
(outsideDir, files) = partitionEithers (map pathToEither fps)
pathToEither fp = maybe (Left fp) Right (normalizePath fp)
normalizePath :: FilePath -> Maybe FilePath
normalizePath = fmap FP.joinPath . go . FP.splitDirectories . FP.normalise
where
go [] = Just []
go ("..":_) = Nothing
go (_:"..":xs) = go xs
go (x:xs) = (x :) <$> go xs
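-- Illustrative behaviour (assuming POSIX-style separators):
--
-- >>> normalizePath "foo/./bar/../baz.hs"
-- Just "foo/baz.hs"
-- >>> normalizePath "../foo.hs"
-- Nothing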
dirsFromFiles :: [FilePath] -> [FilePath]
dirsFromFiles dirs = Set.toAscList (Set.delete "." results)
where
results = foldl' (\s -> go s . FP.takeDirectory) Set.empty dirs
go s x
| Set.member x s = s
| otherwise = go (Set.insert x s) (FP.takeDirectory x)
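-- Illustrative behaviour (assuming POSIX-style separators):
--
-- >>> dirsFromFiles ["src/A/B.hs", "src/C.hs"]
-- ["src","src/A"]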
-- | Check the package in the given tarball. This will log all warnings
-- and will throw an exception in case of critical errors.
--
-- Note that we temporarily decompress the archive to analyze it.
checkSDistTarball :: (StackM env m, HasEnvConfig env)
=> SDistOpts -- ^ The configuration of what to check
-> Path Abs File -- ^ Absolute path to tarball
-> m ()
checkSDistTarball opts tarball = withTempTarGzContents tarball $ \pkgDir' -> do
pkgDir <- (pkgDir' </>) `liftM`
(parseRelDir . FP.takeBaseName . FP.takeBaseName . toFilePath $ tarball)
-- ^ drop ".tar" ^ drop ".gz"
when (sdoptsBuildTarball opts) (buildExtractedTarball pkgDir)
unless (sdoptsIgnoreCheck opts) (checkPackageInExtractedTarball pkgDir)
checkPackageInExtractedTarball :: (StackM env m, HasEnvConfig env)
                                 => Path Abs Dir -- ^ Absolute path to the directory with the extracted tarball contents
-> m ()
checkPackageInExtractedTarball pkgDir = do
cabalfp <- findOrGenerateCabalFile pkgDir
name <- parsePackageNameFromFilePath cabalfp
config <- getDefaultPackageConfig
(gdesc, pkgDesc) <- readPackageDescriptionDir config pkgDir
$logInfo $
"Checking package '" <> packageNameText name <> "' for common mistakes"
let pkgChecks = Check.checkPackage gdesc (Just pkgDesc)
fileChecks <- liftIO $ Check.checkPackageFiles pkgDesc (toFilePath pkgDir)
let checks = pkgChecks ++ fileChecks
(errors, warnings) =
let criticalIssue (Check.PackageBuildImpossible _) = True
criticalIssue (Check.PackageDistInexcusable _) = True
criticalIssue _ = False
in partition criticalIssue checks
unless (null warnings) $
$logWarn $ "Package check reported the following warnings:\n" <>
T.pack (intercalate "\n" . fmap show $ warnings)
case NE.nonEmpty errors of
Nothing -> return ()
Just ne -> throwM $ CheckException ne
buildExtractedTarball :: (StackM env m, HasEnvConfig env) => Path Abs Dir -> m ()
buildExtractedTarball pkgDir = do
projectRoot <- view projectRootL
envConfig <- view envConfigL
menv <- getMinimalEnvOverride
localPackageToBuild <- readLocalPackage pkgDir
let packageEntries = bcPackages (envConfigBuildConfig envConfig)
getPaths = resolveMultiPackageLocation menv projectRoot
allPackagePaths <- fmap (map fst . mconcat) (mapM getPaths packageEntries)
  -- We remove the path of any package whose name matches the package being built
let isPathToRemove path = do
localPackage <- readLocalPackage path
return $ packageName (lpPackage localPackage) == packageName (lpPackage localPackageToBuild)
pathsToKeep <- filterM (fmap not . isPathToRemove) allPackagePaths
newPackagesRef <- liftIO (newIORef Nothing)
let adjustEnvForBuild env =
let updatedEnvConfig = envConfig
{envConfigPackagesRef = newPackagesRef
,envConfigBuildConfig = updatePackageInBuildConfig (envConfigBuildConfig envConfig)
}
in set envConfigL updatedEnvConfig env
updatePackageInBuildConfig buildConfig = buildConfig
{ bcPackages = map (PLFilePath . toFilePath) $ pkgDir : pathsToKeep
, bcConfig = (bcConfig buildConfig)
{ configBuild = defaultBuildOpts
{ boptsTests = True
}
}
}
local adjustEnvForBuild $
build (const (return ())) Nothing defaultBuildOptsCLI
-- | Version of 'checkSDistTarball' that first saves lazy bytestring to
-- temporary directory and then calls 'checkSDistTarball' on it.
checkSDistTarball' :: (StackM env m, HasEnvConfig env)
=> SDistOpts
-> String -- ^ Tarball name
-> L.ByteString -- ^ Tarball contents as a byte string
-> m ()
checkSDistTarball' opts name bytes = withRunIO $ \run -> withSystemTempDir "stack" $ \tpath -> run $ do
npath <- (tpath </>) `liftM` parseRelFile name
liftIO $ L.writeFile (toFilePath npath) bytes
checkSDistTarball opts npath
withTempTarGzContents :: (MonadUnliftIO m)
=> Path Abs File -- ^ Location of tarball
-> (Path Abs Dir -> m a) -- ^ Perform actions given dir with tarball contents
-> m a
withTempTarGzContents apath f = withRunIO $ \run -> withSystemTempDir "stack" $ \tpath -> run $ do
archive <- liftIO $ L.readFile (toFilePath apath)
liftIO . Tar.unpack (toFilePath tpath) . Tar.read . GZip.decompress $ archive
f tpath
--------------------------------------------------------------------------------
-- Copied and modified from the tar package to avoid issues with lazy IO (see
-- https://github.com/commercialhaskell/stack/issues/1344)
packFileEntry :: FilePath -- ^ Full path to find the file on the local disk
-> Tar.TarPath -- ^ Path to use for the tar Entry in the archive
-> IO Tar.Entry
packFileEntry filepath tarpath = do
mtime <- getModTime filepath
perms <- getPermissions filepath
content <- S.readFile filepath
let size = fromIntegral (S.length content)
return (Tar.simpleEntry tarpath (Tar.NormalFile (L.fromStrict content) size)) {
Tar.entryPermissions = if executable perms then Tar.executableFilePermissions
else Tar.ordinaryFilePermissions,
Tar.entryTime = mtime
}
getModTime :: FilePath -> IO Tar.EpochTime
getModTime path = do
t <- getModificationTime path
return . floor . utcTimeToPOSIXSeconds $ t
|
martin-kolinek/stack
|
src/Stack/SDist.hs
|
bsd-3-clause
| 19,966 | 0 | 24 | 5,214 | 4,551 | 2,390 | 2,161 | 348 | 11 |
{-# LANGUAGE PostfixOperators #-}
module Haskellplusplus
( PlusPlus(..)
, int
, float
) where
import Prelude hiding (print)
import Prelude as P
import Data.IORef
int :: Int -> IO (IORef Int)
int x = newIORef x
float :: Float -> IO (IORef Float)
float x = newIORef x
class PlusPlus a where
(++) :: IORef a -> IO ()
(+=) :: IORef a -> a -> IO ()
(-=) :: IORef a -> a -> IO ()
print :: IORef a -> IO ()
instance PlusPlus Int where
(++) r = do x <- readIORef r; writeIORef r $ x + 1
(+=) r v = do x <- readIORef r; writeIORef r $ x + v
(-=) r v = do x <- readIORef r; writeIORef r $ x - v
print r = do x <- readIORef r; P.print $ P.show x
instance PlusPlus Float where
(++) r = do x <- readIORef r; writeIORef r $ x + 1.0
(+=) r v = do x <- readIORef r; writeIORef r $ x + v
(-=) r v = do x <- readIORef r; writeIORef r $ x - v
print r = do x <- readIORef r; P.print $ P.show x
|
Heather/io-ref-tests
|
src/Haskellplusplus.hs
|
bsd-3-clause
| 977 | 0 | 10 | 307 | 488 | 245 | 243 | 27 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, ScopedTypeVariables #-}
-- | Types definitions
module Instagram.Types (
Credentials(..)
,clientIDBS
,clientSecretBS
,OAuthToken(..)
,AccessToken(..)
,UserID
,User(..)
,UserCounts(..)
,Scope(..)
,IGException(..)
,Envelope(..)
,ErrEnvelope(..)
,IGError(..)
,Pagination(..)
,MediaID
,Media(..)
,Position(..)
,UserPosition(..)
,LocationID
,Location(..)
,ImageData(..)
,Images(..)
,CommentID
,Comment(..)
,Count(..)
,Aspect(..)
,media
,CallbackUrl
,Subscription(..)
,Update(..)
,TagName
,Tag(..)
,OutgoingStatus(..)
,IncomingStatus(..)
,Relationship(..)
,NoResult
,GeographyID
)where
import Control.Applicative
import Data.Text
import Data.Typeable (Typeable)
import Data.Data (Data)
import Data.ByteString (ByteString)
import Data.Aeson
import qualified Data.Text.Encoding as TE
import Control.Exception.Base (Exception)
import Data.Time.Clock.POSIX (POSIXTime)
import qualified Data.Text as T (pack)
import Data.Aeson.Types (Parser)
import qualified Data.HashMap.Strict as HM (lookup)
-- | the app credentials
data Credentials = Credentials {
cClientID :: Text -- ^ client id
,cClientSecret :: Text -- ^ client secret
}
deriving (Show,Read,Eq,Ord,Typeable, Data)
-- | get client id in ByteString form
clientIDBS :: Credentials -> ByteString
clientIDBS=TE.encodeUtf8 . cClientID
-- | get client secret in ByteString form
clientSecretBS :: Credentials -> ByteString
clientSecretBS=TE.encodeUtf8 . cClientSecret
-- | the oauth token returned after authentication
data OAuthToken = OAuthToken {
oaAccessToken :: AccessToken -- ^ the access token
,oaUser :: User -- ^ the user structure returned
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON OAuthToken where
toJSON oa=object ["access_token" .= oaAccessToken oa, "user" .= oaUser oa]
-- | from json as per Instagram format
instance FromJSON OAuthToken where
parseJSON (Object v) =OAuthToken <$>
v .: "access_token" <*>
v .: "user"
parseJSON _= fail "OAuthToken"
-- | the access token is simply a Text
newtype AccessToken=AccessToken Text
deriving (Eq, Ord, Read, Show, Typeable)
-- | simple string
instance ToJSON AccessToken where
toJSON (AccessToken at)=String at
-- | simple string
instance FromJSON AccessToken where
parseJSON (String s)=pure $ AccessToken s
parseJSON _= fail "AccessToken"
-- | User ID
type UserID = Text
-- | the User partial profile returned by the authentication
data User = User {
uID :: UserID,
uUsername :: Text,
uFullName :: Text,
uProfilePicture :: Maybe Text,
uWebsite :: Maybe Text,
uBio :: Maybe Text,
uCounts :: Maybe UserCounts
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON User where
toJSON u = object
[ "id" .= uID u
, "username" .= uUsername u
, "full_name" .= uFullName u
, "profile_picture" .= uProfilePicture u
, "website" .= uWebsite u
, "bio" .= uBio u
, "counts" .= uCounts u
]
-- | from json as per Instagram format
instance FromJSON User where
parseJSON (Object v) =User <$>
v .: "id" <*>
v .: "username" <*>
v .: "full_name" <*>
v .:? "profile_picture" <*>
v .:? "website" <*>
v .:? "bio" <*>
v .:? "counts"
parseJSON _= fail "User"
-- | the User counts info returned by some endpoints
data UserCounts = UserCounts
{ ucMedia :: Int
, ucFollows :: Int
, ucFollowedBy :: Int
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | from json as per Instagram format
instance FromJSON UserCounts where
parseJSON (Object v) = UserCounts <$>
v .: "media" <*>
v .: "follows" <*>
v .: "followed_by"
parseJSON _= fail "UserCounts"
-- | to json as per Instagram format
instance ToJSON UserCounts where
toJSON uc = object
[ "media" .= ucMedia uc
, "follows" .= ucFollows uc
, "followed_by" .= ucFollowedBy uc
]
-- | the scopes of the authentication
data Scope = Basic | PublicContent | FollowerList | Comments | Relationships | Likes
deriving (Read,Eq,Ord,Enum,Bounded,Typeable)
instance Show Scope where
show Basic = "basic"
show PublicContent = "public_content"
show FollowerList = "follower_list"
show Comments = "comments"
show Relationships = "relationships"
show Likes = "likes"
-- | an error returned to us by Instagram
data IGError = IGError {
igeCode :: Int
,igeType :: Maybe Text
,igeMessage :: Maybe Text
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON IGError where
toJSON e=object ["code" .= igeCode e, "error_type" .= igeType e , "error_message" .= igeMessage e]
-- | from json as per Instagram format
instance FromJSON IGError where
parseJSON (Object v) =IGError <$>
v .: "code" <*>
v .:? "error_type" <*>
v .:? "error_message"
parseJSON _= fail "IGError"
-- | an exception that a call to instagram may throw
data IGException = JSONException String -- ^ JSON parsingError
| IGAppException IGError -- ^ application exception
deriving (Show,Typeable)
-- | make our exception type a normal exception
instance Exception IGException
-- | envelope for Instagram OK response
data Envelope d=Envelope{
eMeta :: IGError -- ^ this should only say 200, no error, but put here for completeness
  ,eData :: d -- ^ data, guaranteed to be present (otherwise we get an ErrEnvelope)
,ePagination :: Maybe Pagination
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance (ToJSON d)=>ToJSON (Envelope d) where
toJSON e=object ["meta" .= eMeta e, "data" .= eData e, "pagination" .= ePagination e]
-- | from json as per Instagram format
instance (FromJSON d)=>FromJSON (Envelope d) where
parseJSON (Object v) =Envelope <$>
v .: "meta" <*>
v .: "data" <*>
v .:? "pagination"
parseJSON _= fail "Envelope"
-- | error envelope for Instagram error response
data ErrEnvelope=ErrEnvelope{
eeMeta :: IGError
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON ErrEnvelope where
toJSON e=object ["meta" .= eeMeta e]
-- | from json as per Instagram format
instance FromJSON ErrEnvelope where
parseJSON (Object v) =ErrEnvelope <$>
v .: "meta"
parseJSON _= fail "ErrEnvelope"
-- | pagination info for responses that can return a lot of data
data Pagination = Pagination {
pNextUrl :: Maybe Text
,pNextMaxID :: Maybe Text
,pNextMinID :: Maybe Text
,pNextMaxTagID :: Maybe Text
,pMinTagID :: Maybe Text
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON Pagination where
toJSON p=object ["next_url" .= pNextUrl p, "next_max_id" .= pNextMaxID p, "next_min_id" .= pNextMinID p, "next_max_tag_id" .= pNextMaxTagID p,"min_tag_id" .= pMinTagID p]
-- | from json as per Instagram format
instance FromJSON Pagination where
parseJSON (Object v) =Pagination <$>
v .:? "next_url" <*>
v .:? "next_max_id" <*>
v .:? "next_min_id" <*>
v .:? "next_max_tag_id" <*>
v .:? "min_tag_id"
parseJSON _= fail "Pagination"
-- | Media ID
type MediaID=Text
-- | instagram media object
data Media = Media {
mID :: MediaID
,mCaption :: Maybe Comment
,mLink :: Text
,mUser :: User
,mCreated :: POSIXTime
,mImages :: Images
,mType :: Text
,mUsersInPhoto :: [UserPosition]
,mFilter :: Maybe Text
,mTags :: [Text]
,mLocation :: Maybe Location
,mComments :: Count
,mLikes :: Count
,mUserHasLiked :: Bool
,mAttribution :: Maybe Object -- ^ seems to be open format https://groups.google.com/forum/?fromgroups#!topic/instagram-api-developers/KvGH1cnjljQ
}
deriving (Show,Eq,Typeable)
-- | to json as per Instagram format
instance ToJSON Media where
toJSON m=object ["id" .= mID m,"caption" .= mCaption m,"user".= mUser m,"link" .= mLink m, "created_time" .= toJSON (show ((round $ mCreated m) :: Integer))
,"images" .= mImages m,"type" .= mType m,"users_in_photo" .= mUsersInPhoto m, "filter" .= mFilter m,"tags" .= mTags m
,"location" .= mLocation m,"comments" .= mComments m,"likes" .= mLikes m,"user_has_liked" .= mUserHasLiked m,"attribution" .= mAttribution m]
-- | from json as per Instagram format
instance FromJSON Media where
parseJSON (Object v) =do
ct::String<-v .: "created_time"
Media <$>
v .: "id" <*>
v .:? "caption" <*>
v .: "link" <*>
v .: "user" <*>
pure (fromIntegral (read ct::Integer)) <*>
v .: "images" <*>
v .: "type" <*>
v .: "users_in_photo" <*>
v .:? "filter" <*>
v .: "tags" <*>
v .:? "location" <*>
v .:? "comments" .!= Count 0 <*>
v .:? "likes" .!= Count 0 <*>
v .:? "user_has_liked" .!= False <*>
v .:? "attribution"
parseJSON _= fail "Media"
-- | position in picture
data Position = Position {
pX ::Double
,pY :: Double
} deriving (Show,Eq,Typeable)
-- | to json as per Instagram format
instance ToJSON Position where
toJSON p=object ["x" .= pX p,"y" .= pY p]
-- | from json as per Instagram format
instance FromJSON Position where
parseJSON (Object v) = Position <$>
v .: "x" <*>
v .: "y"
parseJSON _=fail "Position"
-- | position of a user
data UserPosition = UserPosition {
upPosition :: Position
,upUser :: User
} deriving (Show,Eq,Typeable)
-- | to json as per Instagram format
instance ToJSON UserPosition where
toJSON p=object ["position" .= upPosition p,"user" .= upUser p]
-- | from json as per Instagram format
instance FromJSON UserPosition where
parseJSON (Object v) = UserPosition <$>
v .: "position" <*>
v .: "user"
parseJSON _=fail "UserPosition"
-- | location ID
type LocationID = Text
-- | geographical location info
data Location = Location {
lID :: Maybe LocationID
,lLatitude :: Maybe Double
,lLongitude :: Maybe Double
,lStreetAddress :: Maybe Text
,lName :: Maybe Text
}
deriving (Show,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON Location where
toJSON l=object ["id" .= lID l,"latitude" .= lLatitude l,"longitude" .= lLongitude l, "street_address" .= lStreetAddress l,"name" .= lName l]
-- | from json as per Instagram format
instance FromJSON Location where
parseJSON (Object v) =
Location <$>
parseID v <*>
v .:? "latitude" <*>
v .:? "longitude" <*>
v .:? "street_address" <*>
v .:? "name"
where
-- | the Instagram API hasn't made its mind up, sometimes location id is an int, sometimes a string
parseID :: Object -> Parser (Maybe LocationID)
parseID obj=case HM.lookup "id" obj of
Just (String s)->pure $ Just s
Just (Number n)->pure $ Just $ T.pack $ show $ (round n :: Int)
Nothing->pure Nothing
_->fail "LocationID"
parseJSON _= fail "Location"
-- | data for a single image
data ImageData = ImageData {
idURL :: Text,
idWidth :: Integer,
idHeight :: Integer
}
deriving (Show,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON ImageData where
toJSON i=object ["url" .= idURL i,"width" .= idWidth i,"height" .= idHeight i]
-- | from json as per Instagram format
instance FromJSON ImageData where
parseJSON (Object v) = ImageData <$>
v .: "url" <*>
v .: "width" <*>
v .: "height"
parseJSON _= fail "ImageData"
-- | different images for the same media
data Images = Images {
iLowRes :: ImageData
,iThumbnail :: ImageData
,iStandardRes :: ImageData
}
deriving (Show,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON Images where
toJSON i=object ["low_resolution" .= iLowRes i,"thumbnail" .= iThumbnail i,"standard_resolution" .= iStandardRes i]
-- | from json as per Instagram format
instance FromJSON Images where
parseJSON (Object v) = Images <$>
v .: "low_resolution" <*>
v .: "thumbnail" <*>
v .: "standard_resolution"
parseJSON _= fail "Images"
-- | comment id
type CommentID = Text
-- | Comment on a medium
data Comment = Comment {
cID :: CommentID
,cCreated :: POSIXTime
,cText :: Text
,cFrom :: User
}
deriving (Show,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON Comment where
toJSON c=object ["id" .= cID c,"created_time" .= toJSON (show ((round $ cCreated c) :: Integer))
,"text" .= cText c,"from" .= cFrom c]
-- | from json as per Instagram format
instance FromJSON Comment where
parseJSON (Object v) =do
ct::String<-v .: "created_time"
Comment <$>
v .: "id" <*>
pure (fromIntegral (read ct::Integer)) <*>
v .: "text" <*>
v .: "from"
  parseJSON _= fail "Comment"
data Count = Count {
cCount :: Integer
}
deriving (Show,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON Count where
toJSON igc=object ["count" .= cCount igc]
-- | from json as per Instagram format
instance FromJSON Count where
parseJSON (Object v) = Count <$>
v .: "count"
parseJSON _= fail "Count"
-- | the URL to receive notifications to
type CallbackUrl = Text
-- | notification aspect
data Aspect = Aspect Text
deriving (Show, Read, Eq, Ord, Typeable)
-- | to json as per Instagram format
instance ToJSON Aspect where
toJSON (Aspect t)=String t
-- | from json as per Instagram format
instance FromJSON Aspect where
parseJSON (String t) = pure $ Aspect t
parseJSON _= fail "Aspect"
-- | the media Aspect, the only one supported for now
media :: Aspect
media = Aspect "media"
-- | a subscription to a real time notification
data Subscription= Subscription {
sID :: Text
,sType :: Text
,sObject :: Text
,sObjectID :: Maybe Text
,sAspect :: Aspect
,sCallbackUrl :: CallbackUrl
,sLatitude :: Maybe Double
,sLongitude :: Maybe Double
,sRadius :: Maybe Integer
}
deriving (Show,Eq,Typeable)
-- | to json as per Instagram format
instance ToJSON Subscription where
toJSON s=object ["id" .= sID s,"type" .= sType s,"object" .= sObject s,"object_id" .= sObjectID s,"aspect" .= sAspect s
,"callback_url".=sCallbackUrl s,"lat".= sLatitude s,"lng".=sLongitude s,"radius".=sRadius s]
-- | from json as per Instagram format
instance FromJSON Subscription where
parseJSON (Object v) = Subscription <$>
v .: "id" <*>
v .: "type" <*>
v .: "object" <*>
v .:? "object_id" <*>
v .: "aspect" <*>
v .: "callback_url" <*>
v .:? "lat" <*>
v .:? "lng" <*>
v .:? "radius"
parseJSON _= fail "Subscription"
-- | an update from a subscription
data Update = Update {
uSubscriptionID :: Integer
,uObject :: Text
,uObjectID :: Text
,uChangedAspect :: Aspect
,uTime :: POSIXTime
}
deriving (Show,Eq,Typeable)
-- | to json as per Instagram format
instance ToJSON Update where
toJSON u=object ["subscription_id" .= uSubscriptionID u ,"object" .= uObject u,"object_id" .= uObjectID u
,"changed_aspect" .= uChangedAspect u,"time" .= toJSON ((round $ uTime u) :: Integer)]
-- | from json as per Instagram format
instance FromJSON Update where
parseJSON (Object v) =do
ct::Integer<-v .: "time"
Update <$>
v .: "subscription_id" <*>
v .: "object" <*>
v .: "object_id" <*>
v .: "changed_aspect" <*>
pure (fromIntegral ct)
parseJSON _= fail "Update"
-- | Tag Name
type TagName = Text
-- | a Tag
data Tag = Tag {
tName :: TagName,
tMediaCount :: Integer
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON Tag where
toJSON t=object ["name" .= tName t,"media_count" .= tMediaCount t]
-- | from json as per Instagram format
instance FromJSON Tag where
parseJSON (Object v) = Tag <$>
v .: "name" <*>
v .:? "media_count" .!= 0
parseJSON _= fail "Tag"
-- | outgoing relationship status
data OutgoingStatus = Follows | Requested | OutNone
deriving (Show,Read,Eq,Ord,Bounded,Enum,Typeable)
-- | to json as per Instagram format
instance ToJSON OutgoingStatus where
toJSON Follows = String "follows"
toJSON Requested = String "requested"
toJSON OutNone = String "none"
-- | from json as per Instagram format
instance FromJSON OutgoingStatus where
parseJSON (String "follows")=pure Follows
parseJSON (String "requested")=pure Requested
parseJSON (String "none")=pure OutNone
parseJSON _= fail "OutgoingStatus"
-- | incoming relationship status
data IncomingStatus = FollowedBy | RequestedBy | BlockedByYou | InNone
deriving (Show,Read,Eq,Ord,Bounded,Enum,Typeable)
-- | to json as per Instagram format
instance ToJSON IncomingStatus where
toJSON FollowedBy = String "followed_by"
toJSON RequestedBy = String "requested_by"
toJSON BlockedByYou = String "blocked_by_you"
toJSON InNone = String "none"
-- | from json as per Instagram format
instance FromJSON IncomingStatus where
parseJSON (String "followed_by")=pure FollowedBy
parseJSON (String "requested_by")=pure RequestedBy
parseJSON (String "blocked_by_you")=pure BlockedByYou
parseJSON (String "none")=pure InNone
parseJSON _= fail "IncomingStatus"
-- | a relationship between two users
data Relationship = Relationship {
rOutgoing :: OutgoingStatus
,rIncoming :: IncomingStatus
,rTargetUserPrivate :: Bool -- ^ not present in doc
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON Relationship where
toJSON r=object ["outgoing_status" .= rOutgoing r,"incoming_status" .= rIncoming r,"target_user_is_private" .= rTargetUserPrivate r]
-- | from json as per Instagram format
instance FromJSON Relationship where
parseJSON (Object v) = Relationship <$>
v .:? "outgoing_status" .!= OutNone <*>
v .:? "incoming_status" .!= InNone <*>
v .:? "target_user_is_private" .!= False
parseJSON _= fail "Relationship"
-- | Instagram returns data:null for nothing, but Aeson considers that () maps to an empty array...
-- so we model the fact that we expect null via NoResult
data NoResult = NoResult
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per Instagram format
instance ToJSON NoResult where
toJSON _=Null
-- | from json as per Instagram format
instance FromJSON NoResult where
parseJSON Null = pure NoResult
parseJSON _= fail "NoResult"
-- | geography ID
type GeographyID = Text
|
prowdsponsor/ig
|
src/Instagram/Types.hs
|
bsd-3-clause
| 19,817 | 0 | 39 | 5,295 | 5,148 | 2,763 | 2,385 | 443 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : ForSyDe.Shallow.Utility.PolyArith
-- Copyright : (c) ForSyDe Group, KTH 2007-2008
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This is the polynomial arithmetic library. The arithmetic operations include
-- addition, multiplication, division and power. However, the multiplication is
-- not optimized and runs in O(n^2) time; it could later be improved with
-- FFT-based algorithms.
-----------------------------------------------------------------------------
module ForSyDe.Shallow.Utility.PolyArith(
-- *Polynomial data type
Poly(..),
  -- *Addition, multiplication, division and power operations
addPoly, mulPoly, divPoly, powerPoly,
-- *Some helper functions
getCoef, scalePoly, addPolyCoef, subPolyCoef, scalePolyCoef
)
where
-- |Polynomial data type.
data Poly a = Poly [a]
| PolyPair (Poly a, Poly a) deriving (Eq)
-- |Multiplication operation of polynomials.
mulPoly :: Num a => Poly a -> Poly a -> Poly a
mulPoly (Poly []) _ = Poly []
mulPoly _ (Poly []) = Poly []
-- Here is the O(n^2) version of polynomial multiplication
mulPoly (Poly xs) (Poly ys) = Poly $ foldr (\y zs ->
let (v:vs) = scalePolyCoef y xs in v :addPolyCoef vs zs) [] ys
mulPoly (PolyPair (a, b)) (PolyPair (c, d)) =
PolyPair (mulPoly a c, mulPoly b d)
mulPoly (PolyPair (a, b)) (Poly c) =
PolyPair (mulPoly a (Poly c), b)
mulPoly (Poly c) (PolyPair (a, b)) =
mulPoly (PolyPair (a, b)) (Poly c)
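-- For example (coefficients are listed from the constant term upwards, so
-- @Poly [1,2]@ stands for 1 + 2x):
--
-- > mulPoly (Poly [1,2]) (Poly [3,4]) == Poly [3,10,8]
--
-- which corresponds to (1+2x)(3+4x) = 3 + 10x + 8x^2.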
-- |Division operation of polynomials.
divPoly :: Num a => Poly a -> Poly a -> Poly a
divPoly (Poly a) (Poly b) = PolyPair (Poly a,Poly b)
divPoly (PolyPair (a, b)) (PolyPair (c, d)) =
mulPoly (PolyPair (a, b)) (PolyPair (d, c))
divPoly (PolyPair (a, b)) (Poly c) =
PolyPair (a, mulPoly b (Poly c))
divPoly (Poly c) (PolyPair (a, b)) =
PolyPair (mulPoly b (Poly c), a)
-- |Addition operations of polynomials.
addPoly :: (Num a, Eq a) => Poly a -> Poly a -> Poly a
addPoly (Poly a) (Poly b) = Poly $ addPolyCoef a b
addPoly (PolyPair (a, b)) (PolyPair (c, d)) =
if b==d then -- simplifyPolyPair $
PolyPair (addPoly a c, d)
else -- simplifyPolyPair $
PolyPair (dividedPoly, divisorPoly)
where
divisorPoly = if b ==d then b else mulPoly b d
dividedPoly = if b == d then addPoly a c
else addPoly (mulPoly a d) (mulPoly b c)
addPoly (Poly a) (PolyPair (c, d) ) =
addPoly (PolyPair (multiPolyHelper, d)) (PolyPair (c,d) )
where
multiPolyHelper = mulPoly (Poly a) d
addPoly abPoly@(PolyPair _) cPoly@(Poly _) = addPoly cPoly abPoly
-- |Power operation of polynomials.
powerPoly :: Num a => Poly a -> Int -> Poly a
powerPoly p n = powerX' (Poly [1]) p n
where
powerX' :: Num a => Poly a -> Poly a -> Int -> Poly a
powerX' p' _ 0 = p'
powerX' p' p n = powerX' (mulPoly p' p) p (n-1)
-- |Some helper functions below.
-- |To get the coefficients of the polynomial.
getCoef :: Num a => Poly a -> ([a],[a])
getCoef (Poly xs) = (xs,[1])
getCoef (PolyPair (Poly xs,Poly ys)) = (xs,ys)
getCoef _ = error "getCoef: Nested fractions found"
scalePoly :: (Num a) => a -> Poly a -> Poly a
scalePoly s p = mulPoly (Poly [s]) p
addPolyCoef :: Num a => [a] -> [a] -> [a]
addPolyCoef = zipWithExt (0,0) (+)
subPolyCoef :: RealFloat a => [a] -> [a] -> [a]
subPolyCoef = zipWithExt (0,0) (-)
scalePolyCoef :: (Num a) => a -> [a] -> [a]
scalePolyCoef s p = map (s*) p
-- |Extended version of 'zipWith', which pads the shorter list with the given default elements.
zipWithExt :: (a,b) -> (a -> b -> c) -> [a] -> [b] -> [c]
zipWithExt _ _ [] [] = []
zipWithExt (x0,y0) f (x:xs) [] = f x y0 : (zipWithExt (x0,y0) f xs [])
zipWithExt (x0,y0) f [] (y:ys) = f x0 y : (zipWithExt (x0,y0) f [] ys)
zipWithExt (x0,y0) f (x:xs) (y:ys) = f x y : (zipWithExt (x0,y0) f xs ys)
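-- For example:
--
-- >>> zipWithExt (0,0) (+) [1,2,3] [10]
-- [11,2,3]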
|
forsyde/forsyde-shallow
|
src/ForSyDe/Shallow/Utility/PolyArith.hs
|
bsd-3-clause
| 3,987 | 0 | 14 | 840 | 1,619 | 866 | 753 | 60 | 4 |
module System.Win32.DHCP.CLIENT_UID
( CLIENT_UID (..)
, clientUid
, macCuid
, macCuidDrop5
, withMac
) where
import Foreign
import System.Win32.Types
import Data.Mac
import System.Win32.DHCP.DhcpStructure
import System.Win32.DHCP.LengthBuffer
-- typedef struct _DHCP_BINARY_DATA {
-- DWORD DataLength;
-- BYTE *Data;
-- } DHCP_BINARY_DATA, *LPDHCP_BINARY_DATA, DHCP_CLIENT_UID;
-- Byte 0 - 3: The result of a binary AND on the IP address and the subnet
-- mask in reverse order.
-- Byte 4: Hardware identifier. This value is always 0x01.
-- Byte 5 - 10: The Mac address of the client.
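-- Illustration (hypothetical values): a client at 192.168.1.10 behind the
-- subnet mask 255.255.255.0 with MAC 00:11:22:33:44:55 would be encoded as
-- [0x00,0x01,0xA8,0xC0, 0x01, 0x00,0x11,0x22,0x33,0x44,0x55]
-- (reversed subnet bytes, the 0x01 hardware identifier, then the MAC).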
newtype CLIENT_UID = CLIENT_UID (LengthBuffer BYTE)
unwrap :: CLIENT_UID -> LengthBuffer BYTE
unwrap (CLIENT_UID uid) = uid
clientUid :: DhcpStructure CLIENT_UID
clientUid = newtypeDhcpStructure CLIENT_UID unwrap
$ lengthBuffer (basicDhcpArray storableDhcpStructure)
-- |Functions returning a CLIENT_UID often have the first 5 bytes holding
-- information about the subnet being used. Microsoft does not document this,
-- but it can be determined through experimentation.
macCuidDrop5 :: CLIENT_UID -> Mac
macCuidDrop5 (CLIENT_UID (LengthBuffer _ bytes)) = fromOctets a b c d e f
where
[a, b, c, d, e, f] = drop 5 bytes
macCuid :: CLIENT_UID -> Mac
macCuid (CLIENT_UID (LengthBuffer _ bytes)) = fromOctets a b c d e f
where
[a, b, c, d, e, f] = bytes
fromMac :: Mac -> CLIENT_UID
fromMac mac = CLIENT_UID (LengthBuffer 6 [a, b, c, d, e, f])
where
(a, b, c, d, e, f) = toOctets mac
-- |When creating CLIENT_UID structures in memory we only need 6 bytes
-- representing the Mac address. This is contrary to MSDN documentation, which
-- states that there should be 11 bytes, with the first 5 being constructed
-- from the IP and subnet.
withMac :: Mac -> (Ptr CLIENT_UID -> IO b) -> IO b
withMac mac f = withDhcp clientUid (fromMac mac) f
|
mikesteele81/Win32-dhcp-server
|
src/System/Win32/DHCP/CLIENT_UID.hs
|
bsd-3-clause
| 1,891 | 0 | 9 | 368 | 415 | 236 | 179 | 28 | 1 |
module ContextFilter (filterContext) where
filterContext ::
Int {- ^ context before -} ->
Int {- ^ context after -} ->
(a -> Bool) {- ^ predicate -} ->
[a] {- ^ inputs -} ->
[a] {- ^ matches with context -}
filterContext before after p xs
| before < 0 = error "filterContext: bad before"
| after < 0 = error "filterContext: bad after"
| otherwise = selectList (dropSelection before selects) xs
where
width = before + after
selects = go 0 xs
go n [] = replicateKeep n
go n (y : ys)
| p y = Keep (go width ys)
| n > 0 = Keep (go (n - 1) ys)
| otherwise = Skip (go n ys)
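-- Illustrative behaviour:
--
-- >>> filterContext 0 0 even [1..6]
-- [2,4,6]
-- >>> filterContext 1 1 (== 4) [1..7]
-- [3,4,5]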
data Selection = End | Keep Selection | Skip Selection
deriving (Show)
replicateKeep :: Int -> Selection
replicateKeep 0 = End
replicateKeep i = Keep (replicateKeep (i - 1))
dropSelection :: Int -> Selection -> Selection
dropSelection 0 x = x
dropSelection _ End = End
dropSelection i (Keep x) = dropSelection (i - 1) x
dropSelection i (Skip x) = dropSelection (i - 1) x
selectList :: Selection -> [a] -> [a]
selectList (Keep x) (y : ys) = y : selectList x ys
selectList (Skip x) (_ : ys) = selectList x ys
selectList _ _ = []
|
glguy/irc-core
|
src/ContextFilter.hs
|
isc
| 1,274 | 0 | 12 | 399 | 482 | 245 | 237 | 32 | 2 |
{-# LANGUAGE FlexibleInstances, OverloadedStrings, FlexibleContexts #-}
{-|
Module : VizHaskell.TreeRepresentation
Description : Representation of all the types that can be shown as a tree.
This module defines the 'TreeRepresentable' class, which allows one to define
a data type as being representable by a tree.
-}
module VizHaskell.TreeRepresentation(
TreeRepresentable(..),
RepresentationTree(..)
) where
import VizHaskell.Core
import Data.Aeson
import Data.Aeson.Types(Pair)
import Data.String
import qualified Data.Text
import qualified Data.Vector as V
{-|
Class defining all the datatypes that may be represented
as a tree.
-}
class TreeRepresentable t where
{-|
It returns the contents of a node.
-}
contents :: t b -> Maybe b
{-|
It returns the children of a node.
-}
children :: t b -> [t b]
{-|
Additional info attached to the given node.
Unless explicitly overwritten by concrete representations,
it defaults to @Nothing@.
-}
label :: t b -> Maybe String
{-|
CSS style being applied to the
given node. Unless explicitly overwritten by concrete representations,
it defaults to @Nothing@.
-}
className :: t b -> Maybe String
label _ = Nothing
className _ = Nothing
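-- A minimal illustrative instance (the @BinTree@ type below is hypothetical,
-- not part of this module):
--
-- > data BinTree a = Leaf | Node (BinTree a) a (BinTree a)
-- > instance TreeRepresentable BinTree where
-- >   contents Leaf         = Nothing
-- >   contents (Node _ x _) = Just x
-- >   children Leaf         = []
-- >   children (Node l _ r) = [l, r]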
{-|
This representation type specifies that the value associated with
this representation (see 'VizHaskell.Core.RPair') must be shown as
a tree. It receives another representation type, which refers to
the representation type of the values contained within the nodes.
-}
data RepresentationTree rep = RepresentationTree rep
instance Representation rep => Representation (RepresentationTree rep)
{-|
All the types implementing 'TreeRepresentable' are representable with
a 'RepresentationTree', provided their nodes are also representable.
-}
instance (TreeRepresentable t, Representation rep, VizRepresentable (RPair rep b))
=> VizRepresentable (RPair (RepresentationTree rep) (t b)) where
vizToJSON rpair =
case contents repValue of
Nothing -> object [ "value" .= object [] , "children" .= Array (V.empty), "repType" .= String "tree"]
Just val -> object ([
"repType" .= String "tree",
"value" .= vizToJSON (buildRPair repNodes val),
"children" .= Array (V.fromList
(map (vizToJSON . buildRPair rep) (children repValue)))
] ++ showIfMaybe "label" (label repValue)
++ showIfMaybe "className" (className repValue))
where rep@(RepresentationTree repNodes) = rPairRepresentation rpair
repValue = rPairValue rpair
showIfMaybe :: Data.Text.Text -> Maybe String -> [Pair]
showIfMaybe attr Nothing = []
showIfMaybe attr (Just str) = [attr .= String (fromString str)]
|
robarago/vizhaskell
|
hs/CoreRepresentacion/VizHaskell/TreeRepresentation.hs
|
gpl-2.0
| 2,919 | 0 | 22 | 744 | 516 | 270 | 246 | 36 | 0 |
{-| Utilities related to livelocks and death detection
-}
{-
Copyright (C) 2014 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Utils.Livelock
( Livelock
, mkLivelockFile
, isDead
) where
import qualified Control.Exception as E
import Control.Monad
import Control.Monad.Error
import System.Directory (doesFileExist)
import System.IO
import System.Posix.IO
import System.Posix.Types (Fd)
import System.Time (ClockTime(..), getClockTime)
import Ganeti.BasicTypes
import Ganeti.Logging
import Ganeti.Path (livelockFile)
import Ganeti.Utils (lockFile)
type Livelock = FilePath
-- | Appends the current time to the given prefix, creates
-- the lockfile in the appropriate directory, and locks it.
-- Returns its full path and the file's file descriptor.
mkLivelockFile :: (Error e, MonadError e m, MonadIO m)
=> FilePath -> m (Fd, Livelock)
mkLivelockFile prefix = do
(TOD secs _) <- liftIO getClockTime
lockfile <- liftIO . livelockFile $ prefix ++ "_" ++ show secs
fd <- liftIO (lockFile lockfile) >>= \r -> case r of
Bad msg -> failError $ "Locking the livelock file " ++ lockfile
++ ": " ++ msg
Ok fd -> return fd
return (fd, lockfile)
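-- For example (illustrative): @mkLivelockFile "masterd"@ creates and locks a
-- file whose name is the prefix, an underscore and the current time in
-- seconds, e.g. @masterd_1400000000@, returning the lock's file descriptor
-- together with the file's path.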
-- | Detect whether the process identified by the given path
-- does not exist any more. This function never fails and only
-- returns True if it has positive knowledge that the process
-- does not exist any more (i.e., if it managed to successfully
-- obtain a shared lock on the file).
isDead :: Livelock -> IO Bool
isDead fpath = fmap (isOk :: Result () -> Bool) . runResultT . liftIO $ do
filepresent <- doesFileExist fpath
when filepresent
. E.bracket (openFd fpath ReadOnly Nothing defaultFileFlags) closeFd
$ \fd -> do
logDebug $ "Attempting to get a lock of " ++ fpath
setLock fd (ReadLock, AbsoluteSeek, 0, 0)
logDebug "Got the lock, the process is dead"
|
ribag/ganeti-experiments
|
src/Ganeti/Utils/Livelock.hs
|
gpl-2.0
| 2,647 | 0 | 16 | 589 | 438 | 236 | 202 | 36 | 2 |
{-# LANGUAGE ForeignFunctionInterface #-}
module Data.PHash.Image ( imageHash ) where
import Control.Applicative
import Foreign
import Foreign.C.String
import Foreign.C.Types
import Prelude
import Data.PHash.Types
-- $setup
-- >>> let imgPath = "test/fixtures/grump.jpg"
-- >>> let bogusPath = "bogus"
{-|
Obtain the hash of an image. Returns Nothing on failure. pHash's API does
not provide any error information when this fails, but CImg may dump
something to stderr.
Examples:
>>> import Data.PHash
>>> imageHash imgPath
Just (PHash 17549625427362946731)
-}
imageHash :: FilePath -> IO (Maybe PHash)
imageHash path = withCString path $ \cs ->
with startingPhash $ \pHPtr -> do
res <- c_ph_dct_imagehash cs pHPtr
if success res
then Just . fromCPHash <$> peek pHPtr
else return Nothing
where startingPhash = CULong 0
success (CInt (-1)) = False
success _ = True
foreign import ccall "pHash.h ph_dct_imagehash" c_ph_dct_imagehash :: CString -> Ptr CULong -> IO CInt
|
bitemyapp/phash
|
src/Data/PHash/Image.hs
|
gpl-3.0
| 1,085 | 0 | 13 | 261 | 199 | 107 | 92 | 19 | 3 |
{-# LANGUAGE OverloadedStrings #-}
-- Module : Test.AWS.CodeDeploy
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Test.AWS.CodeDeploy
( tests
, fixtures
) where
import Network.AWS.CodeDeploy
import Test.AWS.Gen.CodeDeploy
import Test.Tasty
tests :: [TestTree]
tests = []
fixtures :: [TestTree]
fixtures = []
|
fmapfmapfmap/amazonka
|
amazonka-codedeploy/test/Test/AWS/CodeDeploy.hs
|
mpl-2.0
| 752 | 0 | 5 | 201 | 73 | 50 | 23 | 11 | 1 |
module Baz (
module X
) where
import Ignore as X
import Foo as X
|
carymrobbins/intellij-haskforce
|
tests/gold/resolve/Module00003/Baz.hs
|
apache-2.0
| 72 | 0 | 4 | 22 | 20 | 15 | 5 | 4 | 0 |
-- http://www.codewars.com/kata/5541f58a944b85ce6d00006a
module Codewars.Kata.Fib where
productFib :: Integer -> (Integer, Integer, Bool)
productFib n = wrap . head . dropWhile ((< n) . uncurry (*)) $ fibs where
fib = 1 : 1 : zipWith (+) fib (tail fib)
fibs = zip fib (tail fib)
wrap (a, b) = (a, b, a * b == n)
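-- Illustrative behaviour:
--
-- >>> productFib 714
-- (21,34,True)
-- >>> productFib 800
-- (34,55,False)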
|
Bodigrim/katas
|
src/haskell/B-Product-of-consecutive-Fib-numbers.hs
|
bsd-2-clause
| 322 | 0 | 10 | 66 | 142 | 80 | 62 | 6 | 1 |
{-# LANGUAGE DeriveDataTypeable, TypeFamilies, TemplateHaskell,
FlexibleInstances, FlexibleContexts, MultiParamTypeClasses,
TypeOperators, TypeSynonymInstances #-}
module Distribution.Server.Packages.Downloads where
import Distribution.Server.Framework.Instances ()
import Distribution.Package
import Distribution.Version
import Data.Acid
import Data.SafeCopy (base, deriveSafeCopy)
import Data.Time.Calendar
import Data.Typeable (Typeable)
import Data.Map (Map)
import Data.Maybe (fromMaybe)
import qualified Data.Map as Map
import Control.Monad.State (put, get)
import Control.Monad.Reader (ask, asks)
import Control.DeepSeq
-----------------------------------------
-- DownloadCounts is where the download records are converted to an historical
-- format at leisure
data DownloadCounts = DownloadCounts {
totalDownloads :: Int,
downloadMap :: Map PackageName DownloadInfo
} deriving (Eq, Show, Typeable)
emptyDownloadCounts :: DownloadCounts
emptyDownloadCounts = DownloadCounts 0 Map.empty
data DownloadInfo = DownloadInfo {
monthDownloads :: Map (Int, Int) PackageDownloads,
dayDownloads :: Map Day PackageDownloads,
packageDownloads :: PackageDownloads
} deriving (Eq, Show, Typeable)
emptyDownloadInfo :: DownloadInfo
emptyDownloadInfo = DownloadInfo Map.empty Map.empty emptyPackageDownloads
data PackageDownloads = PackageDownloads {
allDownloads :: Int,
versionDownloads :: Map Version Int
} deriving (Eq, Show, Typeable)
emptyPackageDownloads :: PackageDownloads
emptyPackageDownloads = PackageDownloads 0 Map.empty
packageDowns :: DownloadInfo -> Int
packageDowns = allDownloads . packageDownloads
lookupPackageDowns :: DownloadCounts -> PackageName -> Int
lookupPackageDowns dcs pkgname = maybe 0 packageDowns $ Map.lookup pkgname (downloadMap dcs)
incrementCounts :: Day -> PackageName -> Version -> Int -> DownloadCounts -> DownloadCounts
incrementCounts day pkgname version count (DownloadCounts total perPackage) =
DownloadCounts
(total + count)
(adjustFrom (incrementInfo day version count) pkgname emptyDownloadInfo perPackage)
incrementInfo :: Day -> Version -> Int -> DownloadInfo -> DownloadInfo
incrementInfo day version count (DownloadInfo perMonth perDay total) =
DownloadInfo
(adjustFrom (incrementPackage version count) (fromIntegral year, month) emptyPackageDownloads perMonth)
(adjustFrom (incrementPackage version count) day emptyPackageDownloads perDay)
(incrementPackage version count total)
where
(year, month, _) = toGregorian day
incrementPackage :: Version -> Int -> PackageDownloads -> PackageDownloads
incrementPackage version count (PackageDownloads total perVersion) =
PackageDownloads (total + count) (adjustFrom (+count) version 0 perVersion)
adjustFrom :: Ord k => (a -> a) -> k -> a -> Map k a -> Map k a
adjustFrom func key value = Map.alter (Just . func . fromMaybe value) key
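-- For example:
--
-- >>> adjustFrom (+1) "x" 0 (Map.fromList [("x",2)])
-- fromList [("x",3)]
-- >>> adjustFrom (+1) "y" 0 (Map.fromList [("x",2)])
-- fromList [("x",2),("y",1)]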
----
replacePackageDownloads :: DownloadCounts -> Update DownloadCounts ()
replacePackageDownloads = put
registerDownload :: Day -> PackageId -> Int -> Update DownloadCounts (Int, Int)
registerDownload day pkgid count = do
dc <- get
let pkgname = packageName pkgid
dc' = incrementCounts day pkgname (packageVersion pkgid) count dc
put dc'
return (lookupPackageDowns dc pkgname, lookupPackageDowns dc' pkgname)
getDownloadCounts :: Query DownloadCounts DownloadCounts
getDownloadCounts = ask
getDownloadInfo :: PackageName -> Query DownloadCounts DownloadInfo
getDownloadInfo pkgname = asks (Map.findWithDefault emptyDownloadInfo pkgname . downloadMap)
--------------------------------------------------------------------------------
$(deriveSafeCopy 0 'base ''DownloadCounts)
$(deriveSafeCopy 0 'base ''DownloadInfo)
$(deriveSafeCopy 0 'base ''PackageDownloads)
instance NFData PackageDownloads where
rnf (PackageDownloads a b) = rnf a `seq` rnf b
instance NFData DownloadInfo where
rnf (DownloadInfo a b c) = rnf a `seq` rnf b `seq` rnf c
instance NFData DownloadCounts where
rnf (DownloadCounts a b) = rnf a `seq` rnf b
initialDownloadCounts :: DownloadCounts
initialDownloadCounts = emptyDownloadCounts
$(makeAcidic ''DownloadCounts ['replacePackageDownloads
,'registerDownload
,'getDownloadCounts
,'getDownloadInfo
])
|
isomorphism/hackage2
|
Distribution/Server/Packages/Downloads.hs
|
bsd-3-clause
| 4,426 | 0 | 12 | 756 | 1,138 | 604 | 534 | 85 | 1 |
findPrefix :: String -> String -> String
findPrefix [] _ = []
findPrefix _ [] = []
findPrefix (x:xs) (y:ys) = if x == y then x : findPrefix xs ys
else []
solve :: String -> String -> [String]
solve x y = [prefix, (drop l x), (drop l y)]
where prefix = findPrefix x y
l = length prefix
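-- Illustrative behaviour:
--
-- >>> solve "abcdef" "abcpqr"
-- ["abc","def","pqr"]
--
-- The program then prints each piece prefixed with its length, e.g. "3 abc".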
withLength :: String -> String
withLength x = (show $ length x) ++ " " ++ x
main :: IO()
main = do
x <- getLine
y <- getLine
putStr $ unlines $ map withLength $ solve x y
|
EdisonAlgorithms/HackerRank
|
practice/fp/recursion/prefix-compression/prefix-compression.hs
|
mit
| 528 | 0 | 9 | 172 | 247 | 126 | 121 | 16 | 2 |
module Uni.SS04.Serie2 where
-- $Id$
import RAM
import RAM.Example
import RAM.Check
import RAM.Builtin
import Prime (prime)
import Sets (mkSet,union)
-- import Turing_Fun
import Wort
import ToDoc
import Random
import Array
import Data.List (inits)
import qualified Machine.Numerical.Type as N
import qualified Machine.Numerical.Inter as NI
import qualified Machine.Clock.Type as C
import qualified Machine.Clock.Inter as CI
import qualified Inter.Types as T
import Inter.Wrapper
-- Helper
-- Requirement imposed on all machines
checkforall m = do
return ()
stdbuiltins = mkSet [ RAM.Builtin.Copy , RAM.Builtin.Plus , RAM.Builtin.Minus ]
-- ===========================================================
-- LOOP
-- ============================================================
loopexample = -- Effect: x0 := 2 + ( x1 + x1 )
[ Builtin { name = Plus , res = "x0" , args = ["x1","x1"] }
, Inc "zwei"
, Inc "zwei"
, Loop "zwei" [ Inc "x0" ] ]
conformloop bs prog =
do
loopy prog
builtins bs prog
loopfib, loopprim, loopsqrt:: IO ( T.Var N.Computer
( N.Type Program )
( Program )
)
-- -----------------------------------------------------------
-- LOOP-FIB
-- Builtin = None
-- -----------------------------------------------------------
-- Helper for loopfib
top :: Integer
top = 15
-- fibs = cache array of Fibonacci numbers up to top
fibs :: Array Integer Integer
fibs = array ( 0, top ) $ ( 0, 0) : ( 1, 1 ) : do
k <- [ 2 .. top ]
return ( k, fibs ! (k-1) + fibs ! (k-2) )
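-- For example:
--
-- >>> fibs ! 10
-- 55
-- >>> fibs ! top
-- 610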
fibtestliste :: IO [[Integer]]
fibtestliste = sequence $ replicate 10 $ do
xy <- sequence $ replicate 1 $ randomRIO (0, top)
return xy
loopfib = do
tests <- fibtestliste
let it = N.Make
{ N.fun_info = text "x0 := fib( x1 )"
, N.fun = \ [ x ] -> fibs ! x
, N.args = tests
, N.cut = 10000
, N.check = conformloop stdbuiltins -- RAM.Builtin.every --RAM.Builtin.none
, N.start = loopexample
}
return $ NI.computer "LOOP" "FIB" it
-- -----------------------------------------------------------
-- LOOP-PRIM
-- Builtin = Mod
-- -----------------------------------------------------------
nums :: IO [Integer]
nums = do
let fixed = [ 0 .. 10] ++ [ 23 , 101 ]
rnds <- sequence $ replicate 10 $ randomRIO ( 10, 100 )
return $ fixed ++ rnds
iolooptests :: IO [[Integer]]
iolooptests = do
xs <- nums
return $ map return xs
loopprim = do
tests <- iolooptests
let it = N.Make
{ N.fun_info = text "x0 := if istPrimzahl( x1 ) then 1 else 0"
, N.fun = \ [x] -> if prime x then 1 else 0
, N.args = tests
, N.cut = 10000
, N.check = conformloop $ union stdbuiltins $ mkSet [ RAM.Builtin.Mod ]
, N.start = loopexample
}
return $ NI.computer "LOOP" "PRIM" it
-- -----------------------------------------------------------
-- LOOP-SQRT
-- Builtin = Times
-- -----------------------------------------------------------
loopsqrt = do
tests <- iolooptests
let it = N.Make
{ N.fun_info = text "x0 := abgerundete Quadratwurzel von x1"
, N.fun = \ [ x ] -> truncate $ sqrt $ fromIntegral x
, N.args = tests
, N.cut = 10000
, N.check = conformloop $ union stdbuiltins $ mkSet [ RAM.Builtin.Times ]
, N.start = loopexample
}
return $ NI.computer "LOOP" "SQRT" it
-- ===========================================================
-- WHILE
-- ============================================================
conformwhile bs prog =
do
builtins bs prog
whileexample = [ While "x1" [ Loop "x2" [ Inc "x0" ],Dec "x1"]]
-- -----------------------------------------------------------
-- WHILE-DIV
-- Builtin = ?
-- -----------------------------------------------------------
whilediv = do
tests <- iowhiletests
let it = N.Make
{ N.fun_info = text "x0 := x1 div x2"
, N.fun = \ [ x1 , x2 ] -> x1 `div` x2
, N.args = tests --foldr1 (++) [ [x1 , x2] | x1 <- [1 .. 10] ++ [ 21 .. 23] , x2 <- [1 .. 7] ]
, N.cut = 10000
, N.check = conformwhile stdbuiltins
, N.start = whileexample
}
return $ NI.computer "WHILE" "DIV" it
-- -----------------------------------------------------------
-- WHILE-MOD
-- Builtin = ?
-- -----------------------------------------------------------
iowhiletests :: IO [[Integer]]
iowhiletests =
sequence $ replicate 10 $ do
xy <- sequence $ replicate 2 $ randomRIO (1,20)
return xy
-- let xs = foldr1 (++) [ [ x1 , x2 ] | x1 <- [1 .. 10] ++ [ 21 .. 23] , x2 <- [1 .. 7] ]
-- return $ map return xs
whilemod = do
tests <- iowhiletests
let it = N.Make
{ N.fun_info = text "x0 := x1 mod x2"
, N.fun = \ [ x1 , x2 ] -> x1 `mod` x2
, N.args = tests -- foldr1 (++) [ [ x1 , x2 ] | x1 <- [1 .. 10] ++ [ 21 .. 23] , x2 <- [1 .. 7] ]
, N.cut = 10000
, N.check = conformwhile stdbuiltins
, N.start = whileexample
}
return $ NI.computer "WHILE" "MOD" it
-- -----------------------------------------------------------
generate :: [ IO T.Variant ]
generate =
[ do i <- loopfib ; return $ T.Variant i
, do i <- loopprim ; return $ T.Variant i
, do i <- loopsqrt ; return $ T.Variant i
, do i <- whilemod ; return $ T.Variant i
, do i <- whilediv ; return $ T.Variant i
]
-- -- --------------------
-- tmstep2 :: IO ( T.Var C.Clock
-- ( C.Type ( Program ) )
-- ( Program )
-- )
-- -- ---------------------------------------
-- -- Machines with step bounds
-- -- ---------------------------------------
-- tmstep2 = do
-- let it = C.Make
-- { C.fun = \ n -> (2*n) + 3
-- , C.fun_info = text "\\ n -> 2n + 3"
-- , C.args = [ 0 .. 7 ] ++ [ 13 ]
-- , C.cut = 1000
-- , C.check = checkforall
-- , C.start = stepexample
-- }
-- return $ CI.clock "RAM" "Linear" it
-- -- Example for these machines
-- stepexample =
-- [ Inc "x0"
-- , Loop "x1" [ Loop "x0" [ Inc "x0" ] ]
-- ]
|
florianpilz/autotool
|
src/Uni/SS04/Serie2.hs
|
gpl-2.0
| 6,513 | 8 | 15 | 1,978 | 1,476 | 829 | 647 | 118 | 2 |
{-# LANGUAGE CPP #-}
#define dummy -- just to ensure cpp gets called on this file
-- | This is just a convenient way of bunching the XML combinators
-- together with some other things you are likely to want at the
-- same time.
module Text.XML.HaXml
( module Text.XML.HaXml.Types
, module Text.XML.HaXml.Combinators
, module Text.XML.HaXml.Parse
, module Text.XML.HaXml.Pretty
, module Text.XML.HaXml.Html.Generate
, module Text.XML.HaXml.Html.Parse
, module Text.XML.HaXml.Validate
, module Text.XML.HaXml.Wrappers
, module Text.XML.HaXml.Verbatim
, module Text.XML.HaXml.Escape
, render
, version
) where
import Text.XML.HaXml.Types
import Text.XML.HaXml.Combinators
import Text.XML.HaXml.Parse (xmlParse,dtdParse)
import Text.XML.HaXml.Pretty (element)
import Text.XML.HaXml.Html.Generate
import Text.XML.HaXml.Html.Parse (htmlParse)
import Text.XML.HaXml.Validate (validate)
import Text.XML.HaXml.Wrappers (fix2Args,processXmlWith)
import Text.XML.HaXml.Verbatim
import Text.XML.HaXml.Escape
import Text.PrettyPrint.HughesPJ (render)
-- | The version of the library.
version :: String
version = VERSION
-- expect cpp to fill in value
|
alexbaluta/courseography
|
dependencies/HaXml-1.25.3/src/Text/XML/HaXml.hs
|
gpl-3.0
| 1,193 | 0 | 5 | 186 | 228 | 163 | 65 | 27 | 1 |
{-# LANGUAGE GADTs #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module T6124 where
newtype A = MkA Int
newtype B = MkB Char
data T a where
A :: T A
B :: T B
f :: T A -> A
f A = undefined
|
snoyberg/ghc
|
testsuite/tests/pmcheck/should_compile/T6124.hs
|
bsd-3-clause
| 205 | 0 | 6 | 55 | 63 | 37 | 26 | 10 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
-- | It is well known that fully parallel loops can always be
-- interchanged inwards with a sequential loop. This module
-- implements that transformation.
--
-- This is also where we implement loop-switching (for branches),
-- which is semantically similar to interchange.
module Futhark.Pass.ExtractKernels.Interchange
( SeqLoop (..),
interchangeLoops,
Branch (..),
interchangeBranch,
WithAccStm (..),
interchangeWithAcc,
)
where
import Control.Monad.Identity
import Data.List (find)
import Data.Maybe
import Futhark.IR.SOACS
import Futhark.MonadFreshNames
import Futhark.Pass.ExtractKernels.Distribution
( KernelNest,
LoopNesting (..),
kernelNestLoops,
scopeOfKernelNest,
)
import Futhark.Tools
import Futhark.Transform.Rename
import Futhark.Util (splitFromEnd)
-- | An encoding of a sequential do-loop with no existential context,
-- alongside its result pattern.
data SeqLoop
= SeqLoop [Int] (Pat Type) [(FParam SOACS, SubExp)] (LoopForm SOACS) (Body SOACS)
loopPerm :: SeqLoop -> [Int]
loopPerm (SeqLoop perm _ _ _ _) = perm
seqLoopStm :: SeqLoop -> Stm SOACS
seqLoopStm (SeqLoop _ pat merge form body) =
Let pat (defAux ()) $ DoLoop merge form body
interchangeLoop ::
(MonadBuilder m, LocalScope SOACS m) =>
(VName -> Maybe VName) ->
SeqLoop ->
LoopNesting ->
m SeqLoop
interchangeLoop
isMapParameter
(SeqLoop perm loop_pat merge form body)
(MapNesting pat aux w params_and_arrs) = do
merge_expanded <-
localScope (scopeOfLParams $ map fst params_and_arrs) $
mapM expand merge
let loop_pat_expanded =
Pat $ map expandPatElem $ patElems loop_pat
new_params =
[Param attrs pname $ fromDecl ptype | (Param attrs pname ptype, _) <- merge]
new_arrs = map (paramName . fst) merge_expanded
rettype = map rowType $ patTypes loop_pat_expanded
-- If the map consumes something that is bound outside the loop
-- (i.e. is not a merge parameter), we have to copy() it. As a
-- small simplification, we just remove the parameter outright if
-- it is not used anymore. This might happen if the parameter was
    -- used just as the initial value of a merge parameter.
((params', arrs'), pre_copy_stms) <-
runBuilder $
localScope (scopeOfLParams new_params) $
unzip . catMaybes <$> mapM copyOrRemoveParam params_and_arrs
let lam = Lambda (params' <> new_params) body rettype
map_stm =
Let loop_pat_expanded aux $
Op $ Screma w (arrs' <> new_arrs) (mapSOAC lam)
res = varsRes $ patNames loop_pat_expanded
pat' = Pat $ rearrangeShape perm $ patElems pat
pure $
SeqLoop perm pat' merge_expanded form $
mkBody (pre_copy_stms <> oneStm map_stm) res
where
free_in_body = freeIn body
copyOrRemoveParam (param, arr)
| not (paramName param `nameIn` free_in_body) =
return Nothing
| otherwise =
return $ Just (param, arr)
expandedInit _ (Var v)
| Just arr <- isMapParameter v =
pure $ Var arr
expandedInit param_name se =
letSubExp (param_name <> "_expanded_init") $
BasicOp $ Replicate (Shape [w]) se
expand (merge_param, merge_init) = do
expanded_param <-
newParam (param_name <> "_expanded") $
-- FIXME: Unique here is a hack to make sure the copy from
-- makeCopyInitial is not prematurely simplified away.
-- It'd be better to fix this somewhere else...
arrayOf (paramDeclType merge_param) (Shape [w]) Unique
expanded_init <- expandedInit param_name merge_init
return (expanded_param, expanded_init)
where
param_name = baseString $ paramName merge_param
expandPatElem (PatElem name t) =
PatElem name $ arrayOfRow t w
-- We need to copy some initial arguments because otherwise the result
-- of the loop might alias the input (if the number of iterations is
-- 0), which is a problem if the result is consumed.
maybeCopyInitial ::
(MonadBuilder m) =>
(VName -> Bool) ->
SeqLoop ->
m SeqLoop
maybeCopyInitial isMapInput (SeqLoop perm loop_pat merge form body) =
SeqLoop perm loop_pat <$> mapM f merge <*> pure form <*> pure body
where
f (p, Var arg)
| isMapInput arg =
(p,) <$> letSubExp (baseString (paramName p) <> "_inter_copy") (BasicOp $ Copy arg)
f (p, arg) =
pure (p, arg)
manifestMaps :: [LoopNesting] -> [VName] -> Stms SOACS -> ([VName], Stms SOACS)
manifestMaps [] res stms = (res, stms)
manifestMaps (n : ns) res stms =
let (res', stms') = manifestMaps ns res stms
(params, arrs) = unzip $ loopNestingParamsAndArrs n
lam =
Lambda
params
(mkBody stms' $ varsRes res')
(map rowType $ patTypes (loopNestingPat n))
in ( patNames $ loopNestingPat n,
oneStm $
Let (loopNestingPat n) (loopNestingAux n) $
Op $ Screma (loopNestingWidth n) arrs (mapSOAC lam)
)
-- | Given a (parallel) map nesting and an inner sequential loop, move
-- the maps inside the sequential loop. The result is several
-- statements - one of these will be the loop, which will then contain
-- statements with @map@ expressions.
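--
-- Schematically (an illustrative sketch, not actual Futhark IR syntax; the
-- names below are made up):
--
-- >  map (\x -> loop acc = x0 for i < n do f acc x) xs
-- >
-- > is turned into
-- >
-- >  loop accs = replicate (length xs) x0 for i < n do
-- >    map (\acc x -> f acc x) accs xs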
interchangeLoops ::
(MonadFreshNames m, HasScope SOACS m) =>
KernelNest ->
SeqLoop ->
m (Stms SOACS)
interchangeLoops full_nest = recurse (kernelNestLoops full_nest)
where
recurse nest loop
| (ns, [n]) <- splitFromEnd 1 nest = do
let isMapParameter v =
snd <$> find ((== v) . paramName . fst) (loopNestingParamsAndArrs n)
isMapInput v =
v `elem` map snd (loopNestingParamsAndArrs n)
(loop', stms) <-
runBuilder . localScope (scopeOfKernelNest full_nest) $
maybeCopyInitial isMapInput
=<< interchangeLoop isMapParameter loop n
-- Only safe to continue interchanging if we didn't need to add
-- any new statements; otherwise we manifest the remaining nests
-- as Maps and hand them back to the flattener.
if null stms
then recurse ns loop'
else
let loop_stm = seqLoopStm loop'
names = rearrangeShape (loopPerm loop') (patNames (stmPat loop_stm))
in pure $ snd $ manifestMaps ns names $ stms <> oneStm loop_stm
| otherwise = pure $ oneStm $ seqLoopStm loop
-- | An encoding of a branch, alongside its result pattern.
data Branch
= Branch [Int] (Pat Type) SubExp (Body SOACS) (Body SOACS) (IfDec (BranchType SOACS))
branchStm :: Branch -> Stm SOACS
branchStm (Branch _ pat cond tbranch fbranch ret) =
Let pat (defAux ()) $ If cond tbranch fbranch ret
interchangeBranch1 ::
(MonadBuilder m) =>
Branch ->
LoopNesting ->
m Branch
interchangeBranch1
(Branch perm branch_pat cond tbranch fbranch (IfDec ret if_sort))
(MapNesting pat aux w params_and_arrs) = do
let ret' = map (`arrayOfRow` Free w) ret
pat' = Pat $ rearrangeShape perm $ patElems pat
(params, arrs) = unzip params_and_arrs
lam_ret = rearrangeShape perm $ map rowType $ patTypes pat
branch_pat' =
Pat $ map (fmap (`arrayOfRow` w)) $ patElems branch_pat
mkBranch branch = (renameBody =<<) $ do
let lam = Lambda params branch lam_ret
res = varsRes $ patNames branch_pat'
map_stm = Let branch_pat' aux $ Op $ Screma w arrs $ mapSOAC lam
return $ mkBody (oneStm map_stm) res
tbranch' <- mkBranch tbranch
fbranch' <- mkBranch fbranch
return $
Branch [0 .. patSize pat -1] pat' cond tbranch' fbranch' $
IfDec ret' if_sort
interchangeBranch ::
(MonadFreshNames m, HasScope SOACS m) =>
KernelNest ->
Branch ->
m (Stms SOACS)
interchangeBranch nest loop = do
(loop', stms) <-
runBuilder $ foldM interchangeBranch1 loop $ reverse $ kernelNestLoops nest
return $ stms <> oneStm (branchStm loop')
-- | An encoding of a WithAcc, alongside its result pattern.
data WithAccStm
= WithAccStm [Int] (Pat Type) [(Shape, [VName], Maybe (Lambda SOACS, [SubExp]))] (Lambda SOACS)
withAccStm :: WithAccStm -> Stm SOACS
withAccStm (WithAccStm _ pat inputs lam) =
Let pat (defAux ()) $ WithAcc inputs lam
interchangeWithAcc1 ::
(MonadBuilder m, Rep m ~ SOACS) =>
WithAccStm ->
LoopNesting ->
m WithAccStm
interchangeWithAcc1
(WithAccStm perm _withacc_pat inputs acc_lam)
(MapNesting map_pat map_aux w params_and_arrs) = do
inputs' <- mapM onInput inputs
lam_params' <- newAccLamParams $ lambdaParams acc_lam
iota_p <- newParam "iota_p" $ Prim int64
acc_lam' <- trLam (Var (paramName iota_p)) <=< mkLambda lam_params' $ do
let acc_params = drop (length inputs) lam_params'
orig_acc_params = drop (length inputs) $ lambdaParams acc_lam
iota_w <-
letExp "acc_inter_iota" . BasicOp $
Iota w (intConst Int64 0) (intConst Int64 1) Int64
let (params, arrs) = unzip params_and_arrs
maplam_ret = lambdaReturnType acc_lam
maplam = Lambda (iota_p : orig_acc_params ++ params) (lambdaBody acc_lam) maplam_ret
auxing map_aux . fmap subExpsRes . letTupExp' "withacc_inter" $
Op $ Screma w (iota_w : map paramName acc_params ++ arrs) (mapSOAC maplam)
let pat = Pat $ rearrangeShape perm $ patElems map_pat
pure $ WithAccStm perm pat inputs' acc_lam'
where
newAccLamParams ps = do
let (cert_ps, acc_ps) = splitAt (length ps `div` 2) ps
-- Should not rename the certificates.
acc_ps' <- forM acc_ps $ \(Param attrs v t) ->
Param attrs <$> newVName (baseString v) <*> pure t
pure $ cert_ps <> acc_ps'
num_accs = length inputs
acc_certs = map paramName $ take num_accs $ lambdaParams acc_lam
onArr v =
pure . maybe v snd $
find ((== v) . paramName . fst) params_and_arrs
onInput (shape, arrs, op) =
(Shape [w] <> shape,,) <$> mapM onArr arrs <*> traverse onOp op
onOp (op_lam, nes) = do
-- We need to add an additional index parameter because we are
-- extending the index space of the accumulator.
idx_p <- newParam "idx" $ Prim int64
pure (op_lam {lambdaParams = idx_p : lambdaParams op_lam}, nes)
trType :: TypeBase shape u -> TypeBase shape u
trType (Acc acc ispace ts u)
| acc `elem` acc_certs =
Acc acc (Shape [w] <> ispace) ts u
trType t = t
trParam :: Param (TypeBase shape u) -> Param (TypeBase shape u)
trParam = fmap trType
trLam i (Lambda params body ret) =
localScope (scopeOfLParams params) $
Lambda (map trParam params) <$> trBody i body <*> pure (map trType ret)
trBody i (Body dec stms res) =
inScopeOf stms $ Body dec <$> traverse (trStm i) stms <*> pure res
trStm i (Let pat aux e) =
Let (fmap trType pat) aux <$> trExp i e
trSOAC i = mapSOACM mapper
where
mapper =
identitySOACMapper {mapOnSOACLambda = trLam i}
trExp i (WithAcc acc_inputs lam) =
WithAcc acc_inputs <$> trLam i lam
trExp i (BasicOp (UpdateAcc acc is ses)) = do
acc_t <- lookupType acc
pure $ case acc_t of
Acc cert _ _ _
| cert `elem` acc_certs ->
BasicOp $ UpdateAcc acc (i : is) ses
_ ->
BasicOp $ UpdateAcc acc is ses
trExp i e = mapExpM mapper e
where
mapper =
identityMapper
{ mapOnBody = \scope -> localScope scope . trBody i,
mapOnRetType = pure . trType,
mapOnBranchType = pure . trType,
mapOnFParam = pure . trParam,
mapOnLParam = pure . trParam,
mapOnOp = trSOAC i
}
interchangeWithAcc ::
(MonadFreshNames m, HasScope SOACS m) =>
KernelNest ->
WithAccStm ->
m (Stms SOACS)
interchangeWithAcc nest withacc = do
(withacc', stms) <-
runBuilder $ foldM interchangeWithAcc1 withacc $ reverse $ kernelNestLoops nest
return $ stms <> oneStm (withAccStm withacc')
|
diku-dk/futhark
|
src/Futhark/Pass/ExtractKernels/Interchange.hs
|
isc
| 12,311 | 0 | 21 | 3,356 | 3,647 | 1,830 | 1,817 | 258 | 5 |
module Y2016.M07.D04.Solution where
import Control.Arrow ((&&&), (>>>))
import Control.Monad ((>=>))
import Text.HTML.TagSoup
{--
The name of the game for today's Haskell exercise is report generation.
You have the jUnit test case run-off as XML at Y2016/M07/D01/
You want to generate a report of this scheme:
--}
genReport :: TestRun -> [String]
genReport (ROFF proj t s f e i) =
let p = s - f - e - i in
["-- REPORT " ++ replicate 65 '-', "",
"Test Case Overview:," ++ proj, "",
"Total Tests:," ++ show t,
"Total Started:," ++ show s,
"Total Passed:," ++ show p,
"Total Failures:," ++ show f,
"Total Errors:," ++ show e,
"Total Ignored:," ++ show i, "",
if s == p then "All unit test cases passed."
else "WAIT! WHAT? YOU'RE ALL FIRED!", "",
"-- END-REPORT " ++ replicate 61 '-', ""]
reportTestSummary :: FilePath -> IO ()
reportTestSummary = readFile >=> mapM_ putStrLn . genReport . runoff . parseTags
data TestRun =
ROFF { project :: String, tests, started, failures, errors, ignored :: Int }
deriving Show
runoff :: [Tag String] -> TestRun
runoff =
(head &&& map read . tail >>> uncurry reify)
. getTheThings
. filter (isTagOpenName "testrun")
reify :: String -> [Int] -> TestRun
reify proj [t,s,f,e,i] = ROFF proj t s f e i
getTheThings :: [Tag String] -> [String]
getTheThings tag = [fromAttrib] <*> attribs <*> tag
attribs :: [String]
attribs = words "project tests started failures errors ignored"
{--
*Y2016.M07.D04.Solution> reportTestSummary "Y2016/M07/D01/test.xml"
-- REPORT -----------------------------------------------------------------
Test Case Overview: AA
Total Tests: 22
Total Started: 22
Total Passed: 22
Total Failures: 0
Total Errors: 0
Total Ignored: 0
All unit test cases passed.
-- END-REPORT -------------------------------------------------------------
I didn't get fired! YAY!
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2016/M07/D04/Solution.hs
|
mit
| 1,956 | 0 | 11 | 430 | 460 | 257 | 203 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
import Control.Monad
import Control.Exception (SomeException, try, catch)
import Data.Either
import Test.Tasty
import Test.Tasty.HUnit
import System.FilePath
import Core.Language
import qualified Core.Parser as Parser
import qualified Core.Template as Template
testFile :: FilePath -> FilePath
testFile = ("test/input" </>) . (<.> "core")
main :: IO ()
main = defaultMain $
testGroup "Tests"
[ parserTests
, templateTests
]
parserTests =
testCase "parser tests" $ do
forM_ [ "ex1_21", "infix_simpl", "infixops", "ex2_11"] $ \f -> do
src <- readFile (testFile f)
let result = Parser.parseProgram src
assertBool ("PARSE ERROR: " ++ f) (isRight result)
templateTests =
testCase "template instantiation" $ do
let rnfFile f expect = do
src <- readFile $ testFile f
Template.reduceToNormalForm src @?= Right expect
rnfFile "ex2_4" (Template.NNum 3)
rnfFile "ex2_11" (Template.NNum 4)
|
themattchan/core
|
test/Spec.hs
|
mit
| 1,003 | 0 | 16 | 213 | 298 | 156 | 142 | 30 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Web.Scotty
import Web.Scotty.Cookie
import qualified Network.Wai.Handler.Warp as W
import qualified Network.Wai.Handler.WebSockets as WS
import Network.WebSockets (ConnectionException, ServerApp, pendingRequest, requestPath, rejectRequest, defaultConnectionOptions, acceptRequest, receiveData)
import qualified Network.Wai.Middleware.Static as Static
import qualified Data.Text.Lazy.IO as T
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import qualified Data.Text.Lazy as TL
import qualified Data.ByteString as BS
import qualified Data.Aeson as Aeson
import Control.Monad.IO.Class (liftIO)
import Control.Monad (forever)
import Control.Applicative ((<$>))
import Control.Monad (when)
import Data.Maybe (isJust, fromJust)
import Control.Exception (handle)
import Data.Unique
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TVar (TVar, newTVar, readTVar, writeTVar)
import qualified Auth
import qualified Chat
import Types
main :: IO ()
main = do
let settings = W.setPort 1488 W.defaultSettings
state <- liftIO $ atomically $ newTVar $ S { users = [], connections = [] }
liftIO $ T.putStrLn "start server at http://localhost:1488"
app <- scottyApp $ do
post "/api/auth" $ do
req <- jsonData
liftIO $ putStrLn $ "Login request from " ++ (show req)
res <- liftIO $ Auth.auth req state
when (Auth.isSuccess res) $ do
setSimpleCookie "login" $ Auth.arLogin req
json res
get "/api/status" $ do
res <- getCookie "login" >>= (liftIO . flip Auth.getStatus state)
json res
get "/api/logout" $ do
deleteCookie "login"
login <- getCookie "login"
when (isJust login) $ liftIO $ Auth.logout (fromJust login) state
text "{\"status\":\"ok\"}"
get "/api/users" $ do
users <- liftIO $ atomically $ do
S { users = us } <- readTVar state
return us
text $ TL.pack $ show users
frontend "./frontend"
let ws = WS.websocketsOr defaultConnectionOptions (onWebSocket state) app
W.runSettings settings ws
frontend :: String -> ScottyM ()
frontend appDir = do
middleware $ Static.staticPolicy $ Static.addBase appDir
get "/" $ (liftIO $ T.readFile $ appDir ++ "/index.html") >>= html
onWebSocket :: TVar ServerState -> ServerApp
onWebSocket st pending = do
let path = requestPath . pendingRequest $ pending
let (prefix, suffix) = BS.splitAt 6 path
if prefix == "/chat/" && BS.length suffix > 0
then do
let login = decodeUtf8 suffix
      -- not consistent: the state is read here and written back later in a separate transaction
s@(S { users = us, connections = conns }) <- atomically $ readTVar st
if login `elem` us
then do
conn <- acceptRequest pending
uniq <- newUnique
atomically $ writeTVar st $ s { connections = (uniq, conn) : conns }
Chat.sendToAll st $ Chat.Enter login
handle (catchDisconect st (login, uniq)) $ forever $ do
msg <- Aeson.decode <$> receiveData conn
case msg of
Just (Chat.MessageReq msgText) ->
Chat.sendToAll st $ Chat.Message login msgText
Nothing ->
return ()
        else rejectRequest pending "Such user does not exist"
else do
      rejectRequest pending "Such user does not exist"
where
catchDisconect :: TVar ServerState -> (T.Text, Unique) -> ConnectionException -> IO ()
catchDisconect st (login, connId) _ex = do
atomically $ do
s@(S { connections = conns }) <- readTVar st
writeTVar st $ s { connections = filter ((/= connId) . fst) conns }
      putStrLn $ "Disconnect ws"
Chat.sendToAll st $ Chat.Leave login
|
SPY/scotty-chat-sandbox
|
src/Main.hs
|
mit
| 3,631 | 0 | 22 | 799 | 1,189 | 610 | 579 | 89 | 4 |
module MTGSimulator where
import qualified Data.Map.Strict as Map
data Mana = Black | Blue | Green | Red | White | Colorless | Hybrid Mana Mana deriving (Show, Eq, Ord)
type ManaCounts = Map.Map Mana Int
emptyManaCounts :: ManaCounts
emptyManaCounts = Map.empty
addToManaCounts :: ManaCounts -> Mana -> ManaCounts
addToManaCounts counts symbol =
Map.insert symbol (currentCount + 1) counts
where currentCount = countBySymbol symbol counts
countBySymbol :: Mana -> ManaCounts -> Int
countBySymbol = Map.findWithDefault 0
countSymbols :: [Mana] -> ManaCounts
countSymbols = foldl addToManaCounts emptyManaCounts
castableWith :: [Mana] -> [Mana] -> Bool
castableWith available cost = all hasAdequateManaFor $ Map.keys costCounts
where costCounts = countSymbols cost
availableCounts = countSymbols available
hasAdequateManaFor symbol = amountAvailable >= amountNeeded
where amountAvailable = countBySymbol symbol availableCounts
amountNeeded = countBySymbol symbol costCounts
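-- A usage sketch (hypothetical mana pools; note that the check treats every
-- symbol literally and does not model generic mana costs):
--
-- >>> castableWith [Red, Red, Colorless] [Red, Colorless]
-- True
-- >>> castableWith [Red] [Red, Red]
-- False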
|
rtfeldman/mtgsimulator
|
src/MTGSimulator.hs
|
mit
| 1,036 | 0 | 9 | 192 | 271 | 147 | 124 | 21 | 1 |
-- File : haskelltest.hs
-- Author : Peter Schachte
-- Origin : Mon Aug 23 16:00:03 2010
-- Purpose : Run an assessment test suite
--
-- |This code has a similar purpose to hunit, namely to run a suite of test
-- cases. However, the intended application is for student project
-- assessment. As such, it has a few extra features: it produces output
-- for every test. It prints messages as it goes along, so if the program
-- crashes or hangs during a test, you can see which test it was. It also
-- supports quality tests, which compute a numeric quality assessment for
-- each test. Finally, it supports timeouts, so individual tests or lists
-- of tests can be given a limited time to complete, and it catches
-- exceptions and reports them as failures (a test intended to fail should
-- catch the exception itself and report it as a success).
module HaskellTest (TestCase(..), TestResult(..), expect, quality,
test, testStdout, testVerbose) where
import Prelude hiding (catch)
import Text.Printf
import System.Timeout
import Data.Time
-- import Data.Time.Clock
import System.CPUTime
import System.IO
import Control.Monad
import Control.Exception
-- | The possible results from running a single test.
data TestResult
-- | Straightforward test success, including a (often empty) string to
-- show for brief output and another for verbose output.
= Succeed String String
-- | Straightforward test failure, including a string explaining how or
-- why the test failed, and another string for verbose output.
| Fail String String
-- | Test threw an exception.
| Exception SomeException
-- | Test quality result, as a number between 0.0 and 1.0, including a
-- string explaining why the test received the score it did, and another
-- string for verbose output.
| Quality Double String String
-- | The test did not complete in the allowed time.
| Timeout
-- | The TestCase represents an individual test case or a collection of
-- tests.
data TestCase
-- | The 'Label' test case attaches a string as a label to an individual
-- test or a collection. The label will be printed when the test is
-- started to show what is being tested; a counter is also printed to
-- distinguish among multiple individual tests with the same label.
= Label String TestCase
-- | The value of the specified test case(s) is multiplied by the
-- specified factor
| Scaled Double TestCase
-- | The value of the specified test case is the quotient of the
-- number of passed tests by the total number of tests.
| Ratio TestCase
-- | The specified number of seconds is given as a time limit for the
-- execution of each individual test in the constituent test case.
| TimeLimit Double TestCase
-- | The total score for the included test(s), after any scaling,
-- is written to the specified file
| ResultFile FilePath TestCase
-- | An individual correctness test. The specified test is
-- evaluated, and a message is printed indicating whether it passed or
-- failed, or the quality of the result.
| Test TestResult
-- | A collection of test cases.
| Suite [TestCase]
    -- | A collection of test cases with its own summary information.  The
    -- first String introduces the summary (printed if non-empty), and the
    -- function is applied to the total score and the total count of tests
    -- to produce a closing summary line (also printed if non-empty).
| Summarised String (Double -> Double -> String) TestCase
-- | Construct a correctness test case that compares the value of an
-- expression with an expected value. Returns 'Succeed' if the actual
-- value '==' the expected value, and 'Fail' otherwise.
expect :: (Eq a, Show a) => a -> a -> TestCase
expr `expect` expected
= Test (if expr == expected then Succeed "" ""
else Fail "" ("Expected " ++ show expected ++
" but got " ++ show expr))
-- | Construct a quality test case that tests the value of the first
-- argument, an expression, with the second argument, a function. If the
-- function returns 'False', then the test fails. Otherwise the third
-- argument, another function, is applied, and its value is the quality of
-- the test result (between 0.0 and 1.0).
quality :: Show a => a -> (a -> Bool) -> (a -> Double) -> TestCase
quality expr test assessment
= Test (if not $ test expr then Fail "invalid output" ""
else Quality (assessment expr) "" "")
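-- Usage sketches for 'expect' and 'quality' (the labels, values, and quality
-- metric below are made up for illustration):
--
-- > reverseTest :: TestCase
-- > reverseTest = Label "reverse" (reverse [1,2,3] `expect` ([3,2,1] :: [Int]))
-- >
-- > evenness :: TestCase
-- > evenness = quality ([2,4,5] :: [Int]) (not . null)
-- >              (\xs -> fromIntegral (length (filter even xs))
-- >                        / fromIntegral (length xs))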
-- | Run the provided test or suite, logging the output to the named file.
test :: String -> String -> TestCase -> IO ()
test username fileName test = do
fhandle <- openFile fileName WriteMode
hPutStrLn fhandle ("Begin test of submission for " ++ username)
testToHandle fhandle False test
hClose fhandle
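-- A usage sketch (the user name, log file, and test case are hypothetical):
--
-- > main :: IO ()
-- > main = test "student1" "results.log"
-- >          (TimeLimit 2.0 (Label "arithmetic" ((2 + 2 :: Int) `expect` 4)))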
-- | Run the provided test or suite, logging the output to standard out.
testStdout :: TestCase -> IO ()
testStdout = testToHandle stdout False
-- | Run the provided test or suite, producing a verbose output log to
-- standard out.
testVerbose :: TestCase -> IO ()
testVerbose = testToHandle stdout True
-- | Run the provided test or suite, logging the output to the provided file
-- | handle.
testToHandle :: Handle -> Bool -> TestCase -> IO ()
testToHandle fhandle verbose test = do
startDate <- getZonedTime
hPutStrLn fhandle $ "Haskell test run started " ++ show startDate
startTime <- getCPUTime
(_,score,count) <- runTest fhandle "Test" 1 Nothing verbose test
endTime <- getCPUTime
endDate <- getZonedTime
hPutStrLn fhandle $ "Haskell test run ended " ++ show endDate
hPutStrLn fhandle ("Total CPU time used = " ++
(show $ round (realToFrac(endTime-startTime)/1000000000)) ++
" milliseconds"
)
-- ("Total score: " ++ show score ++
-- " / " ++ (show $ realToFrac count) ++
-- " = " ++
-- show (100*((realToFrac score)/(realToFrac count))) ++
-- "%"
-- )
-- | Run an individual test or suite, sending the output to the provided file
-- handle. The given string and integer are the label in whose scope this
-- test is included and the ordinal position of this test in the suite of
-- tests for this label. Returns the test number of the next test case for
-- this label, the total of the qualities and count of successful test cases,
-- and the total count of individual tests run.
runTest :: Handle -> String -> Int -> Maybe Double -> Bool -> TestCase
-> IO (Int,Double,Double)
-- runTest fhandle label count limit verbose testcase
runTest fhandle _ _ limit verbose (Label label tcase) =
runTest fhandle label 1 limit verbose tcase
runTest fhandle l n _ verbose (TimeLimit secs tcase) =
runTest fhandle l n (Just secs) verbose tcase
runTest fhandle l n limit verbose (Scaled sc tcase) = do
(n1,t,c) <- runTest fhandle l n limit verbose tcase
return (n1,sc*t,sc*c)
runTest fhandle l n limit verbose (Ratio tcase) = do
(n1,t,c) <- runTest fhandle l n limit verbose tcase
return (n1,t/c,1)
runTest fhandle l n limit verbose (ResultFile file tcase) = do
(n1,t,c) <- runTest fhandle l n limit verbose tcase
writeFile file $ (show t) ++ "\n"
return (n1,t,c)
runTest fhandle l n limit verbose (Test code) =
actualTest fhandle l n limit verbose code
runTest fhandle l n limit verbose (Summarised intro outrofn tcase) =
do when (intro /= "") $ hPutStrLn fhandle intro
(n1,t,c) <- runTest fhandle l n limit verbose tcase
let outro = outrofn t c
when (outro /= "") $ hPutStrLn fhandle outro
return (n1,t,c)
runTest fhandle l n _ _ (Suite []) = return (n,0,0)
runTest fhandle l n limit verbose (Suite (t:ts)) =
do (n1,t1,c1) <- runTest fhandle l n limit verbose t
(n2,t2,c2) <- runTest fhandle l n1 limit verbose (Suite ts)
return (n2,t1+t2,c1+c2)
timeoutTime :: Maybe Double -> Int
timeoutTime Nothing = (-1) -- Negative timeout number means no timeout
timeoutTime (Just secs) =
if secs >= (fromIntegral (maxBound::Int)) / 1000000
then error ("TimeLimit too large: " ++ (show secs))
else (round (1000000.0*secs)) -- convert to microsecs
actualTest :: Handle -> String -> Int -> Maybe Double -> Bool
-> TestResult -> IO (Int,Double,Double)
actualTest fhandle l n limit verbose code =
do printLabel fhandle l n
result0 <- timeout (timeoutTime limit)
(limitedTest fhandle n verbose code)
case result0 of
Nothing -> -- timed out: haven't printed message yet
do printTestResult fhandle verbose Timeout
return (n+1,testResult Timeout,1)
Just r -> return r -- already printed message
limitedTest fhandle n verbose code =
-- force evaluation; catch any exceptions
catch (handleTestResult fhandle n verbose code)
(\e -> handleTestResult fhandle n verbose
(Exception (e::SomeException)))
printLabel :: Handle -> String -> Int -> IO ()
printLabel fhandle label num =
do hPrintf fhandle "%30s" label
hPrintf fhandle " %3d" num
hPutStr fhandle " ... "
hFlush fhandle
handleTestResult :: Handle -> Int -> Bool -> TestResult
-> IO (Int,Double,Double)
handleTestResult fhandle n verbose code =
do printTestResult fhandle verbose code -- forces execution of code
return (n+1,testResult code,1) -- doesn't re-evaluate code
printTestResult :: Handle -> Bool -> TestResult -> IO ()
printTestResult fhandle verbose result
= hPutStrLn fhandle (resultMessage verbose result )
resultMessage :: Bool -> TestResult -> String
resultMessage verbose (Succeed norm verb)
  = "PASSED    " ++ (parenthetical $ pick verbose norm (norm ++ verb))
resultMessage verbose (Fail norm verb)
  = "FAILED*** " ++ (parenthetical $ pick verbose norm (norm ++ verb))
resultMessage verbose (Exception e)
= "EXCEPT*** " ++ (parenthetical (show e))
resultMessage verbose (Quality score norm verb)
= "PASSED " ++ (printf "%5.1f" (100*score)) ++ "% " ++
    (parenthetical $ pick verbose norm (norm ++ verb))
resultMessage _ Timeout
= "TIMEOUT**"
testResult (Succeed _ _) = 1.0
testResult (Fail _ _) = 0.0
testResult (Exception _) = 0.0
testResult (Quality val _ _) = val
testResult Timeout = 0.0
pick True _ x = x
pick False x _ = x
parenthetical "" = ""
parenthetical text = "(" ++ text ++ ")"
printQualityResult fhandle = (hPutStrLn fhandle) . show
|
CIS-UoM/assignments
|
COMP90048 Declarative Programming/haskell/1/HaskellTest.hs
|
mit
| 10,710 | 0 | 18 | 2,612 | 2,280 | 1,194 | 1,086 | 142 | 2 |
module JSTrans.Writer where
import JSTrans.AST
import List (intersperse)
import Char (isAlphaNum,isDigit)
import Maybe (isJust,fromJust)
expr :: Expr -> ShowS
stat :: Statement -> ShowS
block :: Block -> ShowS
primaryExpr :: Expr -> ShowS
memberExpr :: Expr -> ShowS
many = foldr (.) id
sepBy a x = foldr (.) id (intersperse x a)
option' = maybe id
isIdentifierPart c = isAlphaNum c || c == '_' || c == '$' || isDigit c -- TODO: Unicode
isOperatorPart c = c `elem` "=<>&|+-"
char = showChar
string = showString
parens = showParen True
squares x = char '[' . x . char ']'
braces x = char '{' . x . char '}'
comma = char ','
colon = char ':'
semi = char ';'
dot = char '.'
ident x [] = x
ident x rest@(y:_) | isIdentifierPart y = x ++ ' ':rest
| otherwise = x ++ rest
operator x [] = x
operator x rest@(y:_) | (isIdentifierPart (head x) && isIdentifierPart y)
|| (isOperatorPart (head x) && isOperatorPart y)
= x ++ ' ':rest
| otherwise = x ++ rest
propertyName (PNIdentifier name) = ident name
propertyName (PNLiteral lit) = literal lit
pattern e (LHSSimple a) = e a
pattern e (LHSArray elems) = squares $ sepBy (map element elems) comma
where
element Nothing = id
element (Just x) = pattern e x
pattern e (LHSObject elems) = braces $ sepBy (map element elems) comma
where
element (name,pat) = propertyName name
. colon
. pattern e pat
patternNoExpr = pattern ident
patternExpr = pattern leftHandSideExpression
functionParameterAndBody fn
= (parens $ sepBy (map patternNoExpr $ functionArguments fn) comma)
. (braces $ many $ map sourceElement $ case functionBody fn of FunctionBody x -> x)
literal :: Literal -> ShowS
literal NullLiteral = ident "null"
literal (NumericLiteral t) = ident t
literal (RegExpLiteral t) = ident t
literal (StringLiteral t) = showString t
literal (BooleanLiteral True) = ident "true"
literal (BooleanLiteral False) = ident "false"
primaryExpr This = ident "this"
primaryExpr (Variable name) = ident name
primaryExpr (Literal lit) = literal lit
primaryExpr (ArrayLiteral elems) = squares $ sepBy (map element elems) comma
where
element Nothing = id
element (Just x) = assignmentExpression x
primaryExpr (ArrayComprehension x f i) = squares $ expr x . (many $ map compFor f) . option' compIf i
where
compFor (k,n,e)
= ident "for"
. (if k == CompForIn
then id
else ident "each")
. (parens $ patternNoExpr n . ident "in" . expr e)
compIf e = ident "if" . (parens $ expr e)
primaryExpr (ObjectLiteral elems) = braces $ sepBy (map element elems) comma
where
element (name,Left e) = propertyName name
. colon
. assignmentExpression e
element (name,Right (kind,fn))
= ident (if kind==Getter then "get" else "set")
. propertyName name
. functionParameterAndBody fn
primaryExpr e = parens $ expr e
memberExpr (FunctionExpression True fn)
| isJust body'
= ident "function"
. option' ident (functionName fn)
. parens (sepBy (map patternNoExpr $ functionArguments fn) comma)
. let c = assignmentExpression body -- TODO: allowIn
in case c "" of
'{':_ -> parens c
_ -> c
where
body' = case functionBody fn of
FunctionBody [Statement (Return b@(Just _))] -> b
_ -> Nothing
body = fromJust body'
memberExpr (FunctionExpression _ fn)
= ident "function"
. option' ident (functionName fn)
. functionParameterAndBody fn
memberExpr (Let vars body)
= ident "let"
. (parens $ sepBy (map varDecl vars) comma)
. assignmentExpression body -- TODO: allowIn
memberExpr (Index x y) = memberExpr x . squares (expr y)
memberExpr (Field x name) = memberExpr x . char '.' . ident name
memberExpr (New ctor args) = ident "new" . memberExpr ctor . arguments args
memberExpr (FuncCall fn args) = memberExpr fn . arguments args
memberExpr e = primaryExpr e
arguments args = parens $ sepBy (map assignmentExpression args) comma
leftHandSideExpression = memberExpr
postfixExpression (Postfix op e) = leftHandSideExpression e . operator op
postfixExpression e = leftHandSideExpression e
unaryExpression (Prefix op e) = operator op . unaryExpression e
unaryExpression e = postfixExpression e
make :: [String] -> (Expr -> ShowS) -> Expr -> ShowS
make' :: [String] -> (Bool -> Expr -> ShowS) -> Bool -> Expr -> ShowS
make1 :: String -> (Bool -> Expr -> ShowS) -> Bool -> Expr -> ShowS
make set super (Binary op' x y)
| op' `elem` set = make set super x . operator op' . super y
make _ super e = super e
make' set super allowIn (Binary op' x y)
| op' `elem` set = make' set super allowIn x . operator op' . super allowIn y
make' _ super allowIn e = super allowIn e
make1 op super allowIn (Binary op' x y)
| op == op' = make1 op super allowIn x . operator op' . super allowIn y
make1 _ super allowIn e = super allowIn e
multiplicativeExpression = make ["*","/","%"] unaryExpression
additiveExpression = make ["+","-"] multiplicativeExpression
shiftExpression = make ["<<",">>",">>>"] additiveExpression
relationalExpressionBase True = make ["<",">","<=",">=","instanceof","in"] shiftExpression
relationalExpressionBase False = make ["<",">","<=",">=","instanceof"] shiftExpression
equalityExpressionBase = make' ["==","!=","===","!=="] relationalExpressionBase
bitwiseANDExpressionBase = make1 "&" equalityExpressionBase
bitwiseXORExpressionBase = make1 "^" bitwiseANDExpressionBase
bitwiseORExpressionBase = make1 "|" bitwiseXORExpressionBase
logicalANDExpressionBase = make1 "&&" bitwiseORExpressionBase
logicalORExpressionBase = make1 "||" logicalANDExpressionBase
conditionalExpressionBase allowIn (Cond x y z)
= logicalORExpressionBase allowIn x
. operator "?" . assignmentExpression y
. operator ":" . assignmentExpressionBase allowIn z
conditionalExpressionBase allowIn e = logicalORExpressionBase allowIn e
assignmentExpressionBase allowIn (Assign op x y)
= patternExpr x -- leftHandSideExpression
. operator op . assignmentExpressionBase allowIn y
assignmentExpressionBase allowIn e = conditionalExpressionBase allowIn e
exprBase allowIn (Binary "," x y)
= exprBase allowIn x . operator "," . assignmentExpressionBase allowIn y
exprBase allowIn e = assignmentExpressionBase allowIn e
assignmentExpression = assignmentExpressionBase True
expr = exprBase True
exprNoIn = exprBase False
---
--- Statements
---
block (Block stats) = braces $ many $ map stat stats
varDecl (name,value) = patternNoExpr name . option' (\e -> operator "=" . assignmentExpression e) value
stat EmptyStat = semi
stat (VarDef kind v) = definitionKind kind . sepBy (map varDecl v) comma . semi
stat (LetStatement v b) = ident "let" . (parens $ sepBy (map varDecl v) comma) . block b
stat (ExpressionStatement e)
= let c = expr e
in case c "" of
('{':_) -> parens c . semi
('f':'u':'n':'c':'t':'i':'o':'n':d:_)
| not $ isIdentifierPart d -> parens c . semi
_ -> c . semi
stat (Return value) = ident "return" . maybe id expr value . semi
stat (Throw value) = ident "throw" . expr value . semi
stat (BlockStatement b) = block b
stat (If cond body@(If _ _ Nothing) else') = ident "if" . parens (expr cond)
. braces (stat body)
. option' (\x -> ident "else" . stat x) else'
stat (If cond body else') = ident "if" . parens (expr cond)
. stat body
. option' (\x -> ident "else" . stat x) else'
stat (While cond body) = ident "while" . parens (expr cond) . stat body
stat (DoWhile cond body) = ident "do" . stat body
. ident "while" . parens (expr cond) . semi
stat (For Nothing b c d) = ident "for"
. parens (semi . option' expr b . semi . option' expr c)
. stat d
stat (For (Just (VarDef kind vars)) b c d)
= ident "for" . parens (definitionKind kind . sepBy (map varDecl vars) comma
. semi . option' expr b . semi . option' expr c)
. stat d
stat (For (Just (ExpressionStatement a)) b c d)
= ident "for" . parens (exprNoIn a . semi . option' expr b
. semi . option' expr c)
. stat d
stat (ForIn (ForInVarDef kind var e) b body)
= ident "for" . parens (definitionKind kind . varDecl (var,e) . ident "in" . expr b)
. stat body
stat (ForIn (ForInLHSExpr a) b body)
= ident "for" . parens (patternExpr a . ident "in" . expr b)
. stat body
stat (ForEach (ForInVarDef kind var e) b body)
= ident "for each" . parens (definitionKind kind . varDecl (var,e)
. ident "in" . expr b)
. stat body
stat (ForEach (ForInLHSExpr a) b body)
= ident "for each" . parens (patternExpr a . ident "in" . expr b)
. stat body
stat (Try body conditionalCatchClauses unconditionalCatchClause finallyClause)
= ident "try" . block body
. many (map c conditionalCatchClauses)
. maybe id u unconditionalCatchClause
. maybe id f finallyClause
where
c (vname,cond,body) = ident "catch"
. parens (patternNoExpr vname . ident "if" . expr cond)
. block body
u (vname,body) = ident "catch" . parens (patternNoExpr vname) . block body
f body = ident "finally" . block body
stat (Switch e clauses) = ident "switch" . (parens $ expr e)
. (braces $ many $ map c clauses)
where
c (CaseClause e s) = ident "case" . expr e . colon . many (map stat s)
c (DefaultClause s) = ident "default" . colon . many (map stat s)
stat (Break label) = ident "break" . maybe id ident label . semi
stat (Continue label) = ident "continue" . maybe id ident label . semi
stat (Labelled label s) = ident label . colon . stat s
stat Debugger = ident "debugger" . semi
definitionKind VariableDefinition = ident "var"
definitionKind ConstantDefinition = ident "const"
definitionKind LetDefinition = ident "let"
sourceElement (Statement s) = stat s
sourceElement (FunctionDeclaration name fn)
= ident "function" . ident name
. functionParameterAndBody fn
program (Program s) = many $ map sourceElement s
|
minoki/jstrans
|
JSTrans/Writer.hs
|
mit
| 10,497 | 269 | 13 | 2,673 | 3,896 | 1,975 | 1,921 | 221 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module Smutt.HTTP.Header where
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HS
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as T
import Data.Monoid
import Prelude hiding (lookup)
newtype Header = Header (HashMap Text Text) deriving (Eq)
toHashMap :: Header -> HashMap Text Text
toHashMap (Header hdr) = hdr
-- | Warning: this function is a bit slow and should generally not be used.
--
fromHashMap :: HashMap Text Text -> Header
fromHashMap = fromList . map (\(k,v) -> (T.toCaseFold k, v)) . HS.toList
toString :: Header -> Text
toString (Header hdr)= HS.foldrWithKey (\ k v s -> s <> k <> ": " <> v <> "\r\n") "" hdr <> "\r\n"
instance Show Header where
show = show . toString
instance Monoid Header where
mempty = Header mempty
mappend (Header a) (Header b) = (Header (mappend a b))
mconcat [] = mempty
mconcat hdrs = Header $ mconcat $ map (\ (Header a) -> a) hdrs
null :: Header -> Bool
null = (==mempty)
size :: Header -> Int
size = HS.size . toHashMap
member :: Text -> Header -> Bool
member key = HS.member (T.toCaseFold key) . toHashMap
lookup :: Text -> Header -> Maybe Text
lookup key = HS.lookup (T.toCaseFold key) . toHashMap
-- | Checks whether the header contains a field k with value v.
fieldIn :: Text -> Text -> Header -> Bool
fieldIn k v hdr = case lookup (T.toCaseFold k) hdr of
Just a -> a == v
_ -> False
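-- A usage sketch (hypothetical header fields); lookups are case-insensitive
-- because keys are case-folded on both insertion and lookup:
--
-- >>> let hdr = fromList [("Content-Type", "text/html"), ("Connection", "close")]
-- >>> fieldIn "content-type" "text/html" hdr
-- True
-- >>> lookup "CONNECTION" hdr
-- Just "close"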
insert :: Text -> Text -> Header -> Header
insert key val = Header . HS.insert (T.toCaseFold key) val . toHashMap
toList :: Header -> [(Text,Text)]
toList (Header hdr) = HS.toList hdr
fromList :: [(Text,Text)] -> Header
fromList = Header . HS.fromList . map (\(k,v) -> (T.toCaseFold k, v))
keys :: Header -> [Text]
keys = HS.keys . toHashMap
|
black0range/Smutt
|
src/Smutt/HTTP/Header.hs
|
mit
| 1,781 | 0 | 12 | 354 | 704 | 381 | 323 | 42 | 2 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.SpeechSynthesisUtterance (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.SpeechSynthesisUtterance
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.SpeechSynthesisUtterance
#else
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/SpeechSynthesisUtterance.hs
|
mit
| 385 | 0 | 5 | 33 | 33 | 26 | 7 | 4 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : EchoClient
-- Copyright : (c) Phil Hargett 2015
-- License : MIT (see LICENSE file)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- Simple echo client for testing out courier.
--
-----------------------------------------------------------------------------
module Main where
-- local imports
import Network.Endpoints
import Network.RPC
import Network.Transport.Sockets.TCP
-- external imports
import Control.Exception
import Data.Serialize
import System.Environment
import System.IO
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
main :: IO ()
main = do
[serverNameStr,clientNameStr] <- getArgs
let server = Name serverNameStr
client = Name clientNameStr
endpoint <- newEndpoint
withTransport (newTCPTransport4 tcpSocketResolver4) $ \transport ->
withEndpoint transport endpoint $
withName endpoint client $
withConnection transport endpoint server $ do
hPutStrLn stdout $ "Started echo client on " ++ (show client) ++ " for " ++ (show server)
finally (echo endpoint server client)
(hPutStrLn stdout $ "\nStopped echo client on " ++ (show client) ++ " for " ++ (show server))
echo :: Endpoint -> Name -> Name -> IO ()
echo endpoint server client = do
text <- hGetLine stdin
let cs = newCallSite endpoint client
response <- call cs server "echo" $ encode text
case decode response of
Left _ -> error "Could not decode message"
Right responseText -> do
hPutStrLn stdout $ "> " ++ responseText
echo endpoint server client
|
hargettp/courier
|
echo/EchoClient.hs
|
mit
| 1,819 | 0 | 18 | 339 | 383 | 193 | 190 | 31 | 2 |
-- | Just the smallest set of lens operators
{-# language RankNTypes #-}
module ZeptoLens where
import Control.Applicative (Const (..))
import Control.Monad.Reader.Class
import Data.Functor.Identity (Identity (..))
type Lens s t a b = forall f . Functor f => (a -> f b) -> s -> f t
over :: Lens s t a b -> (a -> b) -> s -> t
over l f = runIdentity . l (Identity . f)
{-# INLINE over #-}
(%~) :: Lens s t a b -> (a -> b) -> s -> t
(%~) = over
{-# INLINE (%~) #-}
set :: Lens s t a b -> b -> s -> t
set l b = over l (const b)
{-# INLINE set #-}
(.~) :: Lens s t a b -> b -> s -> t
(.~) = set
{-# INLINE (.~) #-}
view :: MonadReader s m => Lens s t a b -> m a
view l = asks (getConst . l Const)
{-# INLINE view #-}
views :: MonadReader s m => Lens s t a b -> (a -> r) -> m r
views l f = asks (getConst . l (Const . f))
{-# INLINE views #-}
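-- A minimal usage sketch; the '_1' lens below is defined here only for
-- illustration and is not part of this module:
--
-- > _1 :: Lens (a, c) (b, c) a b
-- > _1 f (a, c) = (\b -> (b, c)) <$> f a
--
-- >>> set _1 'x' (1 :: Int, True)
-- ('x',True)
-- >>> over _1 (+1) (1 :: Int, ())
-- (2,())
-- >>> view _1 ('a', "rest")
-- 'a'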
|
lambdageek/use-c
|
src/ZeptoLens.hs
|
mit
| 845 | 0 | 10 | 211 | 394 | 214 | 180 | 24 | 1 |
module Main
(
main
) where
import Distribution.Simple
main = defaultMain
|
Qinka/GiveYouAHead
|
Setup.hs
|
mit
| 99 | 0 | 4 | 37 | 19 | 12 | 7 | 5 | 1 |
module Render
(
renderRay
)
where
import Data.Maybe ( fromMaybe )
import Color ( Color )
import Core ( Ray, (|*|), translate )
import Light ( Light, toColor, black )
import Scene ( Scene, Intersection(..)
, sceneIntersection, pointLightSourcesVisibleFrom )
import Surface ( Surface(..) )
renderRay :: Ray -> Scene -> Color
renderRay ray scene =
toColor $ renderRayRecursive scene 4 ray
renderRayRecursive :: Scene -> Int -> Ray -> Light
renderRayRecursive scene level ray
| level <= 0 = black
| otherwise = fromMaybe black maybeColor
where
maybeColor = do
(Intersection rt (Surface _ nrm mat) _ wp) <- sceneIntersection scene ray
let surfaceNormal = nrm wp
let movedFromSurface = translate (surfaceNormal |*| epsilon) wp
let lights = pointLightSourcesVisibleFrom scene movedFromSurface
let recursiveRender = renderRayRecursive scene (level - 1)
return $ mat lights rt wp surfaceNormal recursiveRender
epsilon = 0.0001
|
stu-smith/rendering-in-haskell
|
src/experiment05/Render.hs
|
mit
| 1,060 | 0 | 14 | 282 | 309 | 163 | 146 | 25 | 1 |
-- | Functions to print list components and convert them to strings.
module Printing
( showItem
, showTask
, showGroup
, showBlock
, showTodoList
, printStrings
, writeToFile
) where
import Types
import Data.Time
import Data.List (sort, sortBy, intersperse, intercalate)
import Data.Char (toUpper)
spaces :: Int -> String
spaces num = replicate num ' '
tick :: String
tick = " ✔ "
separator :: String
separator = "____"
noTasks :: String
noTasks = " Nothing yet."
padIndex :: Int -> String
padIndex index
| index < 10 = show index ++ ". "
| otherwise = show index ++ ". "
showItem :: (Char, Item) -> String
showItem (index, item)
| _itemDone item = concat [spaces 4, tick, index : ") ", _itemDesc item]
| otherwise = concat [spaces 8, index : ") ", _itemDesc item]
showPriority :: Priority -> String
showPriority Low = "... "
showPriority Normal = " "
showPriority High = "! "
showAbsDeadline :: Deadline -> String
showAbsDeadline (Abs (Date d)) = "- " ++ formatTime defaultTimeLocale "%A %e %b %G" d
showAbsDeadline (Abs (Time t)) = "- " ++ formatTime defaultTimeLocale "%R, %A %e %b %G" t
showAbsDeadline _ = ""
showTaskHeader :: Int -> Task -> String
showTaskHeader index task
| _done task = concat [tick, padIndex index, text]
| otherwise = concat [spaces 4, padIndex index, text]
where text = concat [_desc task, showPriority (_priority task), showAbsDeadline (_deadline task)]
-- | Converts a task into a string list.
showTask :: (Int, Task) -> [String]
showTask (index, task) = showTaskHeader index task : map showItem indexedItems
where indexedItems = zip ['a'..] (_items task)
showGroupHeader :: TaskGroup -> String
showGroupHeader (TaskGroup (Custom str) _) = map toUpper str
showGroupHeader (TaskGroup time _) = map toUpper $ show time
-- | Converts a task group into a string list.
showGroup :: TaskGroup -> [String]
showGroup group
| null (_tasks group) = showGroupHeader group : [noTasks]
| otherwise = showGroupHeader group : concatMap showTask indexedTasks
where indexedTasks = zip [1..] (sortBy (flip compare) $ _tasks group)
-- | Converts a group block into a string list.
showBlock :: GroupBlock -> [String]
showBlock block = intercalate [""] $ map showGroup sortedGroups
where sortedGroups = sort $ _groups block
showListHeader :: TodoList -> [String]
showListHeader (TodoList (name, date) _) = ["", spaces 4 ++ title, spaces (4 + centOffset) ++ dateString,""]
where title = name ++ "'s To Do List"
dateString = formatTime defaultTimeLocale "%e %b %G" date
centOffset = (length title - length dateString) `div` 2
-- | Converts a to do list into a string list.
showTodoList :: TodoList -> [String]
showTodoList list = showListHeader list ++ intercalate [separator ++ "\n"] (map showBlock sortedBlocks)
where sortedBlocks = sort $ _blocks list
exItems :: [Item]
exItems =
[ Item "Bread" False
, Item "Milk" True
, Item "Toothpaste" False
]
exTask :: Task
exTask = Task "Grocery shopping" exItems (Rel Today) Normal True
exGroup :: TaskGroup
exGroup = TaskGroup (RelTime Today)
[ Task "Do laundry" [] (Rel Today) Low False
, Task "Call Simon" [] (Rel Today) High False
, exTask
, Task "Print tickets" [] (Abs $ Date (fromGregorian 2016 9 24)) High False
, Task "Do important stuff" [] (Abs $ Time (UTCTime (fromGregorian 2005 3 5) (timeOfDayToTime $ dayFractionToTimeOfDay 0.76))) High False
]
exBlock :: GroupBlock
exBlock = GroupBlock Days
[ exGroup
, TaskGroup (RelTime Tomorrow)
[ Task "Hi" [] (Rel Tomorrow) High True
, Task "Email" [] (Rel Tomorrow) Low False
]
]
exBlock2 :: GroupBlock
exBlock2 = GroupBlock Weeks
[ TaskGroup (RelTime ThisWeek) []
, TaskGroup (RelTime NextWeek)
[ Task "Eat"
[ Item "Blabla" True
, Item "Wibble" False
] (Rel NextWeek) Normal False
]
]
exList :: TodoList
exList = TodoList ("Dima", fromGregorian 2016 8 23)
[ exBlock
, exBlock2
]
-- | Print a todo list in the console.
printStrings :: [String] -> IO ()
printStrings = mapM_ putStrLn
-- | Write the to-do list into a file.
writeToFile :: FilePath -> TodoList -> IO ()
writeToFile fileName = writeFile fileName . unlines . showTodoList
|
DimaSamoz/thodo
|
src/Printing.hs
|
mit
| 4,366 | 0 | 15 | 998 | 1,421 | 736 | 685 | 99 | 1 |
module Game.Cosanostra.Selectors
( alive
, dead
, other
, otherAlive
, selectorCheck
) where
import Game.Cosanostra.Effect
import Game.Cosanostra.Expr
import Game.Cosanostra.Types
import Control.Lens
import Data.Maybe
alive :: Selector
alive = Atom Participant `And` Atom Alive
dead :: Selector
dead = Not alive
other :: Selector
other = Atom Participant `And` Not (Atom Self)
otherAlive :: Selector
otherAlive = other `And` alive
-- | Check if a player fulfills a selector.
selectorCheck :: Players -> Player -> Turn -> Phase -> Selector -> Player -> Bool
selectorCheck players source turn phase expr target = exprEval f expr
where
f Participant = target `elem` playerKeys players
f Self = source == target
f Alive = isNothing (target ^. to (playerEffects players source turn phase)
. to effectsCauseOfDeath)
f WasLynched = (target ^. to (playerEffects players source turn phase)
. to effectsCauseOfDeath) == Just Lynched
|
rfw/cosanostra
|
src/Game/Cosanostra/Selectors.hs
|
mit
| 1,033 | 0 | 13 | 251 | 299 | 161 | 138 | -1 | -1 |
module Data.Expenses.Types
( Entry(..)
, Model
, Money(..)
, QueryAttribute(..)
, SimpleTransaction(..)
, entriesFromModel
, modelFromEntries
)
where
import qualified Data.Decimal as D
-- ~ 1234.12 CUR
data Money = Amount
{ moneyAmount :: D.Decimal
, moneyCurrency :: Maybe String
, moneyIsApprox :: Bool
} deriving (Show, Eq)
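-- A value sketch mirroring the comment above (illustrative only):
--
-- > approx :: Money
-- > approx = Amount (D.Decimal 2 123412) (Just "CUR") True -- ~ 1234.12 CUR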
data Entry = Entry
{ entryDate :: (Int, Int, Int) -- (y,m,d)
  , entryPrice   :: (D.Decimal, String)  -- (amount, currency)
, entryRemark :: String
, entryComment :: Maybe String
} deriving (Show, Eq)
newtype Model = Model [Entry]
modelFromEntries :: [Entry] -> Model
modelFromEntries = Model
entriesFromModel :: Model -> [Entry]
entriesFromModel (Model entries) = entries
data QueryAttribute
= Earliest
| Latest
deriving (Show, Eq)
data SimpleTransaction = SimpleTransaction
{ transactionDescription :: String
, transactionAmount :: Money
, transactionDebittedAccount :: String
, transactionCredittedAccount :: String
} deriving (Show, Eq)
instance Ord SimpleTransaction where
compare SimpleTransaction { transactionDescription = a } SimpleTransaction { transactionDescription = b }
= compare a b
|
rgoulter/expenses-csv-utils
|
src/Data/Expenses/Types.hs
|
mit
| 1,236 | 0 | 10 | 276 | 324 | 196 | 128 | 38 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
module App
( app
) where
import Control.Monad.Reader (liftIO, ReaderT)
import Crypto.PasswordStore (makePassword, verifyPassword)
import Data.ByteString.Char8 (pack, unpack)
import Data.ByteString.Lazy (ByteString)
import Data.Time.Clock (addUTCTime, getCurrentTime, NominalDiffTime)
import Database.Persist.Postgresql (entityVal, fromSqlKey, getBy, insertBy, ConnectionPool)
import Servant
import Servant.Auth.Server
import AuthAPI.API (authAPIProxy, AuthAPI, UserCreationResponse(..))
import Models (runDB, User(..), Unique(..))
-- Type alias custom monad to handle passing the Postgres connection pool around
type App = ReaderT ConnectionPool Handler
appToServer :: JWTSettings -> ConnectionPool -> Server (AuthAPI auths)
appToServer jwtSettings pool = enter (runReaderTNat pool) (server jwtSettings)
app :: JWTSettings -> ConnectionPool -> Application
app jwtSettings pool = serveWithContext authAPIProxy context (appToServer jwtSettings pool)
where context = defaultCookieSettings :. jwtSettings :. EmptyContext
server :: JWTSettings -> ServerT (AuthAPI auths) App
server jwts = register jwts :<|> login jwts :<|> verifyJWT
register :: JWTSettings -> User -> App (Headers '[Header "Token" ByteString] UserCreationResponse)
register jwts user = do
  let hashRounds = 17 -- PBKDF1 hashing iterations
hashedPassword <- liftIO $ makePassword (pack $ userPassword user) hashRounds
newOrExistingUser <- runDB $ insertBy user { userPassword = unpack hashedPassword }
case newOrExistingUser of
Left _ -> throwError err500 { errBody = "Username already taken" }
Right newUser -> do
jwt <- createToken 60 user jwts -- 1 hour expiry token
pure $ addHeader jwt UserCreationResponse { userId = fromSqlKey newUser }
-- Log a user in and give them their auth token
login :: JWTSettings -> User -> App (Headers '[Header "Token" ByteString] NoContent)
login jwts login = do
maybeUser <- runDB $ getBy $ UniqueUsername (userUsername login)
case maybeUser of
Nothing -> loginError -- Username doesn't exist
Just user -> if verifyPassword (pack $ userPassword login) (pack $ userPassword $ entityVal user) -- Ensure hashed password matches
then do
jwt <- createToken 60 login jwts -- 1 hour expiry token
pure $ addHeader jwt NoContent
else loginError -- Incorrect password
where loginError = throwError err401 { errBody = "Incorrect username or password" }
-- Create a JWT token with X minutes expiry time
createToken :: NominalDiffTime -> User -> JWTSettings -> App ByteString
createToken expiryMinutes user jwts = do
time <- liftIO getCurrentTime
let expiryTime = addUTCTime (expiryMinutes * 60) time
eitherJWT <- liftIO $ makeJWT user jwts (Just expiryTime)
case eitherJWT of
Left _ -> throwError err500 { errBody = "Unable to create JWT" }
Right jwt -> pure jwt
-- Ensure that a token is valid and signed by our server
verifyJWT :: AuthResult User -> App NoContent
verifyJWT (Authenticated _) = pure NoContent
verifyJWT _ = throwError err401
|
houli/distributed-file-system
|
auth-service/src/App.hs
|
mit
| 3,146 | 0 | 16 | 579 | 836 | 434 | 402 | 54 | 3 |
{-# LANGUAGE OverloadedStrings,TemplateHaskell #-}
module Database.Toxic.TSql.Handler where
import Control.Applicative
import Control.Concurrent
import Control.Concurrent.STM.TChan
import Control.Lens
import Control.Monad.STM
import Control.Monad.Trans
import Control.Monad.Trans.Class
import Control.Monad.Trans.State
import qualified Data.Binary as B
import qualified Data.Binary.Put as B
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BSL
import qualified Data.Vector as V
import Network.Socket hiding (send, sendTo, recv, recvFrom)
import Network.Socket.ByteString
import System.Posix.Types
import Database.Toxic.TSql.Protocol
maximumMessageSize = 1000
data HandlerState = HandlerState {
_handlerSocket :: Socket
}
data HandlerNotification =
NotificationNetworkSend BS.ByteString
| NotificationNetworkReceive BS.ByteString
deriving (Eq, Show)
data HandlerAction =
ActionSendStartupMessage StartupMessage
| ActionSendQuery BS.ByteString
deriving (Eq, Show)
makeLenses ''HandlerState
handlerConnect :: IO (ThreadId, HandlerState)
handlerConnect = do
let family = AF_UNIX
socketType = Stream
protocolNumber = defaultProtocol
address = SockAddrUnix "/var/run/postgresql/.s.PGSQL.5432"
mySocket <- socket family socketType protocolNumber
connect mySocket address
let initialState = HandlerState {
_handlerSocket = mySocket
}
return (error "handlerConnect: threadId shouldn't be used", initialState)
handleAction :: HandlerState -> HandlerAction -> IO ()
handleAction state action = case action of
ActionSendStartupMessage startupMessage ->
processSendStartupMessage state startupMessage
ActionSendQuery query ->
processSendQuery state query
defaultStartupMessage :: StartupMessage
defaultStartupMessage = StartupMessage {
startupMessageProtocolVersion = defaultProtocolVersion,
startupMessageParameters = V.fromList [
("user", "example_user"),
("database", "example"),
("application_name", "psql"),
("client_encoding", "UTF8")
]
}
handlerSend :: HandlerState -> BS.ByteString -> IO ()
handlerSend state message = do
let socket = state ^. handlerSocket
putStrLn $ "Sent message: " ++ show message
send socket message
return ()
waitOnSocket socket = do
putStrLn "Waiting on socket"
  threadWaitRead $ Fd $ fdSocket socket
handlerReceive :: HandlerState -> IO BS.ByteString
handlerReceive state = do
let socket = state ^. handlerSocket
waitOnSocket socket
message <- recv socket maximumMessageSize
putStrLn $ "Received message: " ++ show message
return message
processSendStartupMessage :: HandlerState -> StartupMessage -> IO ()
processSendStartupMessage state startupMessage = do
let message = startupMessage
serializedMessage = BSL.toStrict $ B.runPut $ B.put message
handlerSend state serializedMessage
handlerReceive state >> return ()
processSendQuery :: HandlerState -> BS.ByteString -> IO ()
processSendQuery state message = do
let query = Query { queryQuery = message }
serializedMessage = BSL.toStrict $ B.runPut $ B.put query
handlerSend state serializedMessage
handlerReceive state >> return ()
|
MichaelBurge/ToxicSludgeDB
|
src/Database/Toxic/TSql/Handler.hs
|
mit
| 3,216 | 0 | 12 | 516 | 797 | 423 | 374 | 84 | 2 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
module Betfair.APING.Types.CancelExecutionReport
( CancelExecutionReport(..)
) where
import Betfair.APING.Types.CancelInstructionReport (CancelInstructionReport)
import Betfair.APING.Types.ExecutionReportErrorCode (ExecutionReportErrorCode)
import Betfair.APING.Types.ExecutionReportStatus (ExecutionReportStatus)
import Data.Aeson.TH (Options (omitNothingFields),
defaultOptions,
deriveJSON)
import Protolude
import Text.PrettyPrint.GenericPretty
data CancelExecutionReport = CancelExecutionReport
{ customerRef :: Maybe Text
, status :: ExecutionReportStatus
, errorCode :: Maybe ExecutionReportErrorCode
, marketId :: Maybe Text
, instructionReports :: Maybe [CancelInstructionReport]
} deriving (Eq, Show, Generic, Pretty)
$(deriveJSON defaultOptions {omitNothingFields = True} ''CancelExecutionReport)
|
joe9/betfair-api
|
src/Betfair/APING/Types/CancelExecutionReport.hs
|
mit
| 1,272 | 0 | 10 | 320 | 185 | 115 | 70 | 25 | 0 |
{-# OPTIONS_HADDOCK show-extensions #-}
{-# LANGUAGE FlexibleContexts, ExistentialQuantification #-}
-- |
--
-- Module : EventProbability.Cache
-- Description : 'Cache's used for 'Event' probabilities.
-- License : MIT
--
--
module EventProbability.Cache (
-- * Count Cache
EventCountCache
, countOccurences
, updateCountCache
-- * 'Prob' Caches
, EventProbCache
, EventCondProbCache
-- * Containers
, EventCaches(..)
) where
import Cache
import EventProbability
import qualified Data.Set as Set
import qualified Data.Map as Map
-----------------------------------------------------------------------------
-- | A cache for counting 'Event's.
type EventCountCache cache m = Cache cache m Event Int
-- | Given an event, searches the cache for events that contain
--   the event in question and returns the sum of their counts.
countOccurences :: (EventCountCache cache m) =>
cache Event Int -> Event -> m Int
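-- For instance (schematically, with made-up atoms): asking for the count of
-- the event {Coin = Heads} also adds in the cached counts of richer events
-- such as {Coin = Heads, Die = Six}, since the former is a submap of the latter.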
-- | Updates the event counts in the cache (accumulatively).
updateCountCache :: (EventCountCache cache IO) =>
cache Event Int -> [Event] -> IO (cache Event Int)
countOccurences cache (Event evset) = do
mcs <- filterCacheByKey cache (\(Event k) -> evset `Map.isSubmapOf` k)
return . sum $ do
(_, mc) <- mcs
return mc
updateCountCache cache events = do sequence_ upds
return cache
where upds = do ev <- events
let f = maybe 1 (1+)
return $ updateOrInsert cache f ev
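-- A hedged usage sketch (illustrative only; @cache@, @observed@ and @query@
-- are hypothetical values, and some 'Cache' instance in IO is assumed):
--
-- > do cache' <- updateCountCache cache observed
-- >    n      <- countOccurences cache' query
-- >    print n   -- total count of cached events containing 'query'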
-----------------------------------------------------------------------------
-- | A cache for 'Event's probabilities.
type EventProbCache cache m = Cache cache m Event Prob
-- | A cache for 'Event's conditional probabilities.
type EventCondProbCache cache m = Cache cache m (Event, EvAtom) Prob
-----------------------------------------------------------------------------
-- | A container for 'EventCountCache', 'EventProbCache' and 'EventCondProbCache'.
data EventCaches m = forall cc pc cpc . ( EventCountCache cc m
, EventProbCache pc m
, EventCondProbCache cpc m ) =>
EvCaches { countCache :: cc Event Int
, probCache :: pc Event Prob
, condProbCache :: cpc (Event, EvAtom) Prob
}
-----------------------------------------------------------------------------
|
fehu/min-dat--naive-bayes
|
src/EventProbability/Cache.hs
|
mit
| 2,436 | 0 | 13 | 619 | 438 | 249 | 189 | 36 | 1 |
module FillDB where
import Database.HDBC
import Database.HDBC.Sqlite3
import System.Random
import Control.Monad
fill = do
gen <- getStdGen
males <- replicateM 50 $ mkPeople maleNames gen
females <- replicateM 50 $ mkPeople femaleNames gen
let couples = zipWith (\x y -> [x!!0, y!!0]) females males
--conn <- connectPostgreSQL []
conn <- connectSqlite3 "test.db"
quickQuery' conn "DROP TABLE IF EXISTS people" []
quickQuery' conn "DROP TABLE IF EXISTS couples" []
quickQuery' conn "CREATE TABLE people (name TEXT, age INT)" []
quickQuery' conn "CREATE TABLE couples (her TEXT, him text)" []
insPeople <- prepare conn "INSERT INTO people VALUES ((?), (?))"
executeMany insPeople males
executeMany insPeople females
insCouples <- prepare conn "INSERT INTO couples VALUES ((?), (?))"
executeMany insCouples couples
-- newStdGen
-- let male2 = mkPerson maleNames gen
commit conn
res <- quickQuery' conn "SELECT * FROM people LIMIT 10" []
disconnect conn
return res
fillN n = do
gen <- getStdGen
males <- replicateM n $ mkPeople maleNames gen
females <- replicateM n $ mkPeople femaleNames gen
let couples = zipWith (\x y -> [x!!0, y!!0]) females males
--conn <- connectPostgreSQL []
conn <- connectSqlite3 "test.db"
quickQuery' conn "DROP TABLE IF EXISTS people" []
quickQuery' conn "DROP TABLE IF EXISTS couples" []
quickQuery' conn "CREATE TABLE people (name TEXT, age INT)" []
quickQuery' conn "CREATE TABLE couples (her TEXT, him text)" []
insPeople <- prepare conn "INSERT INTO people VALUES ((?), (?))"
executeMany insPeople males
executeMany insPeople females
insCouples <- prepare conn "INSERT INTO couples VALUES ((?), (?))"
executeMany insCouples couples
-- newStdGen
-- let male2 = mkPerson maleNames gen
commit conn
res <- quickQuery' conn "SELECT * FROM people LIMIT 10" []
disconnect conn
return res
-- let males = take 50000 $
addTestTable = do
conn <- connectSqlite3 "test.db"
quickQuery' conn "CREATE TABLE temp (temp2 TEXT)" []
commit conn
disconnect conn
removeTestTable = do
conn <- connectSqlite3 "test.db"
quickQuery' conn "DROP TABLE IF EXISTS temp" []
commit conn
disconnect conn
fillDefault = do
--conn <- connectPostgreSQL []
conn <- connectSqlite3 "test.db"
quickQuery' conn "DROP TABLE IF EXISTS people" []
quickQuery' conn "DROP TABLE IF EXISTS couples" []
quickQuery' conn "CREATE TABLE people (name TEXT, age INT)" []
quickQuery' conn "CREATE TABLE couples (her TEXT, him text)" []
insPeople <- prepare conn "INSERT INTO people VALUES ((?), (?))"
executeMany insPeople [[toSql "Drew", toSql (31::Int)], [toSql "Bert", toSql (55::Int)], [toSql "Fred", toSql (60::Int)]]
executeMany insPeople [[toSql "Alex", toSql (60::Int)], [toSql "Cora", toSql (33::Int)], [toSql "Edna", toSql (21::Int)]]
insCouples <- prepare conn "INSERT INTO couples VALUES ((?), (?))"
executeMany insCouples [[toSql "Alex", toSql "Bert"], [toSql "Cora", toSql "Drew"], [toSql "Edna", toSql "Fred"]]
maleNames = ["Tom", "John", "Ron", "Harry", "Mark", "Fred", "Richard", "Robin", "Christian", "Ben", "Bert", "Drew"]
femaleNames = ["Laura", "Cindy", "Lisa", "Louis", "Jeniffer", "Catherine", "Kate", "Carol", "Jane", "Alex", "Cora", "Edna"]
age gen = fst $ randomR (18,80) gen :: Int
mkPerson names gen = [toSql (names !! ((fst (randomR (0, 11) gen))::Int)), toSql $ (age gen)]
mkPeople names gen = do
gen <- newStdGen
return $ mkPerson names gen
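-- A hedged GHCi session sketch (assumes a writable "test.db" in the current
-- directory; output will vary with the random generator):
--
-- > ghci> fillDefault                 -- three fixed couples
-- > ghci> rows <- fill                -- 100 random people, 50 couples
-- > ghci> mapM_ print rows            -- first 10 rows of the people table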
|
juventietis/HLINQ
|
Tests/FillDB.hs
|
mit
| 3,428 | 4 | 14 | 581 | 1,060 | 514 | 546 | 71 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Test.Hspec.LawsSpec (main, spec) where
import Test.Hspec
import Test.Hspec.Runner
import Test.QuickCheck
import Data.Monoid
import Test.Hspec.Laws
main :: IO ()
main = hspec spec
newtype Minus = Minus Int
deriving (Eq, Show, Num, Arbitrary)
instance Monoid Minus where
mempty = 0
mappend = (-)
spec :: Spec
spec = do
describe "shouldSatisfyMonoidLaws" $ do
it "succeeds for a valid Monoid instance" $ do
hspecWithResult defaultConfig (shouldSatisfyMonoidLaws (undefined :: [Int])) `shouldReturn` Summary 3 0
it "fails for a broken Monoid instance" $ do
hspecWithResult defaultConfig (shouldSatisfyMonoidLaws (undefined :: Minus)) `shouldReturn` Summary 3 2
|
hspec/hspec-laws
|
test/Test/Hspec/LawsSpec.hs
|
mit
| 794 | 0 | 18 | 181 | 213 | 116 | 97 | 21 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Lib.Set
( filterA, partitionA
) where
import Data.Set (Set)
import qualified Data.Set as S
import qualified Lib.List as L
import Prelude.Compat
both :: (a -> b) -> (a, a) -> (b, b)
both f (x, y) = (f x, f y)
filterA :: (Applicative f, Ord k) => (k -> f Bool) -> Set k -> f (Set k)
filterA p = fmap S.fromAscList . L.filterA p . S.toAscList
partitionA :: (Applicative f, Ord k) => (k -> f Bool) -> Set k -> f (Set k, Set k)
partitionA p = fmap (both S.fromAscList) . L.partitionA p . S.toAscList
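-- A small worked example (added for illustration): with @f = Maybe@, 'filterA'
-- behaves like an effect-aware 'Data.Set.filter', and 'partitionA' splits the
-- set following the usual 'partition' convention (passing elements first):
--
-- > filterA    (Just . even) (S.fromList [1..5]) == Just (S.fromList [2,4])
-- > partitionA (Just . even) (S.fromList [1..5]) == Just (S.fromList [2,4], S.fromList [1,3,5])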
|
sinelaw/buildsome
|
src/Lib/Set.hs
|
gpl-2.0
| 566 | 0 | 10 | 134 | 274 | 147 | 127 | 13 | 1 |
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE ScopedTypeVariables,DeriveDataTypeable,TupleSections #-}
module Channel where
import Control.Concurrent
import Control.Monad
import Control.Applicative
import qualified Control.Exception as E
import Data.ByteString (ByteString)
import Data.Typeable
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Network.Socket as NS
import qualified Network.Socket.ByteString as NSB
import qualified Network.WebSocket as W
import Text.Printf
import System.Posix
import qualified Tools.Argo as AR
import Tools.Log
import Tools.Serial
import Tools.IfM
import Settings
import Types
import Domain
import Foreign (castPtr)
import Data.ByteString.Internal ( createAndTrim )
import Data.ByteString.Unsafe ( unsafeUseAsCStringLen )
data Channel
= StdSocket !NS.Socket Closed
| ArgoSocket !Fd Closed
| FdChann !Fd Closed
| WebSocketCh !W.WebSocket Channel Closed
type Closed = MVar Bool
instance Show Channel where
show (StdSocket s _) = show s
show (ArgoSocket f _) = printf "<argo: %s>" (show f)
show (FdChann fd _) = printf "<fd: %s>" (show fd)
show (WebSocketCh _ ch _) = printf "<websocket: %s>" (show ch)
makeIncomingTransport :: IncomingChannel -> IO Channel
makeIncomingTransport (FromArgo port) =
do sock <- AR.socket NS.Stream
AR.bind sock (AR.Addr port invalidDomID) invalidDomID
AR.listen sock 5
closed <- newMVar False
return $ ArgoSocket sock closed
makeIncomingTransport (FromTCP port) =
do sock <- NS.socket NS.AF_INET NS.Stream NS.defaultProtocol
NS.bindSocket sock (NS.SockAddrInet (fromIntegral port) NS.iNADDR_ANY)
NS.listen sock 5
closed <- newMVar False
return $ StdSocket sock closed
makeIncomingTransport (FromUnixSocket path) =
do sock <- NS.socket NS.AF_UNIX NS.Stream NS.defaultProtocol
NS.bindSocket sock (NS.SockAddrUnix path)
NS.listen sock 5
closed <- newMVar False
return $ StdSocket sock closed
makeIncomingTransport (FromSerial path) =
do f <- openSerial path
closed <- newMVar False
return $ FdChann f closed
wrapInWebSocket :: Channel -> IO Channel
wrapInWebSocket ch
= WebSocketCh <$> W.create (recv ch) (send ch)
<*> pure ch
<*> newMVar False
makeOutgoingTransport :: OutgoingChannel -> IO (Channel,DomID)
makeOutgoingTransport (ToArgo port dom) =
do sock <- AR.socket NS.Stream
domid <- case dom of ByID id -> return id
ByUuid u -> resolv u =<< domidOfUuid u
AR.connect sock (AR.Addr port (fromIntegral domid))
closed <- newMVar False
return (ArgoSocket sock closed, domid)
where
resolv _ (Just domid) = return domid
resolv uuid Nothing = E.throw (NoUuid uuid)
makeOutgoingTransport (ToUnixSocket path) =
do sock <- NS.socket NS.AF_UNIX NS.Stream NS.defaultProtocol
NS.connect sock (NS.SockAddrUnix path)
closed <- newMVar False
return (StdSocket sock closed, currentDomain)
makeOutgoingTransport (ToSerial path) =
(,currentDomain) <$> (FdChann <$> openSerial path <*> newMVar False)
accept :: Channel -> IO (Channel, DomID)
accept (ArgoSocket s _) =
do (s',addr) <- AR.accept s
info $ printf "incoming connection from domain %d, port 0x%0x, fd is %s" (AR.addrDomID addr) (AR.addrPort addr) (show s')
closed <- newMVar False
return (ArgoSocket s' closed, fromIntegral $ AR.addrDomID addr)
accept (StdSocket s _) =
do (s',addr) <- NS.accept s
info $ printf "incoming connection from socket %s" (show s')
closed <- newMVar False
return (StdSocket s' closed, currentDomain)
accept ch@(FdChann fd _) = return (ch,-1)
accept (WebSocketCh _ ch _) = error "websockets accept: unsupported"
handshake :: Channel -> IO ()
handshake (WebSocketCh s _ _) = W.handshake s
handshake _ = return ()
recv :: Channel -> Int -> IO ByteString
recv x@(ArgoSocket fd _) sz = AR.recv fd sz 0
recv x@(StdSocket s _) sz = NSB.recv s sz
recv x@(FdChann fd _) sz =
do threadWaitRead fd
createAndTrim sz $ \ptr -> fromIntegral <$> fdReadBuf fd ptr (fromIntegral sz)
recv (WebSocketCh s _ _) sz =
frame <$> W.recvFrame s sz
where
frame Nothing = B.empty
frame (Just (t,d)) = B.concat ( BL.toChunks d )
send :: Channel -> ByteString -> IO Int
send (ArgoSocket fd _) buf = AR.send fd buf 0
send (StdSocket s _) buf = NSB.send s buf
send x@(FdChann fd _) buf = liftM fromIntegral .
unsafeUseAsCStringLen buf $ \(ptr,sz) ->
do threadWaitWrite fd
fromIntegral <$> fdWriteBuf fd (castPtr ptr) (fromIntegral sz)
send (WebSocketCh s _ _) buf = W.sendFrame s (W.Text, BL.fromChunks [buf]) >> return (B.length buf)
send_all :: Channel -> ByteString -> IO ()
send_all s buf =
send s buf >>= \sent ->
when (sent < B.length buf) $
do warn $ printf "short sent over %s -> %d out of %d" (show s) sent (B.length buf)
send_all s (B.drop sent buf)
shutdownSend :: Channel -> IO ()
shutdownSend (StdSocket s closed) = NS.shutdown s NS.ShutdownSend
shutdownSend _ = return ()
shutdownRecv :: Channel -> IO ()
shutdownRecv (StdSocket s closed) = NS.shutdown s NS.ShutdownReceive
shutdownRecv _ = return ()
close :: Channel -> IO ()
close (ArgoSocket s closed) = perhaps closed (AR.close s)
close (StdSocket s closed) = perhaps closed (NS.sClose s)
close (FdChann fd closed) = perhaps closed (closeFd fd)
close (WebSocketCh s ch closed) = perhaps closed $ {- E.finally (W.shutdown s) -} (close ch)
perhaps closed f = modifyMVar_ closed $ \c -> True <$ unless c f
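-- A hedged end-to-end sketch (not part of the original module): accept a
-- single TCP connection and echo one read back to the peer. @echoOnce@ and
-- the port number are illustrative only.
--
-- > echoOnce :: IO ()
-- > echoOnce = do
-- >   listener     <- makeIncomingTransport (FromTCP 8080)
-- >   (peer, _dom) <- accept listener
-- >   msg          <- recv peer 4096
-- >   send_all peer msg
-- >   close peer >> close listener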
data BouncerErr = EOF Channel
| ShortSend Channel Int Int
| AuthError Channel String
| NoUuid Uuid
deriving ( Typeable )
instance E.Exception BouncerErr
instance Show BouncerErr where
show (EOF s) = printf "%s: end of stream reached" (show s)
show (ShortSend s recv sent) = printf "%s send short: received %d, sent %d" (show s) recv sent
show (AuthError s m) = printf "%s failed to authenticate %s" (show s) m
show (NoUuid uuid) = printf "no domain with uuid %s" (show uuid)
|
OpenXT/manager
|
rpc-proxy/Channel.hs
|
gpl-2.0
| 6,964 | 0 | 14 | 1,538 | 2,279 | 1,130 | 1,149 | 152 | 3 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.Rpc (testRpc) where
import Test.QuickCheck
import Test.QuickCheck.Monadic (monadicIO, run, stop)
import Control.Applicative
import qualified Data.Map as Map
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.Objects (genInst)
import qualified Ganeti.Rpc as Rpc
import qualified Ganeti.Objects as Objects
import qualified Ganeti.Types as Types
import qualified Ganeti.JSON as JSON
import Ganeti.Types
instance Arbitrary Rpc.RpcCallInstanceConsoleInfo where
arbitrary = Rpc.RpcCallInstanceConsoleInfo <$> genConsoleInfoCallParams
genStorageUnit :: Gen StorageUnit
genStorageUnit = do
storage_type <- arbitrary
storage_key <- genName
storage_es <- arbitrary
return $ addParamsToStorageUnit storage_es (SURaw storage_type storage_key)
genStorageUnits :: Gen [StorageUnit]
genStorageUnits = do
num_storage_units <- choose (0, 5)
vectorOf num_storage_units genStorageUnit
genStorageUnitMap :: Gen (Map.Map String [StorageUnit])
genStorageUnitMap = do
num_nodes <- choose (0,5)
node_uuids <- vectorOf num_nodes genName
storage_units_list <- vectorOf num_nodes genStorageUnits
return $ Map.fromList (zip node_uuids storage_units_list)
-- FIXME: Generate more interesting hvparams
-- | Generate Hvparams
genHvParams :: Gen Objects.HvParams
genHvParams = return $ JSON.GenericContainer Map.empty
-- | Generate hypervisor specifications to be used for the NodeInfo call
genHvSpecs :: Gen [(Types.Hypervisor, Objects.HvParams)]
genHvSpecs = do
numhv <- choose (0, 5)
hvs <- vectorOf numhv arbitrary
hvparams <- vectorOf numhv genHvParams
let specs = zip hvs hvparams
return specs
instance Arbitrary Rpc.RpcCallAllInstancesInfo where
arbitrary = Rpc.RpcCallAllInstancesInfo <$> genHvSpecs
instance Arbitrary Rpc.RpcCallInstanceList where
arbitrary = Rpc.RpcCallInstanceList <$> arbitrary
instance Arbitrary Rpc.RpcCallNodeInfo where
arbitrary = Rpc.RpcCallNodeInfo <$> genStorageUnitMap <*> genHvSpecs
-- | Generates per-instance console info params for the 'InstanceConsoleInfo'
-- call.
genConsoleInfoCallParams :: Gen [(String, Rpc.InstanceConsoleInfoParams)]
genConsoleInfoCallParams = do
numInstances <- choose (0, 3)
names <- vectorOf numInstances arbitrary
params <- vectorOf numInstances genInstanceConsoleInfoParams
return $ zip names params
-- | Generates parameters for the console info call, consisting of an instance
-- object, node object, 'HvParams', and 'FilledBeParams'.
genInstanceConsoleInfoParams :: Gen Rpc.InstanceConsoleInfoParams
genInstanceConsoleInfoParams = Rpc.InstanceConsoleInfoParams <$>
genInst <*> arbitrary <*> arbitrary <*> genHvParams <*> arbitrary
-- | Monadic check that, for an offline node and a call that does not support
-- offline nodes, we get an OfflineNodeError response.
runOfflineTest :: (Rpc.Rpc a b, Eq b, Show b) => a -> Property
runOfflineTest call =
forAll (arbitrary `suchThat` Objects.nodeOffline) $ \node -> monadicIO $ do
res <- run $ Rpc.executeRpcCall [node] call
stop $ res ==? [(node, Left Rpc.OfflineNodeError)]
prop_noffl_request_allinstinfo :: Rpc.RpcCallAllInstancesInfo -> Property
prop_noffl_request_allinstinfo = runOfflineTest
prop_noffl_request_instconsinfo :: Rpc.RpcCallInstanceConsoleInfo -> Property
prop_noffl_request_instconsinfo = runOfflineTest
prop_noffl_request_instlist :: Rpc.RpcCallInstanceList -> Property
prop_noffl_request_instlist = runOfflineTest
prop_noffl_request_nodeinfo :: Rpc.RpcCallNodeInfo -> Property
prop_noffl_request_nodeinfo = runOfflineTest
testSuite "Rpc"
[ 'prop_noffl_request_allinstinfo
, 'prop_noffl_request_instconsinfo
, 'prop_noffl_request_instlist
, 'prop_noffl_request_nodeinfo
]
|
badp/ganeti
|
test/hs/Test/Ganeti/Rpc.hs
|
gpl-2.0
| 4,574 | 0 | 14 | 639 | 816 | 439 | 377 | 75 | 1 |
module Main where
import Options.Applicative
import System.IO (IOMode (..), withFile)
import HEP.Analysis.Histogram1D (showHist1D, unitNormalize)
import HEP.Analysis.HepMC.Photon (photonSpectrum)
main :: IO ()
main = execParser opts >>= getSpectrum
where opts = info (helper <*> cmdoptions)
(fullDesc
<> progDesc "Calculate photon energy spectrum from the HepMC data"
<> header "photonspectrum - calculate photon energy spectrum")
getSpectrum :: Args -> IO ()
getSpectrum (Args infile n l u) =
withFile infile ReadMode (photonSpectrum
(read n :: Int)
(read l :: Double)
(read u :: Double))
>>= (putStr . showHist1D . unitNormalize)
data Args = Args { input :: String
, nbin :: String
, low :: String
, upper :: String }
cmdoptions :: Parser Args
cmdoptions = Args <$>
argument str (metavar "INPUT"
<> help "Input HepMC file (ex: data.hepmc)")
<*> strOption (long "nbin"
<> short 'n'
<> metavar "nbin"
<> help "Number of bins")
<*> strOption (long "low"
<> short 'l'
<> metavar "low"
<> help "Lower bound")
<*> strOption (long "upper"
<> short 'u'
<> metavar "upper"
<> help "Upper bound")
|
cbpark/PhotonSpectrum
|
src/photonspectrum.hs
|
gpl-3.0
| 1,675 | 0 | 13 | 757 | 373 | 194 | 179 | 38 | 1 |
{-# LANGUAGE StandaloneDeriving #-}
{-|
An 'Amount' is some quantity of money, shares, or anything else.
A simple amount is a 'Commodity', quantity pair:
@
$1
£-50
EUR 3.44
GOOG 500
1.5h
90 apples
0
@
An amount may also have a per-unit price, or conversion rate, in terms
of some other commodity. If present, this is displayed after \@:
@
EUR 3 \@ $1.35
@
A 'MixedAmount' is zero or more simple amounts. Mixed amounts are
usually normalised so that there is no more than one amount in each
commodity, and no zero amounts (or there is just a single zero amount
and no others):
@
$50 + EUR 3
16h + $13.55 + AAPL 500 + 6 oranges
0
@
We can do limited arithmetic with simple or mixed amounts: either
price-preserving arithmetic with similarly-priced amounts, or
price-discarding arithmetic which ignores and discards prices.
-}
-- XXX due for review/rewrite
module Hledger.Data.Amount (
amounts,
canonicaliseAmount,
canonicaliseMixedAmount,
convertMixedAmountToSimilarCommodity,
costOfAmount,
costOfMixedAmount,
divideAmount,
divideMixedAmount,
isNegativeMixedAmount,
isReallyZeroMixedAmountCost,
isZeroMixedAmount,
maxprecision,
maxprecisionwithpoint,
missingamt,
normaliseMixedAmount,
nullamt,
nullmixedamt,
punctuatethousands,
setAmountPrecision,
setMixedAmountPrecision,
showAmountDebug,
showAmountWithoutPrice,
showMixedAmount,
showMixedAmountDebug,
showMixedAmountOrZero,
showMixedAmountOrZeroWithoutPrice,
showMixedAmountWithoutPrice,
showMixedAmountWithPrecision,
sumMixedAmountsPreservingHighestPrecision,
tests_Hledger_Data_Amount
)
where
import Data.Char (isDigit)
import Data.List
import Data.Map (findWithDefault)
import Data.Ord
import Test.HUnit
import Text.Printf
import qualified Data.Map as Map
import Hledger.Data.Types
import Hledger.Data.Commodity
import Hledger.Utils
instance Show Amount where show = showAmount
instance Show MixedAmount where show = showMixedAmount
deriving instance Show HistoricalPrice
instance Num Amount where
abs (Amount c q p) = Amount c (abs q) p
signum (Amount c q p) = Amount c (signum q) p
fromInteger i = Amount (comm "") (fromInteger i) Nothing
(+) = similarAmountsOp (+)
(-) = similarAmountsOp (-)
(*) = similarAmountsOp (*)
instance Num MixedAmount where
fromInteger i = Mixed [Amount (comm "") (fromInteger i) Nothing]
negate (Mixed as) = Mixed $ map negateAmountPreservingPrice as
where negateAmountPreservingPrice a = (-a){price=price a}
(+) (Mixed as) (Mixed bs) = normaliseMixedAmount $ Mixed $ as ++ bs
(*) = error' "programming error, mixed amounts do not support multiplication"
abs = error' "programming error, mixed amounts do not support abs"
signum = error' "programming error, mixed amounts do not support signum"
-- | Apply a binary arithmetic operator to two amounts, after converting
-- the first to the commodity (and display precision) of the second in a
-- simplistic way. This should be used only for two amounts in the same
-- commodity, since the conversion rate is assumed to be 1.
-- NB preserving the second commodity is preferred since sum and other
-- folds start with the no-commodity zero amount.
similarAmountsOp :: (Double -> Double -> Double) -> Amount -> Amount -> Amount
similarAmountsOp op a (Amount bc bq _) =
Amount bc (quantity (convertAmountToSimilarCommodity bc a) `op` bq) Nothing
-- | Convert an amount to the specified commodity, assuming an exchange rate of 1.
convertAmountToSimilarCommodity :: Commodity -> Amount -> Amount
convertAmountToSimilarCommodity c (Amount _ q _) = Amount c q Nothing
-- | Convert a mixed amount to the specified commodity, assuming an exchange rate of 1.
convertMixedAmountToSimilarCommodity :: Commodity -> MixedAmount -> Amount
convertMixedAmountToSimilarCommodity c (Mixed as) = Amount c total Nothing
where
total = sum $ map (quantity . convertAmountToSimilarCommodity c) as
-- | Convert an amount to the commodity of its saved price, if any. Notes:
-- - price amounts must be MixedAmounts with exactly one component Amount (or there will be a runtime error)
-- - price amounts should be positive, though this is not currently enforced
costOfAmount :: Amount -> Amount
costOfAmount a@(Amount _ q price) =
case price of
Nothing -> a
Just (UnitPrice (Mixed [Amount pc pq Nothing])) -> Amount pc (pq*q) Nothing
Just (TotalPrice (Mixed [Amount pc pq Nothing])) -> Amount pc (pq*signum q) Nothing
_ -> error' "costOfAmount: Malformed price encountered, programmer error"
-- | Get the string representation of an amount, based on its commodity's
-- display settings except using the specified precision.
showAmountWithPrecision :: Int -> Amount -> String
showAmountWithPrecision p = showAmount . setAmountPrecision p
setAmountPrecision p a@Amount{commodity=c} = a{commodity=c{precision=p}}
-- XXX refactor
-- | Get the unambiguous string representation of an amount, for debugging.
showAmountDebug :: Amount -> String
showAmountDebug (Amount c q pri) = printf "Amount {commodity = %s, quantity = %s, price = %s}"
(show c) (show q) (maybe "" showPriceDebug pri)
-- | Get the string representation of an amount, without any \@ price.
showAmountWithoutPrice :: Amount -> String
showAmountWithoutPrice a = showAmount a{price=Nothing}
-- | Get the string representation of an amount, without any price or commodity symbol.
showAmountWithoutPriceOrCommodity :: Amount -> String
showAmountWithoutPriceOrCommodity a@Amount{commodity=c} = showAmount a{commodity=c{symbol=""}, price=Nothing}
showPrice :: Price -> String
showPrice (UnitPrice pa) = " @ " ++ showMixedAmount pa
showPrice (TotalPrice pa) = " @@ " ++ showMixedAmount pa
showPriceDebug :: Price -> String
showPriceDebug (UnitPrice pa) = " @ " ++ showMixedAmountDebug pa
showPriceDebug (TotalPrice pa) = " @@ " ++ showMixedAmountDebug pa
-- | Get the string representation of an amount, based on its commodity's
-- display settings. Amounts which look like zero are rendered without sign or commodity.
showAmount :: Amount -> String
showAmount (Amount (Commodity {symbol="AUTO"}) _ _) = "" -- can appear in an error message
showAmount a@(Amount (Commodity {symbol=sym,side=side,spaced=spaced}) _ pri) =
case side of
L -> printf "%s%s%s%s" sym' space quantity' price
R -> printf "%s%s%s%s" quantity' space sym' price
where
quantity = showamountquantity a
displayingzero = null $ filter (`elem` "123456789") $ quantity
(quantity',sym') | displayingzero = ("0","")
| otherwise = (quantity,quoteCommoditySymbolIfNeeded sym)
space = if (not (null sym') && spaced) then " " else ""
price = maybe "" showPrice pri
-- | Get the string representation of the number part of an amount,
-- using the display settings from its commodity.
showamountquantity :: Amount -> String
showamountquantity (Amount (Commodity {decimalpoint=d,precision=p,separator=s,separatorpositions=spos}) q _) =
punctuatenumber d s spos $ qstr
where
-- isint n = fromIntegral (round n) == n
qstr -- p == maxprecision && isint q = printf "%d" (round q::Integer)
| p == maxprecisionwithpoint = printf "%f" q
| p == maxprecision = chopdotzero $ printf "%f" q
| otherwise = printf ("%."++show p++"f") q
chopdotzero str = reverse $ case reverse str of
'0':'.':s -> s
s -> s
-- | A special precision value meaning show all available digits.
maxprecision = 999998
-- | Similar, forces display of a decimal point.
maxprecisionwithpoint = 999999
-- | Replace a number string's decimal point with the specified character,
-- and add the specified digit group separators.
punctuatenumber :: Char -> Char -> [Int] -> String -> String
punctuatenumber dec sep grps str = sign ++ reverse (addseps sep (extend grps) (reverse int)) ++ frac''
where
(sign,num) = break isDigit str
(int,frac) = break (=='.') num
frac' = dropWhile (=='.') frac
frac'' | null frac' = ""
| otherwise = dec:frac'
extend [] = []
extend gs = init gs ++ repeat (last gs)
addseps _ [] str = str
addseps sep (g:gs) str
| length str <= g = str
| otherwise = let (s,rest) = splitAt g str
in s ++ [sep] ++ addseps sep gs rest
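-- A worked example (added for illustration): a continental-style rendering
-- with '.' as the digit group separator and ',' as the decimal point:
--
-- > punctuatenumber ',' '.' [3] "-1234.5" == "-1.234,5"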
-- | Add thousands-separating commas to a decimal number string
punctuatethousands :: String -> String
punctuatethousands s =
sign ++ addcommas int ++ frac
where
(sign,num) = break isDigit s
(int,frac) = break (=='.') num
addcommas = reverse . concat . intersperse "," . triples . reverse
triples [] = []
triples l = take 3 l : triples (drop 3 l)
-- | Does this amount appear to be zero when displayed with its given precision ?
isZeroAmount :: Amount -> Bool
isZeroAmount = null . filter (`elem` "123456789") . showAmountWithoutPriceOrCommodity
-- | Is this amount "really" zero, regardless of the display precision ?
-- Since we are using floating point, for now just test to some high precision.
isReallyZeroAmount :: Amount -> Bool
isReallyZeroAmount = null . filter (`elem` "123456789") . printf ("%."++show zeroprecision++"f") . quantity
where zeroprecision = 8
-- | Is this amount negative ? The price is ignored.
isNegativeAmount :: Amount -> Bool
isNegativeAmount Amount{quantity=q} = q < 0
-- | Access a mixed amount's components.
amounts :: MixedAmount -> [Amount]
amounts (Mixed as) = as
-- | Does this mixed amount appear to be zero when displayed with its given precision ?
isZeroMixedAmount :: MixedAmount -> Bool
isZeroMixedAmount = all isZeroAmount . amounts . normaliseMixedAmount
-- | Is this mixed amount "really" zero ? See isReallyZeroAmount.
isReallyZeroMixedAmount :: MixedAmount -> Bool
isReallyZeroMixedAmount = all isReallyZeroAmount . amounts . normaliseMixedAmount
-- | Is this mixed amount negative, if it can be normalised to a single commodity ?
isNegativeMixedAmount :: MixedAmount -> Maybe Bool
isNegativeMixedAmount m = case as of [a] -> Just $ isNegativeAmount a
_ -> Nothing
where
as = amounts $ normaliseMixedAmount m
-- | Is this mixed amount "really" zero, after converting to cost
-- commodities where possible ?
isReallyZeroMixedAmountCost :: MixedAmount -> Bool
isReallyZeroMixedAmountCost = isReallyZeroMixedAmount . costOfMixedAmount
-- -- | MixedAmount derives Eq in Types.hs, but that doesn't know that we
-- -- want $0 = EUR0 = 0. Yet we don't want to drag all this code in there.
-- -- When zero equality is important, use this, for now; should be used
-- -- everywhere.
-- mixedAmountEquals :: MixedAmount -> MixedAmount -> Bool
-- mixedAmountEquals a b = amounts a' == amounts b' || (isZeroMixedAmount a' && isZeroMixedAmount b')
-- where a' = normaliseMixedAmount a
-- b' = normaliseMixedAmount b
-- | Get the string representation of a mixed amount, showing each of
-- its component amounts. NB a mixed amount can have an empty amounts
-- list in which case it shows as \"\".
showMixedAmount :: MixedAmount -> String
showMixedAmount m = vConcatRightAligned $ map show $ amounts $ normaliseMixedAmount m
setMixedAmountPrecision :: Int -> MixedAmount -> MixedAmount
setMixedAmountPrecision p (Mixed as) = Mixed $ map (setAmountPrecision p) as
-- | Get the string representation of a mixed amount, showing each of its
-- component amounts with the specified precision, ignoring their
-- commodities' display precision settings. NB a mixed amount can have an
-- empty amounts list in which case it shows as \"\".
showMixedAmountWithPrecision :: Int -> MixedAmount -> String
showMixedAmountWithPrecision p m =
vConcatRightAligned $ map (showAmountWithPrecision p) $ amounts $ normaliseMixedAmount m
-- | Get an unambiguous string representation of a mixed amount for debugging.
showMixedAmountDebug :: MixedAmount -> String
showMixedAmountDebug m = printf "Mixed [%s]" as
where as = intercalate "\n " $ map showAmountDebug $ amounts $ normaliseMixedAmount m
-- | Get the string representation of a mixed amount, but without
-- any \@ prices.
showMixedAmountWithoutPrice :: MixedAmount -> String
showMixedAmountWithoutPrice m = concat $ intersperse "\n" $ map showfixedwidth as
where
(Mixed as) = normaliseMixedAmountIgnoringPrice m
width = maximum $ map (length . show) as
showfixedwidth = printf (printf "%%%ds" width) . showAmountWithoutPrice
-- | Get the string representation of a mixed amount, and if it
-- appears to be all zero just show a bare 0, ledger-style.
showMixedAmountOrZero :: MixedAmount -> String
showMixedAmountOrZero a | a == missingamt = ""
| isZeroMixedAmount a = "0"
| otherwise = showMixedAmount a
-- | Get the string representation of a mixed amount, or a bare 0,
-- without any \@ prices.
showMixedAmountOrZeroWithoutPrice :: MixedAmount -> String
showMixedAmountOrZeroWithoutPrice a
| isZeroMixedAmount a = "0"
| otherwise = showMixedAmountWithoutPrice a
-- | Simplify a mixed amount by removing redundancy in its component amounts, as follows:
-- 1. sum amounts which have the same commodity (ignoring their price)
-- 2. remove zero amounts
-- 3. if there are no amounts at all, add a single zero amount
normaliseMixedAmount :: MixedAmount -> MixedAmount
normaliseMixedAmount (Mixed as) = Mixed as''
where
as'' = if null nonzeros then [nullamt] else nonzeros
(_,nonzeros) = partition (\a -> isReallyZeroAmount a && Mixed [a] /= missingamt) as'
as' = map sumSamePricedAmountsPreservingPrice $ group $ sort as
sort = sortBy (\a1 a2 -> compare (sym a1) (sym a2))
group = groupBy (\a1 a2 -> sym a1 == sym a2)
sym = symbol . commodity
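-- A hedged example of the intended behaviour, using the 'dollars' and 'euros'
-- helpers that the tests below rely on: amounts in the same commodity are
-- summed, so
--
-- > normaliseMixedAmount (Mixed [dollars 1, dollars 2, euros 3])
--
-- is a mixed amount with a single $3 component and a single EUR 3 component.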
-- | Set a mixed amount's commodity to the canonicalised commodity from
-- the provided commodity map.
canonicaliseMixedAmount :: Maybe (Map.Map String Commodity) -> MixedAmount -> MixedAmount
canonicaliseMixedAmount canonicalcommoditymap (Mixed as) = Mixed $ map (canonicaliseAmount canonicalcommoditymap) as
-- | Set an amount's commodity to the canonicalised commodity from
-- the provided commodity map.
canonicaliseAmount :: Maybe (Map.Map String Commodity) -> Amount -> Amount
canonicaliseAmount Nothing = id
canonicaliseAmount (Just canonicalcommoditymap) = fixamount
where
-- like journalCanonicaliseAmounts
fixamount a@Amount{commodity=c} = a{commodity=fixcommodity c}
fixcommodity c@Commodity{symbol=s} = findWithDefault c s canonicalcommoditymap
-- various sum variants..
sumAmountsDiscardingPrice [] = nullamt
sumAmountsDiscardingPrice as = (sum as){price=Nothing}
sumSamePricedAmountsPreservingPrice [] = nullamt
sumSamePricedAmountsPreservingPrice as = (sum as){price=price $ head as}
-- | Simplify a mixed amount by combining any component amounts which have
-- the same commodity, ignoring and discarding their unit prices if any.
-- Also removes zero amounts, or adds a single zero amount if there are no
-- amounts at all.
normaliseMixedAmountIgnoringPrice :: MixedAmount -> MixedAmount
normaliseMixedAmountIgnoringPrice (Mixed as) = Mixed as''
where
as'' = map sumAmountsDiscardingPrice $ group $ sort as'
group = groupBy samesymbol where samesymbol a1 a2 = sym a1 == sym a2
sort = sortBy (comparing sym)
sym = symbol . commodity
as' | null nonzeros = [head $ zeros ++ [nullamt]]
| otherwise = nonzeros
where (zeros,nonzeros) = partition isZeroAmount as
sumMixedAmountsPreservingHighestPrecision :: [MixedAmount] -> MixedAmount
sumMixedAmountsPreservingHighestPrecision ms = foldl' (+~) 0 ms
where (+~) (Mixed as) (Mixed bs) = normaliseMixedAmountPreservingHighestPrecision $ Mixed $ as ++ bs
normaliseMixedAmountPreservingHighestPrecision :: MixedAmount -> MixedAmount
normaliseMixedAmountPreservingHighestPrecision (Mixed as) = Mixed as''
where
as'' = map sumSamePricedAmountsPreservingPriceAndHighestPrecision $ group $ sort as'
sort = sortBy cmpsymbolandprice
cmpsymbolandprice a1 a2 = compare (sym a1,price a1) (sym a2,price a2)
group = groupBy samesymbolandprice
samesymbolandprice a1 a2 = (sym a1 == sym a2) && (price a1 == price a2)
sym = symbol . commodity
as' | null nonzeros = [head $ zeros ++ [nullamt]]
| otherwise = nonzeros
(zeros,nonzeros) = partition isReallyZeroAmount as
sumSamePricedAmountsPreservingPriceAndHighestPrecision [] = nullamt
sumSamePricedAmountsPreservingPriceAndHighestPrecision as = (sumAmountsPreservingHighestPrecision as){price=price $ head as}
sumAmountsPreservingHighestPrecision :: [Amount] -> Amount
sumAmountsPreservingHighestPrecision as = foldl' (+~) 0 as
where (+~) = amountopPreservingHighestPrecision (+)
amountopPreservingHighestPrecision :: (Double -> Double -> Double) -> Amount -> Amount -> Amount
amountopPreservingHighestPrecision op a@(Amount ac@Commodity{precision=ap} _ _) (Amount bc@Commodity{precision=bp} bq _) =
Amount c q Nothing
where
q = quantity (convertAmountToSimilarCommodity bc a) `op` bq
c = if ap > bp then ac else bc
--
-- | Convert a mixed amount's component amounts to the commodity of their
-- saved price, if any.
costOfMixedAmount :: MixedAmount -> MixedAmount
costOfMixedAmount (Mixed as) = Mixed $ map costOfAmount as
-- | Divide a mixed amount's quantities by some constant.
divideMixedAmount :: MixedAmount -> Double -> MixedAmount
divideMixedAmount (Mixed as) d = Mixed $ map (flip divideAmount d) as
-- | Divide an amount's quantity by some constant.
divideAmount :: Amount -> Double -> Amount
divideAmount a@Amount{quantity=q} d = a{quantity=q/d}
-- | The empty simple amount.
nullamt :: Amount
nullamt = Amount unknown 0 Nothing
-- | The empty mixed amount.
nullmixedamt :: MixedAmount
nullmixedamt = Mixed []
-- | A temporary value for parsed transactions which had no amount specified.
missingamt :: MixedAmount
missingamt = Mixed [Amount unknown{symbol="AUTO"} 0 Nothing]
tests_Hledger_Data_Amount = TestList [
"showAmount" ~: do
showAmount (dollars 0 + pounds 0) `is` "0"
,"showMixedAmount" ~: do
showMixedAmount (Mixed [Amount dollar 0 Nothing]) `is` "0"
showMixedAmount (Mixed []) `is` "0"
showMixedAmount missingamt `is` ""
,"showMixedAmountOrZero" ~: do
showMixedAmountOrZero (Mixed [Amount dollar 0 Nothing]) `is` "0"
showMixedAmountOrZero (Mixed []) `is` "0"
showMixedAmountOrZero missingamt `is` ""
,"amount arithmetic" ~: do
let a1 = dollars 1.23
let a2 = Amount (comm "$") (-1.23) Nothing
let a3 = Amount (comm "$") (-1.23) Nothing
(a1 + a2) `is` Amount (comm "$") 0 Nothing
(a1 + a3) `is` Amount (comm "$") 0 Nothing
(a2 + a3) `is` Amount (comm "$") (-2.46) Nothing
(a3 + a3) `is` Amount (comm "$") (-2.46) Nothing
-- arithmetic with different commodities currently assumes conversion rate 1:
let a4 = euros (-1.23)
assertBool "" $ isZeroAmount (a1 + a4)
sum [a2,a3] `is` Amount (comm "$") (-2.46) Nothing
sum [a3,a3] `is` Amount (comm "$") (-2.46) Nothing
sum [a1,a2,a3,-a3] `is` Amount (comm "$") 0 Nothing
let dollar0 = dollar{precision=0}
(sum [Amount dollar 1.25 Nothing, Amount dollar0 (-1) Nothing, Amount dollar (-0.25) Nothing])
`is` (Amount dollar 0 Nothing)
,"mixed amount arithmetic" ~: do
let dollar0 = dollar{precision=0}
(sum $ map (Mixed . (\a -> [a]))
[Amount dollar 1.25 Nothing,
Amount dollar0 (-1) Nothing,
Amount dollar (-0.25) Nothing])
`is` Mixed [Amount unknown 0 Nothing]
,"normaliseMixedAmount" ~: do
normaliseMixedAmount (Mixed []) `is` Mixed [nullamt]
assertBool "" $ isZeroMixedAmount $ normaliseMixedAmount (Mixed [Amount {commodity=dollar, quantity=10, price=Nothing}
,Amount {commodity=dollar, quantity=10, price=Just (TotalPrice (Mixed [Amount {commodity=euro, quantity=7, price=Nothing}]))}
,Amount {commodity=dollar, quantity=(-10), price=Nothing}
,Amount {commodity=dollar, quantity=(-10), price=Just (TotalPrice (Mixed [Amount {commodity=euro, quantity=7, price=Nothing}]))}
])
,"punctuatethousands 1" ~: punctuatethousands "" `is` ""
,"punctuatethousands 2" ~: punctuatethousands "1234567.8901" `is` "1,234,567.8901"
,"punctuatethousands 3" ~: punctuatethousands "-100" `is` "-100"
,"costOfAmount" ~: do
costOfAmount (euros 1) `is` euros 1
costOfAmount (euros 2){price=Just $ UnitPrice $ Mixed [dollars 2]} `is` dollars 4
costOfAmount (euros 1){price=Just $ TotalPrice $ Mixed [dollars 2]} `is` dollars 2
costOfAmount (euros (-1)){price=Just $ TotalPrice $ Mixed [dollars 2]} `is` dollars (-2)
]
|
trygvis/hledger
|
hledger-lib/Hledger/Data/Amount.hs
|
gpl-3.0
| 22,044 | 0 | 24 | 5,236 | 5,097 | 2,699 | 2,398 | 291 | 4 |
module OpenSandbox.Data.EntitySpec (main,spec) where
import OpenSandbox.Data.Entity
import Test.Hspec
import Test.QuickCheck
spec :: Spec
spec = return ()
main :: IO ()
main = hspec spec
|
oldmanmike/opensandbox
|
test/OpenSandbox/Data/EntitySpec.hs
|
gpl-3.0
| 191 | 0 | 6 | 29 | 64 | 37 | 27 | 8 | 1 |
module Paper where
import Ranking.Glicko
players :: [Player 1]
players =
[ Player { playerId = 1
, playerRating = 1500
, playerDev = 200
, playerVol = 0.06
, playerInactivity = 0
, playerAge = 0 }
, Player { playerId = 2
, playerRating = 1400
, playerDev = 30
, playerVol = 0.06
, playerInactivity = 0
, playerAge = 0 }
, Player { playerId = 3
, playerRating = 1550
, playerDev = 100
, playerVol = 0.06
, playerInactivity = 0
, playerAge = 0 }
, Player { playerId = 4
, playerRating = 1700
, playerDev = 300
, playerVol = 0.06
, playerInactivity = 0
, playerAge = 0 }]
matches :: [Match]
matches =
[ Match 1 2 1 0
, Match 1 3 0 1
, Match 1 4 0 1]
|
Prillan/haskell-glicko
|
test/Paper.hs
|
gpl-3.0
| 887 | 0 | 7 | 382 | 238 | 149 | 89 | -1 | -1 |
{-# LANGUAGE DuplicateRecordFields #-}
module Data.Walker where
import Data.Hashable (hash)
import Data.List
import Data.SCAD
import Data.Shared
import qualified Data.URDF as URDF
import qualified Data.SRDF as SRDF
import Linear.V3
import Linear.Quaternion
type LegName = String
type CenterOfMass = Position
type PosOffset = Position
jointNames = ["alfa","beta","gama","delta","epsilon","zeta","eta","theta"]
data SCADModel = SCADModel {
module_name :: SCADName,
args :: [Arg],
imports :: [Filename]
}
scadModel :: SCADArg a => SCADName -> [(String, a)] -> [Filename] -> SCADModel
scadModel name args imps = SCADModel{
module_name = name,
args = map (uncurry toArg) args,
imports = imps
}
data Part = Part {
model :: SCADModel,
collision_models :: [SCADModel],
part_weight :: Maybe Mass,
center_of_mass :: Maybe CenterOfMass
};
data JointConfig = JointConfig {
min_angle :: Angle,
default_angle :: Angle,
max_angle :: Angle,
vel_lim :: AngularVelocity,
effor_lim :: Effort,
g_steps :: Integer
}
data Link = Link {
link_name :: String,
offset :: PosOffset,
part :: Part,
axis :: Axis,
joint_config ::JointConfig
};
newtype Tip = Tip {
point :: Position
}
data Leg = Leg {
leg_name :: LegName,
chain :: [Link],
tip :: Tip,
pose :: Pose
};
data Body = Body {
body_part :: Part
};
data Walker = Walker {
walker_name :: String,
legs :: [Leg],
body :: Body
};
getLinkName leg name = "leg/" ++ leg_name leg ++ "/" ++ name
getJointName = getLinkName
poseToURDFOrigin :: Pose -> URDF.Origin
poseToURDFOrigin pose = URDF.Origin (Just $ pos pose) ( Just $ quat_to_rpy $ ori pose )
where
getLegJoints :: Leg -> [URDF.Joint]
getLegJoints leg = [ first_joint ] ++ map linkToURDFJoint link_triples ++ [ last_joint ]
where
link_names = map (getLinkName leg . link_name) $ chain leg
link_triples = take ( length $ chain leg ) $ zip3 ( getLinkName leg "base" : link_names ) ( chain leg) (tail jointNames)
jConfigToURDFLimit (JointConfig min_angle _ max_angle vel_lim effor_lim _) = URDF.Limit effor_lim vel_lim (Just min_angle) (Just max_angle)
first_joint = URDF.Joint {
name = getJointName leg ( head jointNames ),
jtype = URDF.Fixed,
parent = "body",
child = getLinkName leg "base",
origin = Just $ URDF.Origin (Just $ pos $ pose leg) Nothing,
axis = Nothing,
calibration = Nothing,
mimic = Nothing,
safety_controller = Nothing
}
linkToURDFJoint :: (String, Link, String) -> URDF.Joint
linkToURDFJoint (p_link_name, ch_link, name) = URDF.Joint {
name = getJointName leg name,
jtype = URDF.Revolute ( jConfigToURDFLimit $ joint_config ch_link ) Nothing,
parent = p_link_name,
child = getLinkName leg $ link_name ch_link,
origin = Just $ URDF.Origin ( Just $ offset ch_link ) Nothing,
axis = Just $ axis ch_link,
calibration = Nothing,
mimic = Nothing ,
safety_controller = Nothing
}
last_joint = URDF.Joint {
name = getJointName leg ( jointNames !! (2 + length (chain leg)) ),
jtype = URDF.Fixed,
parent = last link_names,
child = getLinkName leg "tip",
origin = Just $ URDF.Origin ( Just $ point $ tip leg ) Nothing,
axis = Nothing,
calibration = Nothing,
mimic = Nothing,
safety_controller = Nothing
}
getStlName leg name = "src/stl/" ++ getLinkName leg name ++ ".stl"
getURDFStlName leg name = "package://schpin_koke/" ++ getStlName leg name
getLinkVisual :: Leg -> Link -> URDF.Visual
getLinkVisual leg link = URDF.Visual {
v_geometry = URDF.Mesh ( getURDFStlName leg $ link_name link ) (Just $ V3 0.001 0.001 0.001),
name = Nothing,
origin = Nothing,
material = Just $ URDF.Material name (Just color ) Nothing
}
where
name = getLinkName leg $ link_name link
color = URDF.Color {
r = fromIntegral ( hash (name ++ "red") `mod` 1024) / 1024.0,
g = fromIntegral ( hash (name ++ "green") `mod` 1024) / 1024.0,
b = fromIntegral ( hash (name ++ "blue") `mod` 1024) / 1024.0,
a = 1
}
getLinkCollision :: Leg -> Link -> [URDF.Collision]
getLinkCollision leg link = map f (collision_models $ part link)
where
f model = URDF.Collision {
geometry = URDF.Mesh ( getURDFStlName leg ( module_name model ) ) (Just $ V3 0.001 0.001 0.001),
name = Nothing,
origin = Nothing
}
getLegLinks :: Leg -> [URDF.Link]
getLegLinks leg = [URDF.Link (getLinkName leg "base") [] Nothing []] ++ map linkToURDFLink ( chain leg) ++ [URDF.Link (getLinkName leg "tip") [] Nothing []]
where
linkToURDFLink leg_link = URDF.Link {
name = getLinkName leg $ link_name leg_link,
visuals = [ getLinkVisual leg leg_link ],
inertial = Nothing,
collision = getLinkCollision leg leg_link
}
getBodyLink :: Body -> URDF.Link
getBodyLink body = URDF.Link {
name = "body",
visuals = [ body_vis ],
inertial = Nothing,
collision = body_cols
}
where
body_cols = map f (collision_models $ body_part body )
f model = URDF.Collision {
geometry = URDF.Mesh ("package://schpin_koke/src/stl/" ++ module_name model ++ ".stl") (Just $ V3 0.001 0.001 0.001),
name = Nothing,
origin = Nothing
}
body_vis = URDF.Visual {
v_geometry = URDF.Mesh "package://schpin_koke/src/stl/body.stl" (Just $ V3 0.001 0.001 0.001),
name = Nothing,
origin = Nothing,
material = Nothing
}
getURDF :: Walker -> URDF.URDF
getURDF walker = URDF.URDF (walker_name walker) joints links
where
joints = concatMap getLegJoints ( legs walker )
links = getBodyLink ( body walker) : (concatMap getLegLinks $ legs walker)
getLegSRDFChain :: Leg -> SRDF.Chain
getLegSRDFChain leg = SRDF.Chain (link_name $ head urdf_chain) (link_name $ last urdf_chain )
where
urdf_chain = getLegLinks leg
link_name :: URDF.Link -> UrdfName
link_name (URDF.Link name _ _ _) = name
getLegSRDFJoints :: Leg -> [SRDF.Joint]
getLegSRDFJoints leg = zipWith convert_chain (chain leg) ( tail jointNames )
where
convert_chain (Link _ _ _ _ joint_config) joint_name = SRDF.Joint (getJointName leg joint_name) (build_joint_config joint_config)
build_joint_config (JointConfig _ _ _ _ _ g_steps ) = [SRDF.GenericNode "graph_steps" [SRDF.Attribute "value" "16"] []]
getSRDF :: Walker -> SRDF.SRDF
getSRDF walker = SRDF.SRDF (walker_name walker) [leg_group] items [schpin_config]
where
leg_group = SRDF.Group "legs" [] (concatMap getLegSRDFJoints $ legs walker) (map getLegSRDFChain $ legs walker) [] []
items = []
schpin_config = SRDF.GenericNode "schpin_config" [] [
SRDF.GenericNode "environment" [SRDF.Attribute "resolution" "0.05"] [],
SRDF.GenericNode "leg_workarea" [SRDF.Attribute "resolution" "0.01"] [],
SRDF.GenericNode "leg_mapping" [SRDF.Attribute "resolution" "0.01"] []
]
getScadName leg name = "src/scad/gen/" ++ getLinkName leg name ++ ".scad"
getSTLRenderList :: Walker -> [(Filename, Filename)]
getSTLRenderList walker = ("src/scad/gen/body.scad", "src/stl/body.stl") : (concatMap convert ( legs walker ) ++ body_collisions)
where
body_collisions :: [(Filename, Filename)]
body_collisions = map (\x->("src/scad/gen/" ++ module_name x ++ ".scad", "src/stl/" ++ module_name x ++ ".stl" )) $ collision_models $ body_part $ body walker
convert :: Leg -> [(Filename, Filename)]
convert leg = zip ( map (getScadName leg) names ) (map (getStlName leg) names )
where
link_names = map link_name $ chain leg
collision_names = concatMap ( \x -> map module_name $ collision_models $ part x ) $ chain leg
names = link_names ++ collision_names
getSCADModels :: Walker -> [(Filename, SCADModel)]
getSCADModels walkie = ("src/scad/gen/body.scad", model $ body_part $ body walkie) : (concatMap get_leg_models ( legs walkie ) ++ (body_collisions $ body walkie))
where
body_collisions :: Body -> [(Filename, SCADModel)]
body_collisions body = map (\x -> ("src/scad/gen/" ++ (module_name x) ++ ".scad", x)) (collision_models $ body_part body)
get_leg_models leg = concatMap (get_link_model leg) $ chain leg
get_link_model :: Leg -> Link -> [(Filename, SCADModel)]
get_link_model leg (Link link_name _ part _ _) = (getScadName leg link_name, model part) : (map (\x -> (getScadName leg $ module_name x, x)) $ collision_models part)
modelToSCAD :: (Filename, SCADModel) -> (Filename, [SCAD])
modelToSCAD (file, model) = (file, s)
where
s = map to_import_f ( imports model) ++ [ Module (module_name model) (args model) ]
to_import_f :: Filename -> SCAD
to_import_f filename = Use $ concat ( replicate ( length (filter (== '/') file) - 2 ) "../" ) ++ filename
getSCADSkeleton :: String -> Walker -> ( Part -> SCAD ) -> (Filename, [SCAD])
getSCADSkeleton filename (Walker _ legs body) part_to_skeleton = (filename, [Use "../body.scad"] ++ map Use ( concatMap get_imports legs) ++ map to_skeleton legs ++ [part_to_skeleton $ body_part body])
where
get_imports (Leg _ chain _ _) = map (\x -> "../" ++ x) $ concatMap (imports . model . part ) chain
to_skeleton :: Leg -> SCAD
to_skeleton (Leg _ chain tip pose) = Translate (pos pose) [ Rotate (ori pose) ( link_to_skeleton chain ) ]
link_to_skeleton :: [Link] -> [SCAD]
link_to_skeleton [] = []
link_to_skeleton (link:rest) = [ Translate (offset link) [ Rotate (toQuat ( axis link) (default_angle $ joint_config link) ) ( part_to_skeleton ( part link ) : link_to_skeleton rest)]]
partToVisual (Part model _ _ _) = Module (module_name model) (args model)
partToCollision (Part _ collision_models _ _) = Debug $ map ( \x -> Module(module_name x) (args x) ) collision_models
getSCAD :: Walker -> [(Filename, [SCAD])]
getSCAD walkie = map modelToSCAD (getSCADModels walkie) ++ [getSCADSkeleton "src/scad/gen/skeleton.scad" walkie partToVisual] ++ [getSCADSkeleton "src/scad/gen/col_skeleton.scad" walkie partToCollision]
|
Schpin/schpin-chassis
|
schpin_robot_lib/src/Data/Walker.hs
|
gpl-3.0
| 11,996 | 0 | 17 | 4,086 | 3,487 | 1,879 | 1,608 | 192 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
-- |
-- Copyright : (c) 2010, 2011 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <[email protected]>
-- Portability : GHC only
--
-- Theory loading infrastructure.
module Main.TheoryLoader (
-- * Static theory loading settings
theoryLoadFlags
-- ** Loading open theories
, loadOpenThy
-- ** Loading and closing theories
, loadClosedThy
, loadClosedThyWfReport
, loadClosedThyString
-- ** Loading open diff theories
, loadOpenDiffThy
-- ** Loading and closing diff theories
, loadClosedDiffThy
, loadClosedDiffThyWfReport
, loadClosedDiffThyString
-- ** Constructing automatic provers
, constructAutoProver
, constructAutoDiffProver
-- ** Cached Message Deduction Rule Variants
, dhIntruderVariantsFile
, bpIntruderVariantsFile
, addMessageDeductionRuleVariants
) where
-- import Debug.Trace
import Prelude hiding (id, (.))
import Data.Char (toLower)
import Data.Label
import Data.List (isPrefixOf)
import Data.Monoid
import Data.FileEmbed (embedFile)
import Control.Basics
import Control.Category
import Control.DeepSeq (rnf)
import System.Console.CmdArgs.Explicit
import Theory
import Theory.Text.Parser (parseIntruderRules, parseOpenTheory, parseOpenTheoryString, parseOpenDiffTheory, parseOpenDiffTheoryString)
import Theory.Text.Pretty
import Theory.Tools.AbstractInterpretation (EvaluationStyle(..))
import Theory.Tools.IntruderRules (specialIntruderRules, subtermIntruderRules
, multisetIntruderRules)
import Theory.Tools.Wellformedness
import Main.Console
import Main.Environment
------------------------------------------------------------------------------
-- Theory loading: shared between interactive and batch mode
------------------------------------------------------------------------------
-- | Flags for loading a theory.
theoryLoadFlags :: [Flag Arguments]
theoryLoadFlags =
[ flagOpt "" ["prove"] (updateArg "prove") "LEMMAPREFIX"
"Attempt to prove a lemma "
, flagOpt "dfs" ["stop-on-trace"] (updateArg "stopOnTrace") "DFS|BFS|NONE"
"How to search for traces (default DFS)"
, flagOpt "5" ["bound", "b"] (updateArg "bound") "INT"
"Bound the depth of the proofs"
, flagOpt "s" ["heuristic"] (updateArg "heuristic") "(s|S|p|P|c|C)+"
"Sequence of goal rankings to use (default 's')"
, flagOpt "summary" ["partial-evaluation"] (updateArg "partialEvaluation")
"SUMMARY|VERBOSE"
"Partially evaluate multiset rewriting system"
, flagOpt "" ["defines","D"] (updateArg "defines") "STRING"
"Define flags for pseudo-preprocessor."
, flagNone ["diff"] (addEmptyArg "diff")
"Turn on observational equivalence mode using diff terms."
-- , flagOpt "" ["diff"] (updateArg "diff") "OFF|ON"
-- "Turn on observational equivalence (default OFF)."
]
-- | The defined pre-processor flags in the argument.
defines :: Arguments -> [String]
defines = findArg "defines"
-- | Diff flag in the argument
diff :: Arguments -> [String]
diff as = if (argExists "diff" as) then ["diff"] else []
-- | Load an open diff theory from a file.
loadOpenDiffThy :: Arguments -> FilePath -> IO OpenDiffTheory
loadOpenDiffThy as fp = parseOpenDiffTheory (diff as ++ defines as) fp
-- | Load an open theory from a file.
loadOpenThy :: Arguments -> FilePath -> IO OpenTheory
loadOpenThy as = parseOpenTheory (diff as ++ defines as)
-- | Load a closed diff theory.
loadClosedDiffThy :: Arguments -> FilePath -> IO ClosedDiffTheory
loadClosedDiffThy as inFile = do
thy0 <- loadOpenDiffThy as inFile
thy1 <- addMessageDeductionRuleVariantsDiff thy0
closeDiffThy as thy1
-- | Load a closed theory.
loadClosedThy :: Arguments -> FilePath -> IO ClosedTheory
loadClosedThy as inFile = loadOpenThy as inFile >>= closeThy as
-- | Load a closed theory and report on well-formedness errors.
loadClosedThyWfReport :: Arguments -> FilePath -> IO ClosedTheory
loadClosedThyWfReport as inFile = do
thy <- loadOpenThy as inFile
-- report
case checkWellformedness thy of
[] -> return ()
report -> do
putStrLn ""
putStrLn $ replicate 78 '-'
putStrLn $ "Theory file '" ++ inFile ++ "'"
putStrLn $ replicate 78 '-'
putStrLn ""
putStrLn $ "WARNING: ignoring the following wellformedness errors"
putStrLn ""
putStrLn $ renderDoc $ prettyWfErrorReport report
putStrLn $ replicate 78 '-'
putStrLn ""
-- return closed theory
closeThy as thy
-- | Load a closed diff theory and report on well-formedness errors.
loadClosedDiffThyWfReport :: Arguments -> FilePath -> IO ClosedDiffTheory
loadClosedDiffThyWfReport as inFile = do
thy0 <- loadOpenDiffThy as inFile
thy1 <- addMessageDeductionRuleVariantsDiff thy0
-- report
case checkWellformednessDiff thy1 of
[] -> return ()
report -> do
putStrLn ""
putStrLn $ replicate 78 '-'
putStrLn $ "Theory file '" ++ inFile ++ "'"
putStrLn $ replicate 78 '-'
putStrLn ""
putStrLn $ "WARNING: ignoring the following wellformedness errors"
putStrLn ""
putStrLn $ renderDoc $ prettyWfErrorReport report
putStrLn $ replicate 78 '-'
putStrLn ""
-- return closed theory
closeDiffThy as thy1
loadClosedThyString :: Arguments -> String -> IO (Either String ClosedTheory)
loadClosedThyString as input =
case parseOpenTheoryString (defines as) input of
Left err -> return $ Left $ "parse error: " ++ show err
Right thy -> fmap Right $ closeThy as thy
loadClosedDiffThyString :: Arguments -> String -> IO (Either String ClosedDiffTheory)
loadClosedDiffThyString as input =
case parseOpenDiffTheoryString (defines as) input of
Left err -> return $ Left $ "parse error: " ++ show err
Right thy -> fmap Right $ do
thy1 <- addMessageDeductionRuleVariantsDiff thy
closeDiffThy as thy1
-- | Close a theory according to arguments.
closeThy :: Arguments -> OpenTheory -> IO ClosedTheory
closeThy as thy0 = do
thy1 <- addMessageDeductionRuleVariants thy0
-- FIXME: wf-check is at the wrong position here. Needs to be more
-- fine-grained.
let thy2 = wfCheck thy1
-- close and prove
cthy <- closeTheory (maudePath as) thy2
return $ proveTheory lemmaSelector prover $ partialEvaluation cthy
where
-- apply partial application
----------------------------
partialEvaluation = case map toLower <$> findArg "partialEvaluation" as of
Just "verbose" -> applyPartialEvaluation Tracing
Just _ -> applyPartialEvaluation Summary
_ -> id
-- wellformedness check
-----------------------
wfCheck :: OpenTheory -> OpenTheory
wfCheck thy =
noteWellformedness
(checkWellformedness thy) thy
lemmaSelector :: Lemma p -> Bool
lemmaSelector lem =
any (`isPrefixOf` get lName lem) lemmaNames
where
lemmaNames :: [String]
lemmaNames = findArg "prove" as
-- replace all annotated sorrys with the configured autoprover.
prover :: Prover
prover | argExists "prove" as =
replaceSorryProver $ runAutoProver $ constructAutoProver as
| otherwise = mempty
-- | Close a diff theory according to arguments.
closeDiffThy :: Arguments -> OpenDiffTheory -> IO ClosedDiffTheory
closeDiffThy as thy0 = do
-- FIXME: wf-check is at the wrong position here. Needs to be more
-- fine-grained.
let thy2 = wfCheckDiff thy0
-- close and prove
cthy <- closeDiffTheory (maudePath as) (addDefaultDiffLemma (addProtoRuleLabels thy2))
return $ proveDiffTheory lemmaSelector diffLemmaSelector prover diffprover $ partialEvaluation cthy
where
-- apply partial application
----------------------------
partialEvaluation = case map toLower <$> findArg "partialEvaluation" as of
Just "verbose" -> applyPartialEvaluationDiff Tracing
Just _ -> applyPartialEvaluationDiff Summary
_ -> id
-- wellformedness check
-----------------------
wfCheckDiff :: OpenDiffTheory -> OpenDiffTheory
wfCheckDiff thy =
noteWellformednessDiff
(checkWellformednessDiff thy) thy
lemmaSelector :: Lemma p -> Bool
lemmaSelector lem =
any (`isPrefixOf` get lName lem) lemmaNames
where
lemmaNames :: [String]
lemmaNames = findArg "prove" as
diffLemmaSelector :: DiffLemma p -> Bool
diffLemmaSelector lem =
any (`isPrefixOf` get lDiffName lem) lemmaNames
where
lemmaNames :: [String]
lemmaNames = findArg "prove" as
-- diff prover: replace all annotated sorrys with the configured autoprover.
diffprover :: DiffProver
diffprover | argExists "prove" as =
replaceDiffSorryProver $ runAutoDiffProver $ constructAutoDiffProver as
| otherwise = mempty
-- replace all annotated sorrys with the configured autoprover.
prover :: Prover
prover | argExists "prove" as =
replaceSorryProver $ runAutoProver $ constructAutoProver as
| otherwise = mempty
-- | Construct an 'AutoProver' from the given arguments (--bound,
-- --stop-on-trace).
constructAutoProver :: Arguments -> AutoProver
constructAutoProver as =
-- force error early
(rnf rankings) `seq`
AutoProver (roundRobinHeuristic rankings) proofBound stopOnTrace
where
-- handles to relevant arguments
--------------------------------
proofBound = read <$> findArg "bound" as
rankings = case findArg "heuristic" as of
Just (rawRankings@(_:_)) -> map ranking rawRankings
Just [] -> error "--heuristic: at least one ranking must be given"
_ -> [SmartRanking False]
ranking 's' = SmartRanking False
ranking 'S' = SmartRanking True
ranking 'p' = SapicRanking
ranking 'P' = SapicPKCS11Ranking
ranking 'c' = UsefulGoalNrRanking
ranking 'C' = GoalNrRanking
ranking r = error $ render $ fsep $ map text $ words $
"Unknown goal ranking '" ++ [r] ++ "'. Use one of the following:\
\ 's' for the smart ranking without loop breakers,\
\ 'S' for the smart ranking with loop breakers,\
\ 'p' for the smart ranking optimized for translations coming from SAPIC (http://sapic.gforge.inria.fr),\
\ 'P' for the smart ranking optimized for a specific model of PKCS11, translated using SAPIC (http://sapic.gforge.inria.fr),\
\ 'c' for the creation order and useful goals first,\
\ and 'C' for the creation order."
stopOnTrace = case (map toLower) <$> findArg "stopOnTrace" as of
Nothing -> CutDFS
Just "dfs" -> CutDFS
Just "none" -> CutNothing
Just "bfs" -> CutBFS
Just other -> error $ "unknown stop-on-trace method: " ++ other
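-- For illustration, a few argument-to-field mappings (the concrete command-line
-- flag spellings are assumptions; only the 'findArg' keys above come from this
-- module):
--
-- >  findArg "heuristic" as   == Just "sC"  ~>  rankings    = [SmartRanking False, GoalNrRanking]
-- >  findArg "bound" as       == Just "7"   ~>  proofBound  = Just 7
-- >  findArg "stopOnTrace" as == Just "bfs" ~>  stopOnTrace = CutBFS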
-- | Construct an 'AutoProver' for diff proofs from the given arguments (--bound,
-- --stop-on-trace).
constructAutoDiffProver :: Arguments -> AutoProver
constructAutoDiffProver as =
-- FIXME!
-- force error early
(rnf rankings) `seq`
AutoProver (roundRobinHeuristic rankings) proofBound stopOnTrace
where
-- handles to relevant arguments
--------------------------------
proofBound = read <$> findArg "bound" as
rankings = case findArg "heuristic" as of
Just (rawRankings@(_:_)) -> map ranking rawRankings
Just [] -> error "--heuristic: at least one ranking must be given"
_ -> [SmartDiffRanking]
ranking 's' = SmartRanking False
ranking 'S' = SmartRanking True
ranking 'c' = UsefulGoalNrRanking
ranking 'C' = GoalNrRanking
ranking r = error $ render $ fsep $ map text $ words $
"Unknown goal ranking '" ++ [r] ++ "'. Use one of the following:\
\ 's' for the smart ranking without loop breakers,\
\ 'S' for the smart ranking with loop breakers,\
\ 'c' for the creation order and useful goals first,\
\ and 'C' for the creation order."
stopOnTrace = case (map toLower) <$> findArg "stopOnTrace" as of
Nothing -> CutDFS
Just "dfs" -> CutDFS
Just "none" -> CutNothing
Just "bfs" -> CutBFS
Just other -> error $ "unknown stop-on-trace method: " ++ other
------------------------------------------------------------------------------
-- Message deduction variants cached in files
------------------------------------------------------------------------------
-- | The name of the intruder variants file.
dhIntruderVariantsFile :: FilePath
dhIntruderVariantsFile = "data/intruder_variants_dh.spthy"
-- | The name of the intruder variants file.
bpIntruderVariantsFile :: FilePath
bpIntruderVariantsFile = "data/intruder_variants_bp.spthy"
-- | Construct the DH intruder variants for the given maude signature.
mkDhIntruderVariants :: MaudeSig -> [IntrRuleAC]
mkDhIntruderVariants msig =
either (error . show) id -- report errors lazily through 'error'
$ parseIntruderRules msig dhIntruderVariantsFile
$(embedFile "data/intruder_variants_dh.spthy")
-- | Construct the BP intruder variants for the given maude signature.
mkBpIntruderVariants :: MaudeSig -> [IntrRuleAC]
mkBpIntruderVariants msig =
either (error . show) id -- report errors lazily through 'error'
$ parseIntruderRules msig bpIntruderVariantsFile
$(embedFile "data/intruder_variants_bp.spthy")
-- | Add the variants of the message deduction rule. Uses built-in cached
-- files for the variants of the message deduction rules for Diffie-Hellman
-- exponentiation and Bilinear-Pairing.
addMessageDeductionRuleVariants :: OpenTheory -> IO OpenTheory
-- TODO (SM): drop use of IO here.
addMessageDeductionRuleVariants thy0
| enableBP msig = addIntruderVariants [ mkDhIntruderVariants
, mkBpIntruderVariants ]
| enableDH msig = addIntruderVariants [ mkDhIntruderVariants ]
| otherwise = return thy
where
msig = get (sigpMaudeSig . thySignature) thy0
rules = subtermIntruderRules msig ++ specialIntruderRules
++ if enableMSet msig then multisetIntruderRules else []
thy = addIntrRuleACs rules thy0
addIntruderVariants mkRuless = do
return $ addIntrRuleACs (concatMap ($ msig) mkRuless) thy
-- | Add the variants of the message deduction rule. Uses the cached version
-- of the @"intruder_variants_dh.spthy"@ file for the variants of the message
-- deduction rules for Diffie-Hellman exponentiation.
addMessageDeductionRuleVariantsDiff :: OpenDiffTheory -> IO OpenDiffTheory
addMessageDeductionRuleVariantsDiff thy0
| enableBP msig = addIntruderVariantsDiff [ mkDhIntruderVariants
, mkBpIntruderVariants ]
| enableDH msig = addIntruderVariantsDiff [ mkDhIntruderVariants ]
| otherwise = return $ addIntrRuleLabels thy
where
msig = get (sigpMaudeSig . diffThySignature) thy0
rules = subtermIntruderRules msig ++ specialIntruderRules
++ if enableMSet msig then multisetIntruderRules else []
thy = addIntrRuleACsDiffBoth rules thy0
addIntruderVariantsDiff mkRuless = do
return $ addIntrRuleLabels (addIntrRuleACsDiffBoth (concatMap ($ msig) mkRuless) thy)
|
ekr/tamarin-prover
|
src/Main/TheoryLoader.hs
|
gpl-3.0
| 16,106 | 0 | 15 | 4,136 | 2,910 | 1,478 | 1,432 | 251 | 13 |
-- | A simple counter. Wrong implementation.
module Counter where
import Control.Monad.Reader
import Control.Monad.State.Lazy
newtype Counter = MkCounter Int
deriving (Show)
-- | `inc c n` increments the counter by `n` units.
inc :: Counter -> Int -> Counter
inc (MkCounter c) n = MkCounter (c + n)
manipCounter :: Counter -> Counter
manipCounter c = inc (inc (inc (inc (inc c 3) 3) 3) 5) 5
incS :: Int -> State Counter ()
incS n = modify (\c -> inc c n)
incR :: Reader Int (State Counter ())
-- Types are getting hairy!
incR = asks incS
-- Then, can we write what we were longing for?
manipCounterM :: Reader Int (State Counter ())
manipCounterM =
local (const 3) (
incR >>
incR >>
incR >> local (const 5) (
incR >>
incR
)
)
manipCounterM' :: Reader Int (State Counter ())
manipCounterM' = do
local (const 3) $
do incR
incR
incR
local (const 5) $
do incR
incR
runCounter :: Counter
runCounter = manipCounter (MkCounter 0)
-- | What is happening here?
--
-- If you look at the definition of `incR`, we see that we produce a State
-- monad action, but that State monad never gets executed! Are monad transformers
-- the solution? (See the sketch at the end of this module.)
manipCounterM2 :: Reader Int (State Counter ())
manipCounterM2 = incR >> incR
runCounterM :: ((), Counter)
runCounterM = runState (runReader manipCounterM2 1) (MkCounter 0)
showCounter :: IO ()
showCounter = putStrLn $ show runCounter
showCounterM :: IO ()
showCounterM = putStrLn $ show $ snd runCounterM
-- | Results are not equal!!!
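-- A sketch (not part of the original exercise) of how a transformer stack
-- answers the question above: with @ReaderT Int (State Counter)@ the reader
-- and the state layer run in a single computation, so no increments are lost.
-- The names below are illustrative only.
incT :: ReaderT Int (State Counter) ()
incT = ask >>= \n -> modify (`inc` n)

manipCounterT :: ReaderT Int (State Counter) ()
manipCounterT =
  local (const 3) (incT >> incT >> incT >> local (const 5) (incT >> incT))

-- Evaluates to @MkCounter 19@, matching 'runCounter', unlike 'runCounterM'.
runCounterT :: Counter
runCounterT = execState (runReaderT manipCounterT 0) (MkCounter 0)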
|
capitanbatata/sandbox
|
random-data-generation/src/CounterWrong.hs
|
gpl-3.0
| 1,570 | 0 | 13 | 373 | 476 | 247 | 229 | 40 | 1 |
{-|
Defines a state machine of 3D Tic-Tac-Toe.
-}
module Game.TicTacToe3D.TicTacToe3D (
Team,
Issue,
Board,
Game (..),
done,
newGame,
playGame
) where
import Control.Monad
import Data.Functor
import Data.Monoid
import Data.Maybe
import Data.List
import Data.Foldable as F
import Data.Tuple.Homogenous
import Game.TicTacToe3D.Vector3 as V
{-|
Restructures a list.
>>> collapse [1, 2, 3, 4, 5]
  [(1,5),(2,4)]
-}
collapse :: [a] -> [(a, a)]
collapse ns = take (halfLen ns) (collapse' ns)
where halfLen ms = length ms `quot` 2
collapse' ms = zip ms (reverse ms)
{-|
In the specified dimension,
generates all the possible directions,
  and pairs up every two of them that face each other.
-}
directions :: Int -> [([Int], [Int])]
directions i = collapse $ allDirections
where allDirections = replicateM i [-1, 0, 1]
{-|
Simplifies
> directions 3
-}
directions3 :: [Tuple2 I3]
directions3 = f <$> directions 3 where
f t = g <$> Tuple2 t where
g [x, y, z] = (x, y, z)
{-|
Retrieves a line to every direction from the given point.
-}
explode :: I3 -> [Tuple2 [I3]]
explode c = (walk c <$>) <$> directions3 where
walk h i = let j = add h i in j : walk j i where
add (h, i, j) (k, l, m) = (h + k, i + l, j + m)
{-|
Checks if the given point is inside of
the specified area in every three dimension.
-}
withinC :: Int -> Int -> I3 -> Bool
withinC min max c =
F.all f $ Tuple3 c where
f n = min <= n && n < max
{-|
Retrieves all the possible lines that
intersect at the given point. All those
lines are within the area
from 0 to the specified number.
-}
explode' :: Int -> I3 -> [[I3]]
explode' len crd = catMaybes $ do
Tuple2 (fs, bs) <- explode crd
let line = crd : pick fs ++ pick bs where
pick = takeWhile $ withinC 0 len
return $ if length line == len then Just line else Nothing
{-|
Represents a team.
-}
type Team = Bool
{-|
Represents a state of one point in a board;
owned by either team or empty.
-}
type Issue = Maybe Team
{-|
Represents a tic-tac-toe board with its side length.
-}
type Board = (Int, V3 Issue)
{-|
Folds a list of 'Issue'.
> foldI [B, B, B] = B
> foldI [B, B, R] = D
> foldI [B, B, _] = D
-}
foldI :: [Issue] -> Issue
foldI [] = Nothing
foldI (x:xs) = F.foldr add x xs
where add m n = if m == n then m else Nothing
-- not Monoid; mappend mempty x /= x
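-- A few concrete cases, using the actual 'Issue' values rather than the
-- B/R/D shorthand above (illustrative only):
--
-- > foldI [Just True, Just True, Just True]  == Just True
-- > foldI [Just True, Just False, Just True] == Nothing
-- > foldI [Just True, Nothing, Just True]    == Nothing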
{-|
Retrieves the first 'Just' element in a given structure,
or 'Nothing' if not found any.
> firstJust [Nothing, Just 1 , Nothing] = Just 1
> firstJust [Nothing, Nothing, Nothing] = Nothing
-}
firstJust :: (Foldable f) => f (Maybe a) -> Maybe a
firstJust ms = join $ F.find isJust ms
{-|
Retrieves the winner and the owned line.
Nothing if the game has not ended yet.
-}
check :: Board -> I3 -> Maybe ([I3], Team)
check (i, v) c = firstJust $ do
l <- explode' i c
let j = foldI $ (v V.!) <$> l
return $ (,) l <$> j
{-|
Represents a result of one team's action.
-}
type Result = Maybe (Either [I3] Board)
{-|
Makes the given team play the specified square.
> Just Left [I3] -- The team has won.
> Just Right Board -- The game goes on.
> Nothing -- The specified square is not playable.
-}
play :: Board -> Team -> I3 -> Result
play (l, v) t c
| v V.! c /= Nothing = Nothing
| otherwise =
let new = (l, v V.// (c, Just t))
in Just $ case check new c of
Just (cs, _) -> Left cs
Nothing -> Right new
{-|
Initializes a board with the given values.
-}
initBoard :: Int -> (I3 -> Issue) -> Board
initBoard i f = (,) i $ V.init i f
{-|
Represents a state of a tic-tac-toe game.
'Done' represents a game that has finished.
-}
data Game = Game Board Team | Done Team [I3]
{-|
An initialized state of a game.
-}
newGame :: Game
newGame = Game newBoard True where
newBoard = (initBoard 3 $ const Nothing)
{-|
Retrieves whether the given game has finished or not.
-}
done :: Game -> Bool
done (Done _ _) = True
done _ = False
{-|
Lets the current team play at the specified square.
-}
playGame :: Int -> Game -> Game
playGame _ g @ (Done _ _) = g
playGame c g @ (Game b t) =
case play b t (i3 c) of
Just (Left cs) -> Done t cs
Just (Right b') -> Game b' (not t)
Nothing -> g
|
ryo0ka/tictactoe3d
|
src/Game/TicTacToe3D/TicTacToe3D.hs
|
gpl-3.0
| 4,347 | 21 | 14 | 1,147 | 1,297 | 692 | 605 | 80 | 3 |
module Paths_DBus (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName
) where
import Data.Version (Version(..))
import System.Environment (getEnv)
version :: Version
version = Version {versionBranch = [0,4], versionTags = []}
bindir, libdir, datadir, libexecdir :: FilePath
bindir = "/home/ham/.cabal/bin"
libdir = "/home/ham/.cabal/lib/DBus-0.4/ghc-7.0.3"
datadir = "/home/ham/.cabal/share/DBus-0.4"
libexecdir = "/home/ham/.cabal/libexec"
getBinDir, getLibDir, getDataDir, getLibexecDir :: IO FilePath
getBinDir = catch (getEnv "DBus_bindir") (\_ -> return bindir)
getLibDir = catch (getEnv "DBus_libdir") (\_ -> return libdir)
getDataDir = catch (getEnv "DBus_datadir") (\_ -> return datadir)
getLibexecDir = catch (getEnv "DBus_libexecdir") (\_ -> return libexecdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
|
hamaxx/unity-2d-for-xmonad
|
xmonad-files/DBus-0.4/dist/build/autogen/Paths_DBus.hs
|
gpl-3.0
| 945 | 0 | 10 | 144 | 277 | 159 | 118 | 22 | 1 |
module Zoepis.ZCamera where
import Zoepis.ZScene
import Zoepis.ZVector
zMoveCamera :: Double -> Double -> ZSceneRoot -> ZSceneRoot
zMoveCamera rightLeft forBack root = root { zSceneCamera = newCamera }
where (c,f,u) = zSceneCamera root
forward = unit $ f - c
right = forward `cross` u
dDir = realToFrac forBack `scale` forward
+ realToFrac rightLeft `scale` right
newCamera = (c + dDir, f + dDir, u)
zRotateCamera :: Double -> Double -> ZSceneRoot -> ZSceneRoot
zRotateCamera theta phi root = root { zSceneCamera = newCamera }
where (c,f,u) = zSceneCamera root
dTheta = realToFrac theta
dPhi = realToFrac phi
dir = unit $ f - c
rotTheta = rotation dTheta u
dirTheta = rotateVector rotTheta dir
rightVec = dir `cross` u
rotPhi = rotation dPhi rightVec
newCamera = (c, c + rotateVector rotPhi dirTheta, u)
zOrbitCamera :: Double -> ZSceneRoot -> ZSceneRoot
zOrbitCamera theta root = root { zSceneCamera = newCamera }
where (c,f,u) = zSceneCamera root
dTheta = realToFrac theta
dir = c - f
rot = rotation dTheta u
newCamera = (f + rotateVector rot dir, f, u)
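-- A small usage sketch (illustrative; 'ZSceneRoot' comes from Zoepis.ZScene):
--
-- > -- orbit a little around the focus point and back the camera up slightly
-- > step :: ZSceneRoot -> ZSceneRoot
-- > step = zMoveCamera 0 (-0.5) . zOrbitCamera (pi / 90)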
|
abakst/Zoepis
|
Zoepis/ZCamera.hs
|
gpl-3.0
| 1,273 | 0 | 10 | 395 | 397 | 219 | 178 | 29 | 1 |
module He.Lexer.Tokens
( keywords, ident, litInt, litFloat, litChar, litBool
, beginString, endString, stringContent
, beginInterp, endInterp, beginExtraDelim, endExtraDelim
, beginBlockComment, endBlockComment, blockCommentContent
, lineComment
) where
import qualified Data.Text as T
import Text.Regex.Applicative
import H.Prelude
import He.Lexer.Types
unsafeReadChars :: (Read a) => [Char] -> a
unsafeReadChars = maybe (error "He.Lexer.Tokens.unsafeReadChars") id . read . pack
text :: Text -> Parser Text
text xs = string (T.unpack xs) *> pure xs
keywords :: TokenParser a
keywords =
keepMode
. fmap ((,NoData) . Keyword)
. choice
. fmap text
. sKeywords
ident :: (IdClass a) => TokenParser a
ident = keepMode . choice . fmap oneIdent . sIdentifiers
oneIdent :: (IdClass a) => (a, IdSpec) -> Parser (RawToken a)
oneIdent (cls, IdSpec{..}) = (Identifier cls,) . TextData . T.pack <$> re
where
singleRE =
(:)
<$> oneOf idStartChars
<*> many (oneOf $ idContinueChars <> idStartChars)
re = case idCompound of
Nothing -> singleRE
Just (onlyCompound, sepChar) -> (concat .) . (:) <$> singleRE <*> m suffix
where
suffix = (:) <$> char sepChar <*> singleRE
m = if onlyCompound then many1 else many
sign :: (Num b) => LexerSpec a -> Parser (b -> b)
sign = maybe (pure id) (option id . (*> pure negate) . text) . sNegative
litInt :: TokenParser a
litInt LexerSpec{ sInts = False } = empty
litInt spec@LexerSpec{ sInts = True } = keepMode $ f <$> sign spec <*> many1 digit
where
f signFunc = (LitInt,) . IntData . signFunc . unsafeReadChars
litFloat :: TokenParser a
litFloat LexerSpec{ sFloats = False } = empty
litFloat spec@LexerSpec{ sFloats = True } =
keepMode $ f <$> sign spec <*> (withIntPart <|> withoutIntPart) <*> g'
where
f mainSignFunc (intPart, fracPart) exp =
(LitFloat,)
. FloatData
      $ (fromInteger intVal + fracVal) * (10 ^^ maybe 0 id exp)
where
intVal = mainSignFunc . unsafeReadChars $ intPart :: Integer
fracVal = (unsafeReadChars fracPart :: Integer) % (10 ^ length fracPart)
g expSignFunc digs = expSignFunc . unsafeReadChars $ digs :: Integer
g' = optionMaybe $ g <$> (oneOf "eE" *> sign spec) <*> many1 digit
withIntPart = (,) <$> many1 digit <*> (char '.' *> many digit)
withoutIntPart = ("0",) <$> (char '.' *> many1 digit)
escapeCodes :: [Parser Char]
escapeCodes =
fmap (\(e, r) -> (char e :: Parser Char) *> pure r)
[ ('b', '\b'), ('t', '\t'), ('n', '\n'), ('f', '\f'), ('r', '\r') ]
charContent :: [Char] -> Parser Char
charContent specials = (char '\\' *> escape) <|> normal
where
escape = foldr (<|>) (unicode <|> octal <|> anyChar) escapeCodes
octal = f <$> oneOf ['0' .. '3'] <*> (count 2 . oneOf $ ['0' .. '7'])
f a [b, c] =
chr $ (readDigit a * 8 ^ (2 :: Integer)) + (readDigit b * 8) + readDigit c
f _ _ = undefined
unicode =
chr
. unsafeReadChars
. ("0x" <>)
<$> ((char 'u' :: Parser Char) *> count 4 hexDigit)
normal = noneOf $ '\\' : specials
readDigit = unsafeReadChars . (: [])
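-- To illustrate the escape handling above: the two-character input sequence
-- \n yields '\n', \u0041 yields 'A' via the four-digit hex form, and the
-- octal form \101 also yields 'A' (1*64 + 0*8 + 1 == 65).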
litChar :: TokenParser a
litChar LexerSpec{ sStrings = StringSpec{ sCharDelim = Nothing } } = empty
litChar LexerSpec{ sStrings = StringSpec{ sCharDelim = Just quote } } =
keepMode
. fmap ((LitChar,) . CharData)
. between q q
. charContent
$ [quote]
where
q = char quote
litBool :: TokenParser a
litBool LexerSpec{ sBools = Nothing } = empty
litBool LexerSpec{ sBools = Just (false, true) } =
keepMode
. fmap ((LitBool,) . BoolData)
$ choice
[ text false *> pure False
, text true *> pure True
]
beginString :: TokenParser a
beginString = sStrings >>> sStringDelim >>> \case
Nothing -> empty
Just quote -> char quote *> pure ((BeginString, NoData), [Push LMString])
stringContent :: (IdClass a) => TokenParser a
stringContent spec =
keepMode
$ (StringContent,)
. TextData
. T.pack <$> many1 (charContent specials)
where
ss = sStrings spec
specials = catMaybes [sStringDelim ss, fmap fst (sInterpMany ss)]
endString :: TokenParser a
endString = sStrings >>> sStringDelim >>> \case
Nothing -> empty
Just quote -> char quote *> pure ((EndString, NoData), [Pop LMString])
beginInterp :: TokenParser a
beginInterp = sStrings >>> sInterpMany >>> \case
Nothing -> empty
Just (delim, _) -> char delim *> pure ((BeginInterp, NoData), [Push LMInterp])
endInterp :: TokenParser a
endInterp = sStrings >>> sInterpMany >>> \case
Nothing -> empty
Just (_, delim) -> char delim *> pure ((EndInterp, NoData), [Pop LMInterp])
beginExtraDelim :: TokenParser a
beginExtraDelim = sStrings >>> sInterpMany >>> \case
Nothing -> empty
Just (delim, _) ->
char delim *> pure ((Keyword $ T.singleton delim, NoData), [Push LMInterp])
endExtraDelim :: TokenParser a
endExtraDelim = sStrings >>> sInterpMany >>> \case
Nothing -> empty
Just (_, delim) ->
char delim *> pure ((Keyword $ T.singleton delim, NoData), [Pop LMInterp])
beginBlockComment :: TokenParser a
beginBlockComment = sComments >>> sBlockComment >>> \case
Nothing -> empty
Just (delim, _) -> text delim *> pure ((BeginComment, NoData), [Push LMBlockComment])
endBlockComment :: TokenParser a
endBlockComment = sComments >>> sBlockComment >>> \case
Nothing -> empty
Just (_, delim) -> text delim *> pure ((EndComment, NoData), [Pop LMBlockComment])
blockCommentContent :: TokenParser a
blockCommentContent = sComments >>> sBlockComment >>> \case
Nothing -> empty
Just _ ->
keepMode
. fmap ((CommentContent,) . TextData . T.pack)
$ (:) <$> anyChar <*> few anyChar
lineComment :: TokenParser a
lineComment = sComments >>> sLineComment >>> \case
Nothing -> empty
Just sigil -> keepMode . fmap f $ text sigil *> many (noneOf ['\n'])
where
f = (CommentContent,) . TextData . T.pack
|
ktvoelker/helium
|
src/He/Lexer/Tokens.hs
|
gpl-3.0
| 5,896 | 0 | 17 | 1,274 | 2,299 | 1,222 | 1,077 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdSenseHost.Accounts.AdUnits.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Update the supplied ad unit in the specified publisher AdSense account.
--
-- /See:/ <https://developers.google.com/adsense/host/ AdSense Host API Reference> for @adsensehost.accounts.adunits.update@.
module Network.Google.Resource.AdSenseHost.Accounts.AdUnits.Update
(
-- * REST Resource
AccountsAdUnitsUpdateResource
-- * Creating a Request
, accountsAdUnitsUpdate
, AccountsAdUnitsUpdate
-- * Request Lenses
, aauuPayload
, aauuAdClientId
, aauuAccountId
) where
import Network.Google.AdSenseHost.Types
import Network.Google.Prelude
-- | A resource alias for @adsensehost.accounts.adunits.update@ method which the
-- 'AccountsAdUnitsUpdate' request conforms to.
type AccountsAdUnitsUpdateResource =
"adsensehost" :>
"v4.1" :>
"accounts" :>
Capture "accountId" Text :>
"adclients" :>
Capture "adClientId" Text :>
"adunits" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] AdUnit :> Put '[JSON] AdUnit
-- | Update the supplied ad unit in the specified publisher AdSense account.
--
-- /See:/ 'accountsAdUnitsUpdate' smart constructor.
data AccountsAdUnitsUpdate =
AccountsAdUnitsUpdate'
{ _aauuPayload :: !AdUnit
, _aauuAdClientId :: !Text
, _aauuAccountId :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsAdUnitsUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aauuPayload'
--
-- * 'aauuAdClientId'
--
-- * 'aauuAccountId'
accountsAdUnitsUpdate
:: AdUnit -- ^ 'aauuPayload'
-> Text -- ^ 'aauuAdClientId'
-> Text -- ^ 'aauuAccountId'
-> AccountsAdUnitsUpdate
accountsAdUnitsUpdate pAauuPayload_ pAauuAdClientId_ pAauuAccountId_ =
AccountsAdUnitsUpdate'
{ _aauuPayload = pAauuPayload_
, _aauuAdClientId = pAauuAdClientId_
, _aauuAccountId = pAauuAccountId_
}
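-- For example, given some ad unit value @au@ (the identifiers below are made up):
--
-- > accountsAdUnitsUpdate au "ca-host-pub-1234" "pub-5678"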
-- | Multipart request metadata.
aauuPayload :: Lens' AccountsAdUnitsUpdate AdUnit
aauuPayload
= lens _aauuPayload (\ s a -> s{_aauuPayload = a})
-- | Ad client which contains the ad unit.
aauuAdClientId :: Lens' AccountsAdUnitsUpdate Text
aauuAdClientId
= lens _aauuAdClientId
(\ s a -> s{_aauuAdClientId = a})
-- | Account which contains the ad client.
aauuAccountId :: Lens' AccountsAdUnitsUpdate Text
aauuAccountId
= lens _aauuAccountId
(\ s a -> s{_aauuAccountId = a})
instance GoogleRequest AccountsAdUnitsUpdate where
type Rs AccountsAdUnitsUpdate = AdUnit
type Scopes AccountsAdUnitsUpdate =
'["https://www.googleapis.com/auth/adsensehost"]
requestClient AccountsAdUnitsUpdate'{..}
= go _aauuAccountId _aauuAdClientId (Just AltJSON)
_aauuPayload
adSenseHostService
where go
= buildClient
(Proxy :: Proxy AccountsAdUnitsUpdateResource)
mempty
|
brendanhay/gogol
|
gogol-adsense-host/gen/Network/Google/Resource/AdSenseHost/Accounts/AdUnits/Update.hs
|
mpl-2.0
| 3,818 | 0 | 16 | 861 | 466 | 278 | 188 | 77 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.Users.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes an EMM-managed user.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.users.delete@.
module Network.Google.Resource.AndroidEnterprise.Users.Delete
(
-- * REST Resource
UsersDeleteResource
-- * Creating a Request
, usersDelete
, UsersDelete
-- * Request Lenses
, udEnterpriseId
, udUserId
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.users.delete@ method which the
-- 'UsersDelete' request conforms to.
type UsersDeleteResource =
"androidenterprise" :>
"v1" :>
"enterprises" :>
Capture "enterpriseId" Text :>
"users" :>
Capture "userId" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes an EMM-managed user.
--
-- /See:/ 'usersDelete' smart constructor.
data UsersDelete = UsersDelete'
{ _udEnterpriseId :: !Text
, _udUserId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UsersDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'udEnterpriseId'
--
-- * 'udUserId'
usersDelete
:: Text -- ^ 'udEnterpriseId'
-> Text -- ^ 'udUserId'
-> UsersDelete
usersDelete pUdEnterpriseId_ pUdUserId_ =
UsersDelete'
{ _udEnterpriseId = pUdEnterpriseId_
, _udUserId = pUdUserId_
}
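-- For example (both identifiers below are made up):
--
-- > usersDelete "C0123456789" "user-001"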
-- | The ID of the enterprise.
udEnterpriseId :: Lens' UsersDelete Text
udEnterpriseId
= lens _udEnterpriseId
(\ s a -> s{_udEnterpriseId = a})
-- | The ID of the user.
udUserId :: Lens' UsersDelete Text
udUserId = lens _udUserId (\ s a -> s{_udUserId = a})
instance GoogleRequest UsersDelete where
type Rs UsersDelete = ()
type Scopes UsersDelete =
'["https://www.googleapis.com/auth/androidenterprise"]
requestClient UsersDelete'{..}
= go _udEnterpriseId _udUserId (Just AltJSON)
androidEnterpriseService
where go
= buildClient (Proxy :: Proxy UsersDeleteResource)
mempty
|
rueshyna/gogol
|
gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/Users/Delete.hs
|
mpl-2.0
| 3,033 | 0 | 14 | 712 | 385 | 231 | 154 | 61 | 1 |
module Main (main) where
import Test.Api
import Test.Tasty
main :: IO ()
main = defaultMain tests
|
twittner/swagger
|
test/Main.hs
|
mpl-2.0
| 100 | 0 | 6 | 18 | 37 | 21 | 16 | 5 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Datastore.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Datastore.Types
(
-- * Service Configuration
datastoreService
-- * OAuth Scopes
, cloudPlatformScope
, datastoreScope
-- * LatLng
, LatLng
, latLng
, llLatitude
, llLongitude
-- * TransactionOptions
, TransactionOptions
, transactionOptions
, toReadWrite
, toReadOnly
-- * PropertyOrderDirection
, PropertyOrderDirection (..)
-- * Status
, Status
, status
, sDetails
, sCode
, sMessage
-- * GoogleLongrunningOperationMetadata
, GoogleLongrunningOperationMetadata
, googleLongrunningOperationMetadata
, glomAddtional
-- * ReadWrite
, ReadWrite
, readWrite
, rwPreviousTransaction
-- * GoogleDatastoreAdminV1beta1ExportEntitiesResponse
, GoogleDatastoreAdminV1beta1ExportEntitiesResponse
, googleDatastoreAdminV1beta1ExportEntitiesResponse
, gdaveerOutputURL
-- * RollbackRequest
, RollbackRequest
, rollbackRequest
, rrTransaction
-- * ReserveIdsRequest
, ReserveIdsRequest
, reserveIdsRequest
, rirKeys
, rirDatabaseId
-- * PartitionId
, PartitionId
, partitionId
, piNamespaceId
, piProjectId
-- * GoogleDatastoreAdminV1ListIndexesResponse
, GoogleDatastoreAdminV1ListIndexesResponse
, googleDatastoreAdminV1ListIndexesResponse
, gdavlirNextPageToken
, gdavlirIndexes
-- * QueryResultBatch
, QueryResultBatch
, queryResultBatch
, qrbSkippedResults
, qrbSkippedCursor
, qrbEntityResultType
, qrbSnapshotVersion
, qrbEntityResults
, qrbMoreResults
, qrbEndCursor
-- * CompositeFilterOp
, CompositeFilterOp (..)
-- * EntityProperties
, EntityProperties
, entityProperties
, epAddtional
-- * GoogleDatastoreAdminV1ImportEntitiesRequestLabels
, GoogleDatastoreAdminV1ImportEntitiesRequestLabels
, googleDatastoreAdminV1ImportEntitiesRequestLabels
, gdavierlAddtional
-- * BeginTransactionRequest
, BeginTransactionRequest
, beginTransactionRequest
, btrTransactionOptions
-- * RunQueryRequest
, RunQueryRequest
, runQueryRequest
, rqrPartitionId
, rqrGqlQuery
, rqrQuery
, rqrReadOptions
-- * AllocateIdsRequest
, AllocateIdsRequest
, allocateIdsRequest
, airKeys
-- * GoogleDatastoreAdminV1ExportEntitiesMetadata
, GoogleDatastoreAdminV1ExportEntitiesMetadata
, googleDatastoreAdminV1ExportEntitiesMetadata
, gdaveemProgressBytes
, gdaveemOutputURLPrefix
, gdaveemProgressEntities
, gdaveemEntityFilter
, gdaveemCommon
-- * QueryResultBatchEntityResultType
, QueryResultBatchEntityResultType (..)
-- * GoogleDatastoreAdminV1beta1CommonMetadata
, GoogleDatastoreAdminV1beta1CommonMetadata
, googleDatastoreAdminV1beta1CommonMetadata
, gdavcmState
, gdavcmStartTime
, gdavcmEndTime
, gdavcmLabels
, gdavcmOperationType
-- * Empty
, Empty
, empty
-- * CompositeFilter
, CompositeFilter
, compositeFilter
, cfOp
, cfFilters
-- * GoogleDatastoreAdminV1beta1CommonMetadataOperationType
, GoogleDatastoreAdminV1beta1CommonMetadataOperationType (..)
-- * QueryResultBatchMoreResults
, QueryResultBatchMoreResults (..)
-- * GoogleDatastoreAdminV1IndexOperationMetadata
, GoogleDatastoreAdminV1IndexOperationMetadata
, googleDatastoreAdminV1IndexOperationMetadata
, gdaviomProgressEntities
, gdaviomCommon
, gdaviomIndexId
-- * GoogleDatastoreAdminV1beta1ImportEntitiesMetadata
, GoogleDatastoreAdminV1beta1ImportEntitiesMetadata
, googleDatastoreAdminV1beta1ImportEntitiesMetadata
, gdaviemProgressBytes
, gdaviemProgressEntities
, gdaviemEntityFilter
, gdaviemInputURL
, gdaviemCommon
-- * GoogleDatastoreAdminV1beta1Progress
, GoogleDatastoreAdminV1beta1Progress
, googleDatastoreAdminV1beta1Progress
, gdavpWorkCompleted
, gdavpWorkEstimated
-- * BeginTransactionResponse
, BeginTransactionResponse
, beginTransactionResponse
, btrTransaction
-- * MutationResult
, MutationResult
, mutationResult
, mrConflictDetected
, mrKey
, mrVersion
-- * AllocateIdsResponse
, AllocateIdsResponse
, allocateIdsResponse
, aKeys
-- * GqlQuery
, GqlQuery
, gqlQuery
, gqPositionalBindings
, gqNamedBindings
, gqQueryString
, gqAllowLiterals
-- * RunQueryResponse
, RunQueryResponse
, runQueryResponse
, rBatch
, rQuery
-- * GoogleDatastoreAdminV1ExportEntitiesRequestLabels
, GoogleDatastoreAdminV1ExportEntitiesRequestLabels
, googleDatastoreAdminV1ExportEntitiesRequestLabels
, gdaveerlAddtional
-- * GoogleDatastoreAdminV1CommonMetadataOperationType
, GoogleDatastoreAdminV1CommonMetadataOperationType (..)
-- * Value
, Value
, value
, vKeyValue
, vGeoPointValue
, vIntegerValue
, vTimestampValue
, vEntityValue
, vExcludeFromIndexes
, vDoubleValue
, vStringValue
, vBooleanValue
, vMeaning
, vArrayValue
, vNullValue
, vBlobValue
-- * ValueNullValue
, ValueNullValue (..)
-- * GoogleDatastoreAdminV1IndexedPropertyDirection
, GoogleDatastoreAdminV1IndexedPropertyDirection (..)
-- * GoogleDatastoreAdminV1CommonMetadataLabels
, GoogleDatastoreAdminV1CommonMetadataLabels
, googleDatastoreAdminV1CommonMetadataLabels
, gdavcmlAddtional
-- * GoogleDatastoreAdminV1IndexAncestor
, GoogleDatastoreAdminV1IndexAncestor (..)
-- * StatusDetailsItem
, StatusDetailsItem
, statusDetailsItem
, sdiAddtional
-- * LookupRequest
, LookupRequest
, lookupRequest
, lrKeys
, lrReadOptions
-- * ReadOptionsReadConsistency
, ReadOptionsReadConsistency (..)
-- * GoogleDatastoreAdminV1CommonMetadata
, GoogleDatastoreAdminV1CommonMetadata
, googleDatastoreAdminV1CommonMetadata
, gState
, gStartTime
, gEndTime
, gLabels
, gOperationType
-- * GoogleDatastoreAdminV1ExportEntitiesRequest
, GoogleDatastoreAdminV1ExportEntitiesRequest
, googleDatastoreAdminV1ExportEntitiesRequest
, gdaveerOutputURLPrefix
, gdaveerEntityFilter
, gdaveerLabels
-- * Mutation
, Mutation
, mutation
, mBaseVersion
, mInsert
, mUpsert
, mDelete
, mUpdate
-- * GqlQueryNamedBindings
, GqlQueryNamedBindings
, gqlQueryNamedBindings
, gqnbAddtional
-- * GoogleDatastoreAdminV1ExportEntitiesResponse
, GoogleDatastoreAdminV1ExportEntitiesResponse
, googleDatastoreAdminV1ExportEntitiesResponse
, gOutputURL
-- * PropertyReference
, PropertyReference
, propertyReference
, prName
-- * Key
, Key
, key
, kPartitionId
, kPath
-- * GoogleDatastoreAdminV1ImportEntitiesRequest
, GoogleDatastoreAdminV1ImportEntitiesRequest
, googleDatastoreAdminV1ImportEntitiesRequest
, gdavierEntityFilter
, gdavierInputURL
, gdavierLabels
-- * GoogleDatastoreAdminV1IndexState
, GoogleDatastoreAdminV1IndexState (..)
-- * PropertyFilter
, PropertyFilter
, propertyFilter
, pfProperty
, pfOp
, pfValue
-- * Query
, Query
, query
, qStartCursor
, qOffSet
, qKind
, qDistinctOn
, qEndCursor
, qLimit
, qProjection
, qFilter
, qOrder
-- * ArrayValue
, ArrayValue
, arrayValue
, avValues
-- * EntityResult
, EntityResult
, entityResult
, erCursor
, erVersion
, erEntity
-- * Xgafv
, Xgafv (..)
-- * CommitResponse
, CommitResponse
, commitResponse
, crIndexUpdates
, crMutationResults
-- * KindExpression
, KindExpression
, kindExpression
, keName
-- * GoogleLongrunningOperationResponse
, GoogleLongrunningOperationResponse
, googleLongrunningOperationResponse
, glorAddtional
-- * ReadOptions
, ReadOptions
, readOptions
, roReadConsistency
, roTransaction
-- * GoogleDatastoreAdminV1beta1CommonMetadataState
, GoogleDatastoreAdminV1beta1CommonMetadataState (..)
-- * GoogleDatastoreAdminV1EntityFilter
, GoogleDatastoreAdminV1EntityFilter
, googleDatastoreAdminV1EntityFilter
, gdavefNamespaceIds
, gdavefKinds
-- * RollbackResponse
, RollbackResponse
, rollbackResponse
-- * Projection
, Projection
, projection
, pProperty
-- * ReserveIdsResponse
, ReserveIdsResponse
, reserveIdsResponse
-- * Filter
, Filter
, filter'
, fCompositeFilter
, fPropertyFilter
-- * GoogleDatastoreAdminV1Index
, GoogleDatastoreAdminV1Index
, googleDatastoreAdminV1Index
, gdaviState
, gdaviKind
, gdaviProjectId
, gdaviIndexId
, gdaviAncestor
, gdaviProperties
-- * GoogleDatastoreAdminV1beta1CommonMetadataLabels
, GoogleDatastoreAdminV1beta1CommonMetadataLabels
, googleDatastoreAdminV1beta1CommonMetadataLabels
, gAddtional
-- * PropertyFilterOp
, PropertyFilterOp (..)
-- * CommitRequest
, CommitRequest
, commitRequest
, crMutations
, crMode
, crTransaction
-- * CommitRequestMode
, CommitRequestMode (..)
-- * GoogleLongrunningListOperationsResponse
, GoogleLongrunningListOperationsResponse
, googleLongrunningListOperationsResponse
, gllorNextPageToken
, gllorOperations
-- * GoogleDatastoreAdminV1ImportEntitiesMetadata
, GoogleDatastoreAdminV1ImportEntitiesMetadata
, googleDatastoreAdminV1ImportEntitiesMetadata
, gProgressBytes
, gProgressEntities
, gEntityFilter
, gInputURL
, gCommon
-- * GoogleDatastoreAdminV1Progress
, GoogleDatastoreAdminV1Progress
, googleDatastoreAdminV1Progress
, gWorkCompleted
, gWorkEstimated
-- * PathElement
, PathElement
, pathElement
, peKind
, peName
, peId
-- * Entity
, Entity
, entity
, eKey
, eProperties
-- * GoogleDatastoreAdminV1beta1EntityFilter
, GoogleDatastoreAdminV1beta1EntityFilter
, googleDatastoreAdminV1beta1EntityFilter
, gNamespaceIds
, gKinds
-- * ReadOnly
, ReadOnly
, readOnly
-- * GoogleDatastoreAdminV1IndexedProperty
, GoogleDatastoreAdminV1IndexedProperty
, googleDatastoreAdminV1IndexedProperty
, gdavipDirection
, gdavipName
-- * LookupResponse
, LookupResponse
, lookupResponse
, lrDeferred
, lrFound
, lrMissing
-- * GoogleLongrunningOperation
, GoogleLongrunningOperation
, googleLongrunningOperation
, gloDone
, gloError
, gloResponse
, gloName
, gloMetadata
-- * PropertyOrder
, PropertyOrder
, propertyOrder
, poProperty
, poDirection
-- * GoogleDatastoreAdminV1beta1ExportEntitiesMetadata
, GoogleDatastoreAdminV1beta1ExportEntitiesMetadata
, googleDatastoreAdminV1beta1ExportEntitiesMetadata
, gooProgressBytes
, gooOutputURLPrefix
, gooProgressEntities
, gooEntityFilter
, gooCommon
-- * GqlQueryParameter
, GqlQueryParameter
, gqlQueryParameter
, gqpCursor
, gqpValue
-- * GoogleDatastoreAdminV1CommonMetadataState
, GoogleDatastoreAdminV1CommonMetadataState (..)
) where
import Network.Google.Datastore.Types.Product
import Network.Google.Datastore.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v1' of the Cloud Datastore API. This contains the host and root path used as a starting point for constructing service requests.
datastoreService :: ServiceConfig
datastoreService
= defaultService (ServiceId "datastore:v1")
"datastore.googleapis.com"
-- | See, edit, configure, and delete your Google Cloud Platform data
cloudPlatformScope :: Proxy '["https://www.googleapis.com/auth/cloud-platform"]
cloudPlatformScope = Proxy
-- | View and manage your Google Cloud Datastore data
datastoreScope :: Proxy '["https://www.googleapis.com/auth/datastore"]
datastoreScope = Proxy
|
brendanhay/gogol
|
gogol-datastore/gen/Network/Google/Datastore/Types.hs
|
mpl-2.0
| 12,721 | 0 | 7 | 2,931 | 1,263 | 877 | 386 | 354 | 1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
----------------------------------------------------------------------
-- |
-- Module : Text.PrettyPrint.Leijen.PrettyPrec
-- Copyright : (c) Conal Elliott 2009
-- License : BSD
--
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Pretty class with precedence
----------------------------------------------------------------------
module Text.PrettyPrint.Leijen.PrettyPrec
( PrettyPrec(..)
-- * 'Show' helpers
, showsPretty, showsPretty'
, showsPrettyPrec, showsPrettyPrec'
) where
#if __GLASGOW_HASKELL__ < 612
import Data.Maybe (maybe)
#endif
import Data.Ratio (Ratio)
import Text.PrettyPrint.Leijen
-- | Pretty printing with precedence. A cross between 'Show' and 'Pretty'.
-- The 'prettyPrec' method defaults to discarding the context precedence
-- and invoking 'pretty'. The reason 'PrettyPrec' derives from Pretty is
-- that so that this default is possible.
--
-- To make a 'Show' instance for a 'PrettyPrec' instance 'Foo', define
--
-- instance Show Foo where showsPrec p e = showsPrec p (prettyPrec p e)
class Pretty a => PrettyPrec a where
prettyPrec :: Int -> a -> Doc
prettyPrec = const pretty -- default
-- Will we need prettyListPrec?
--
-- prettyList :: [a] -> Doc
-- prettyList = list . map pretty
instance PrettyPrec Doc
instance PrettyPrec ()
instance PrettyPrec Bool
instance PrettyPrec Char
instance PrettyPrec Int
instance PrettyPrec Integer
instance PrettyPrec Float
instance PrettyPrec Double
-- Orphan. Missing from wl-pprint
instance Show a => Pretty (Ratio a) where pretty = text . show
instance Pretty a => PrettyPrec [a]
instance (Pretty a,Pretty b) => PrettyPrec (a,b)
instance (Pretty a,Pretty b,Pretty c) => PrettyPrec (a,b,c)
instance PrettyPrec a => PrettyPrec (Maybe a) where
prettyPrec p = maybe empty (prettyPrec p)
instance Show a => PrettyPrec (Ratio a) where
prettyPrec = const (text . show)
-- TODO: Revisit Ratio. Use p
-- Price to pay for assuming HasExpr is a superclass of HasType. Revisit.
instance Pretty (a -> b) where
pretty = error "PrettyPrec: can't really pretty a function. Sorry."
instance PrettyPrec (a -> b)
{--------------------------------------------------------------------
'Show' helpers
--------------------------------------------------------------------}
pageWidth :: Int
pageWidth = 80
-- | Convenient definition for 'showsPrec' in a 'Show' instance. Uses
-- ribbon fraction of 0.9 and width of 80. To set these values, use
-- 'showsPrettyPrec'' instead. See also 'showsPretty'.
showsPrettyPrec :: PrettyPrec a => Int -> a -> ShowS
showsPrettyPrec = showsPrettyPrec' 0.9 pageWidth
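-- For instance, for a hypothetical type (not part of this module):
--
-- > data Foo = Foo Int
-- > instance Pretty Foo where pretty (Foo n) = text "Foo" <+> pretty n
-- > instance PrettyPrec Foo
-- > instance Show Foo where showsPrec = showsPrettyPrec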
-- | Convenient definition for 'showsPrec' in a 'Show' instance.
-- Arguments are ribbon fraction and line width. To get my defaults, use
-- 'showsPrettyPrec' instead.
showsPrettyPrec' :: PrettyPrec a => Float -> Int -> Int -> a -> ShowS
showsPrettyPrec' rfrac w p = showsG (prettyPrec p) rfrac w
-- | Convenient definition for 'showsPrec' in a 'Show' instance. Uses
-- ribbon fraction of 0.9 and width of 80. To set these values, use
-- 'showsPretty'' instead. If you want to take precedence into account,
-- use 'showsPrettyPrec' instead.
showsPretty :: Pretty a => Int -> a -> ShowS
showsPretty = showsPretty' 0.9 pageWidth
-- | Convenient definition for 'showsPrec' in a 'Show' instance.
-- Arguments are ribbon fraction and line width. To get my defaults, use
-- 'showsPretty' instead. Ignores precedence, which 'Pretty' doesn't
-- understand. If you have a 'PrettyPrec' instance, you can use
-- 'showsPrettyPrec' instead.
showsPretty' :: Pretty a => Float -> Int -> Int -> a -> ShowS
showsPretty' rfrac w _ = showsG pretty rfrac w
-- General 'Doc'-friendly helper for 'showsPrec' definitions.
showsG :: (a -> Doc) -> Float -> Int -> a -> ShowS
showsG toDoc rfrac w a = displayS (renderPretty rfrac w (toDoc a))
|
conal/shady-gen
|
src/Text/PrettyPrint/Leijen/PrettyPrec.hs
|
agpl-3.0
| 3,944 | 0 | 9 | 671 | 655 | 358 | 297 | 43 | 1 |
module Main where
import Graphics.SOE.Gtk
type Vector = (Double, Double)
(<+>) :: Vector -> Vector -> Vector
(a, b) <+> (c, d) = (a+c, b+d)
(<->) :: Vector -> Vector -> Vector
(a, b) <-> (c, d) = (a-c, b-d)
(.*) :: Double -> Vector -> Vector
k .* (a, b) = (k * a, k * b)
(*.) :: Vector -> Double -> Vector
(*.) = flip (.*)
(<*>) :: Vector -> Vector -> Double
(a, b) <*> (c, d) = a * c + b * d
norm :: Vector -> Double
norm v = sqrt (v <*> v)
dist :: Vector -> Vector -> Double
dist v1 v2 = norm (v1 <-> v2)
xunit :: Vector
xunit = (1, 0)
yunit :: Vector
yunit = (0,1)
ortho :: Vector -> Vector
ortho v@(a, b) = (m11*a + m12*b, m21*a+m22*b)
where m11 = cos ang
m12 = sin ang
m21 = -sin ang
m22 = cos ang
ang = pi/2
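-- With ang = pi/2 the matrix above sends (a, b) to (b, -a) (up to floating-point
-- noise), i.e. a vector perpendicular to the input.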
type Line = (Vector, Vector)
gen :: [Line] -> [Line]
gen [] = []
gen (l@(v1,v2):ls)
| dist v1 v2 < 3 = l : gen ls
| otherwise = let dir = v1 <-> v2
ort = ortho dir
p = v2 <+> ((1/3) .* dir)
q = v2 <+> ((2/3) .* dir)
r = v2 <+> (0.5 .* dir) <+> ((1/3) .* ort)
s = v2 <+> (0.5 .* dir) <+> ((-1/3) .* ort)
in --(v2,p) : (q,v1) : gen ((p,r) : (q,r) : (p,s) : (q,s) : ls)
gen ((v2,p) : (q,v1) : (p,r) : (q,r) : (p,s) : (q,s) : ls)
                -- a prettier picture (though contrary to the assignment) results
                -- from replacing the previous expression with the following:
                -- gen ((v2,p) : (q,v1) : (p,r) : (q,r) : (p,s) : (q,s) : ls)
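-- In other words: every segment of 3 units or longer is replaced by six shorter
-- ones (the two outer thirds plus the four sides of the middle diamond spanned
-- by r and s), and the subdivision recurses until each segment is short enough.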
draw :: [Line] -> Graphic
draw [] = emptyGraphic
draw ((p1,p2):ls) = overGraphic (line (f p1) (f p2)) (draw ls)
where f (x,y) = (round (x), round (y))
test = runGraphics $ do w <- openWindow "T 3.7-8" (200, 200)
loop w
closeWindow w
where loop w = do (xmax', ymax') <- getWindowSize w
let xmax = fromIntegral xmax'
ymax = fromIntegral ymax'
ls = gen [((1/8 * xmax, 1/2 * ymax),
(7/8 * xmax, 1/2 * ymax))]
setGraphic w (draw ls)
e <- getWindowEvent w
case e of Resize -> loop w
_ -> return ()
main = test
|
thiagoarrais/gtk2hs
|
demo/soe/Snowflake.hs
|
lgpl-2.1
| 2,333 | 3 | 17 | 917 | 1,057 | 578 | 479 | 57 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE LambdaCase #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Main
where
import Paths_pgdl (version)
import Data.Version (showVersion)
import qualified Data.Text as T
import Data.Text (Text)
import Data.Maybe
import Control.Monad.IO.Class
import Control.Applicative
import System.FilePath ((</>))
import System.Environment (getArgs)
import System.Directory (removeFile, doesFileExist)
import Text.HTML.DirectoryListing.Type
import qualified Graphics.Vty as V
import qualified Brick.Main as M
import qualified Brick.Types as T
import qualified Brick.Widgets.List as L
import qualified Brick.Widgets.Center as C
import qualified Brick.Widgets.Edit as E
import qualified Brick.AttrMap as A
import Brick.Widgets.Core
import Brick.Types (Widget)
import Brick.Util (on)
import DownloadInterface
import EntryAttrViewer
import Utils
import qualified Configure as Conf
import Cache
import Types
import Networking
import DList
import qualified Utils as U
data MainState = LState DList
| SearchState DList (E.Editor String String)
main :: IO ()
main =
getArgs >>= \case
["-v"] -> putStrLn $ "pgdl " ++ showVersion version
["--version"] -> putStrLn $ "pgdl " ++ showVersion version
_ -> mainUI
mainUI :: IO ()
mainUI = do
(dNodes, nr) <- initializeResource
let
initialState :: MainState
initialState = LState $ newDList dNodes
theApp =
M.App { M.appDraw = drawUI
, M.appChooseCursor = M.neverShowCursor
, M.appHandleEvent = appEvent
, M.appStartEvent = return
, M.appAttrMap = const theMap
}
appEvent :: MainState -> T.BrickEvent String e -> T.EventM String (T.Next MainState)
appEvent ls@(LState dlst) (T.VtyEvent e) = case e of
V.EvKey V.KEsc [] -> M.halt ls
V.EvKey (V.KChar 'q') [] -> M.halt ls
V.EvKey V.KEnter mdf -> case extractSelectedDNode dlst of
Nothing -> M.continue ls
Just dnode -> case dnode of
Directory entry openOp -> do
dns <- liftIO openOp -- grab the subdirectory
M.continue $ LState (pushDList dlst dns)
File entry url False -> do
let fn = decodedName entry
path <- liftIO $ Conf.getLocaldir >>= \case
Nothing -> return fn
Just pre -> return $ T.pack (T.unpack pre </> T.unpack fn)
let dui = downloadInterface DownloadSettings { networkResource = nr
, relativeUrl = url
, localStoragePath = path
, justOpen = False
, continueDownload = False
}
M.suspendAndResume $ do
dui
ex <- doesFileExist (T.unpack path)
return $ LState (fromJust $ replaceSelectedDNode dlst (File entry url ex))
                    -- ^ this fromJust is ok since we can be sure that
                    -- something has been selected. However, this needs a refactor in the future.
File entry url True -> do -- already downloaded file
let fn = decodedName entry
path <- liftIO $ Conf.getLocaldir >>= \case
Nothing -> return fn
Just pre -> return $ T.pack (T.unpack pre </> T.unpack fn)
let dui = downloadInterface DownloadSettings { networkResource = nr
, relativeUrl = url
, localStoragePath = path
, justOpen = mdf /= [V.MMeta]
, continueDownload = mdf == [V.MMeta]
}
M.suspendAndResume $ dui >> return ls
V.EvKey V.KLeft [] -> M.continue . LState $ popDList dlst
V.EvKey V.KRight [] -> case extractSelectedDNode dlst of
Nothing -> M.continue ls
Just d -> M.suspendAndResume $ entryAttrViewer d >> return ls
V.EvKey (V.KChar '/') [] -> M.continue $ SearchState (dupDList dlst) (E.editor "searchBar" (Just 1) "")
V.EvKey (V.KChar 'd') [] -> case extractSelectedDNode dlst of
Nothing -> M.continue ls
Just dnode -> case dnode of
Directory _ _ -> M.continue ls
File entry url False -> M.continue ls
File entry url True -> do -- already downloaded file
let fn = decodedName entry
path <- liftIO $ Conf.getLocaldir >>= \case
Nothing -> return $ T.unpack fn
Just pre -> return $ T.unpack pre </> T.unpack fn
liftIO $ removeFile path
ex <- liftIO $ doesFileExist path
M.continue $ LState $ fromJust (replaceSelectedDNode dlst (File entry url ex))
            -- ^ this fromJust is ok since we can be sure that
            -- something has been selected. However, this needs a refactor in the future.
ev -> M.continue =<< do
dlst' <- adjustCurrentBrickList dlst $ L.handleListEvent ev
return $ LState dlst'
appEvent ss@(SearchState dlst ed) (T.VtyEvent e) = case e of
V.EvKey V.KEsc [] -> M.halt ss
V.EvKey V.KEnter [] -> case E.getEditContents ed of
[""] -> M.continue (LState $ popDList dlst)
_ -> M.continue $ LState dlst
ev -> do
newEd <- E.handleEditorEvent ev ed
let
linesToALine [l] = l
linesToALine _ = error "not one line of words in the search bar, why?"
keyword = T.pack . linesToALine $ E.getEditContents newEd
cond :: DNode -> Bool
cond (File entry _ _) = keyword `isKeyWordOf` decodedName entry
cond (Directory entry _) = keyword `isKeyWordOf` decodedName entry
isKeyWordOf a b = T.toCaseFold a `T.isInfixOf` T.toCaseFold b
M.continue $ SearchState (filterDList dlst cond) newEd
appEvent _ _ = error "unknown event received in event loop."
theMap = A.attrMap V.defAttr [ (L.listAttr, V.white `on` V.black)
, ("directory", V.black `on` V.magenta)
, ("file", V.black `on` V.cyan)
, ("downloaded file", V.black `on` V.yellow)
, ("statusBar", V.black `on` V.green)
, ("searchBar", V.black `on` V.blue)
]
M.defaultMain theApp initialState
return ()
-- | use cropping to draw UI in the future?
drawUI :: MainState -> [Widget String]
drawUI mainState = case mainState of
(LState dlst) -> [ C.hCenter . hLimit U.terminalWidth $
vBox [entryList dlst, statusBar (extractSelectedDNode dlst)]
]
(SearchState dlst e) -> [ C.hCenter . hLimit U.terminalWidth $
vBox [entryList dlst, searchBar e]
]
where
entryList dlist = renderDList dlist $ \b d -> hBox $ listDrawElement b d
listDrawElement :: Bool -> DNode -> [Widget String]
listDrawElement sel dn = [ color (not sel) attrName . vLimit 3 . hLimit 1 $ fill ' '
, color sel attrName text
, color (not sel) attrName . vLimit 3 . hLimit 1 $ fill ' '
]
where
attrName = case dn of
Directory _ _ -> "directory"
File _ _ False -> "file"
File _ _ True -> "downloaded file"
name = case dn of
Directory a _ -> a
File a _ _ -> a
text = txt . placeTextIntoRectangle 3 (U.terminalWidth-2) . stripWidth $ decodedName name
color True attr = withAttr attr
color False _ = id
stripWidth :: Text -> Text
stripWidth t = case U.cutTextByDisplayLength (U.terminalWidth-7) t of
[] -> ""
[singleLine] -> singleLine
(x:_) -> x `T.append` "..."
searchBar ed = forceAttr "searchBar" $ hBox [txt " search: ", E.renderEditor (str . unlines) True ed]
statusBar = withAttr "statusBar" . str . expand . info
info Nothing = " Nothing selected by user"
info (Just sel) = " " ++ show (lastModified etr) ++ " " ++ maybe "Directory" friendlySize (fileSize etr)
where
etr = entry sel
entry (Directory e _) = e
entry (File e _ _) = e
expand s = s ++ replicate 88 ' '
initializeResource :: IO ([DNode], NetworkResource)
initializeResource =
getArgs >>= \case
["--offline"] -> readCache >>= \case
Nothing -> error "no offline data or data corrupted."
Just dlst -> return (dlst, error "no network resource, offline mode.")
online -> do
(rootUrl, up) <- case online of
[] -> Conf.getServpath >>= \case
Nothing -> error "example usage: pgdl https://www.kernel.org/pub/"
Just ru -> Conf.getUsername >>= \case
Nothing -> return (ru, Nothing)
Just user -> Conf.getPassword >>= \case
Nothing -> do
pass <- U.askPassword
return (ru, Just (user, pass))
Just pass -> return (ru, Just (user, pass))
[r] -> return (T.pack r, Nothing)
_ -> error "too many arguments."
putStrLn "loading webpage..."
putStrLn "(you can use 'pgdl --offline' to browse the webpage you load last time)"
nr <- genNetworkResource rootUrl up
dNodes <- fetch nr
return (dNodes, nr)
|
sifmelcara/pgdl
|
src/Main.hs
|
unlicense
| 10,980 | 0 | 32 | 4,651 | 2,837 | 1,445 | 1,392 | 193 | 25 |
module Git.Command.LsFiles (run) where
run :: [String] -> IO ()
run args = return ()
|
wereHamster/yag
|
Git/Command/LsFiles.hs
|
unlicense
| 85 | 0 | 7 | 15 | 42 | 23 | 19 | 3 | 1 |
{-# LANGUAGE TupleSections #-}
module Main (main) where
import Prelude hiding (catch)
import Control.Applicative ((<$>))
import Control.Concurrent (forkIO, newEmptyMVar, putMVar, takeMVar)
--import Control.Monad (unless)
import Data.Function (on)
import Data.List ((\\), maximumBy, transpose)
import Data.Maybe ({-isJust,-} mapMaybe)
import System.Environment (getArgs)
import System.IO (Handle, hGetLine, hFlush, hPutStrLn)
import System.Process (ProcessHandle, runInteractiveCommand, terminateProcess)
import Poker.Cards
import Poker.Hands
data Player = Player
{ pName :: String
, pScore :: Int
, pStdin :: Handle
, pStdout :: Handle
, pProc :: ProcessHandle }
instance Show Player where
show p = show (pName p, pScore p)
initPlayer :: String -> String -> IO Player
initPlayer name exec = do
(inh, outh, _, proch) <- runInteractiveCommand exec
return $ Player name 0 inh outh proch
initGame :: IO [Player]
initGame = do
nes <- pairs <$> getArgs
mapM (uncurry initPlayer) nes
where
pairs (n:e:as) = (n, e) : pairs as
pairs _ = []
dealCards :: Player -> [Card] -> IO ()
dealCards p cs = do
hPutStrLn (pStdin p) $ unwords $ map show cs
hFlush (pStdin p)
readHand :: Player -> Int -> IO (Maybe Hand)
readHand p n = do
cs <- map read . words <$> hGetLine (pStdout p)
case hands cs of
[] -> return Nothing
(h,_):_ -> return $ if length (cardList h) == n then Just h else Nothing
checkPlay :: [Card] -> [Hand] -> Maybe [Hand]
checkPlay cs ply = if nonInc ply && sameSet then Just ply else Nothing
where
sameSet =
let cs' = concatMap cardList ply
in cs \\ cs' == [] && cs' \\ cs == []
nonInc (h1:h2:hs)
| h1 >= h2 = nonInc (h2:hs)
| otherwise = False
nonInc _ = True
play :: Player -> [Card] -> IO (Maybe [Hand])
play p cs = do
dealCards p cs
mTopH <- readHand p 5
mMidH <- readHand p 5
mBotH <- readHand p 3
return $ sequence [mTopH, mMidH, mBotH] >>= checkPlay cs
removeDQs :: [(Player, Maybe [Hand])] -> [(Player, [Hand])]
removeDQs = mapMaybe $ \(p, mh) -> (p,) <$> mh
liftHs :: [(Player, [Hand])] -> [[(Player, Hand)]]
liftHs = map $ \(p, hs) -> map (p,) hs
winners :: [[(Player, Hand)]] -> [(Player, Hand)]
winners = map $ maximumBy (compare `on` snd)
homog :: Eq a => [a] -> Bool
homog = all (uncurry (==)) . pairs
where
pairs (x:y:zs) = (x, y) : pairs (y:zs)
pairs _ = []
update :: Player -> Int -> [Player] -> [Player]
update _ _ [] = []
update p pts (p':ps)
| pName p == pName p' = p' { pScore = pts + pScore p' } : ps
| otherwise = p' : update p pts ps
bonusT :: Hand
bonusT = FourKind (read "(2c, 2d, 2h, 2s)") []
bonusM :: Hand
bonusM = FullHouse (read "(2c, 2d, 2h)") (read "(3c, 3d)")
bonusB :: Hand
bonusB = ThreeKind (read "(2c, 2d, 2h)") []
score :: [Player] -> [(Player, Hand)] -> [Player]
score ps ((pT, hT):(pM, hM):(pB, hB):_) =
update pT ptsT $
update pM ptsM $
update pB ptsB ps
where
ptsT = ptsS + if hT >= bonusT then 2 else 1
ptsM = if hM >= bonusM then 2 else 1
ptsB = if hB >= bonusB then 2 else 1
ptsS = if homog $ map pName [pT, pM, pB] then 1 else 0
score _ _ = error "score called with < 3 rounds"
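-- In short: the winner of each row scores 1 point (2 if the winning hand is at
-- least the bonus hand for that row), and when one player wins all three rows,
-- 'ptsS' adds one extra point on top.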
threadIOs :: [IO a] -> IO [a]
threadIOs acts = mapM go acts >>= mapM takeMVar
where
go act = do
m <- newEmptyMVar
_ <- forkIO $ act >>= putMVar m
return m
playRound :: [Player] -> IO [Player]
playRound ps = do
ds <- deals <$> shuffle deck
mPlays <- threadIOs $ map (uncurry play) $ zip ps ds
let ps' = score ps $ winners $ transpose $ liftHs $ removeDQs $ zip ps mPlays
return ps'
where
deals [] = []
deals xs = let (d, cs) = splitAt 13 xs in d : deals cs
untilM :: Monad m => (a -> Bool) -> (a -> m a) -> a -> m a
untilM c f x
| c x = return x
| otherwise = f x >>= untilM c f
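-- For example, in any monad: @untilM (>= 10) (return . (*2)) 1@ yields 16
-- (1 -> 2 -> 4 -> 8 -> 16, stopping at the first value satisfying the predicate).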
main :: IO ()
main = do
ps <- initGame
ps' <- untilM (any ((>= 11) . pScore)) playRound ps
mapM_ (putStrLn . show) ps'
mapM_ (terminateProcess . pProc) ps'
|
mkscrg/chinese-poker
|
dealer/main.hs
|
bsd-2-clause
| 3,974 | 0 | 15 | 948 | 1,890 | 987 | 903 | 112 | 5 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE EmptyDataDecls #-}
-- | * Author: Jefferson Heard (jefferson.r.heard at gmail.com)
--
-- * Copyright 2008 Renaissance Computing Institute < http://www.renci.org >
--
-- * License: GNU LGPL
--
-- * Compatibility GHC (I could change the data declarations to not be empty and that would make it more generally compatible, I believe)
--
-- * Description:
--
-- Use FreeType 2 Fonts in OpenGL. Requires the FTGL library and FreeType libraries.
-- available at < http://ftgl.wiki.sourceforge.net/ > . The most important functions for
-- everyday use are renderFont and the create*Font family of functions. To render a
-- simple string inside OpenGL, assuming you have OpenGL initialized and a current
-- pen color, all you need is:
--
-- > do font <- createTextureFont "Font.ttf"
-- > setFontFaceSize font 24 72
-- > renderFont font "Hello world!"
--
-- Fonts are rendered so that a single point is an OpenGL unit, and a point is 1:72 of
-- an inch.
module Graphics.Rendering.FTGL
where
import Control.Monad.IO.Class
import System.IO.Unsafe (unsafePerformIO)
import Foreign.C
import Foreign.Ptr
import Foreign.Marshal.Alloc
import Foreign.Marshal.Array
import Data.Bits
import Data.Char (ord)
import qualified Data.ByteString.Char8 as BS8
import Data.StateVar.Trans
import Control.Applicative ((<$>))
foreign import ccall unsafe "ftglCreateBitmapFont" fcreateBitmapFont :: CString -> IO Font
{-# INLINE fcreateBitmapFont #-}
-- | Create a bitmapped version of a TrueType font. Bitmapped versions will not
-- | respond to matrix transformations, but rather must be transformed using the
-- | raster positioning functions in OpenGL
createBitmapFont :: BS8.ByteString -> IO Font
createBitmapFont = flip BS8.useAsCString fcreateBitmapFont
{-# INLINE createBitmapFont #-}
foreign import ccall unsafe "ftglCreateBufferFont" fcreateBufferFont :: CString -> IO Font
{-# INLINE fcreateBufferFont #-}
-- | Create a buffered version of a TrueType font. This stores the entirety of
-- | a string in a texture, "buffering" it before rendering. Very fast if you
-- | will be repeatedly rendering the same strings over and over.
createBufferFont :: BS8.ByteString -> IO Font
createBufferFont = flip BS8.useAsCString fcreateBufferFont
{-# INLINE createBufferFont #-}
foreign import ccall unsafe "ftglCreateOutlineFont" fcreateOutlineFont :: CString -> IO Font
{-# INLINE fcreateOutlineFont #-}
-- | Create an outline version of a TrueType font. This uses actual geometry
-- | and will scale independently without loss of quality. Faster than polygons
-- | but slower than texture or buffer fonts.
createOutlineFont :: BS8.ByteString -> IO Font
createOutlineFont = flip BS8.useAsCString fcreateOutlineFont
{-# INLINE createOutlineFont #-}
foreign import ccall unsafe "ftglCreatePixmapFont" fcreatePixmapFont :: CString -> IO Font
{-# INLINE fcreatePixmapFont #-}
-- | Create a pixmap version of a TrueType font. Higher quality than the bitmap
-- | font without losing any performance. Use this if you don't mind using
-- | set and get RasterPosition.
createPixmapFont :: BS8.ByteString -> IO Font
createPixmapFont = flip BS8.useAsCString fcreatePixmapFont
{-# INLINE createPixmapFont #-}
foreign import ccall unsafe "ftglCreatePolygonFont" fcreatePolygonFont :: CString -> IO Font
{-# INLINE fcreatePolygonFont #-}
-- | Create polygonal display list fonts. These scale independently without
-- | losing quality, unlike texture or buffer fonts, but can be impractical
-- | for large amounts of text because of the high number of polygons needed.
-- | Additionally, they do not, unlike the textured fonts, create artifacts
-- | within the square formed at the edge of each character.
createPolygonFont :: BS8.ByteString -> IO Font
createPolygonFont = flip BS8.useAsCString fcreatePolygonFont
{-# INLINE createPolygonFont #-}
foreign import ccall unsafe "ftglCreateTextureFont" fcreateTextureFont :: CString -> IO Font
{-# INLINE fcreateTextureFont #-}
-- | Create textured display list fonts. These can scale somewhat well,
-- | but lose quality quickly. They are much faster than polygonal fonts,
-- | though, so are suitable for large quantities of text. Especially suited
-- | well to text that changes with most frames, because it doesn't incur the
-- | (normally helpful) overhead of buffering.
createTextureFont :: BS8.ByteString -> IO Font
createTextureFont = flip BS8.useAsCString fcreateTextureFont
{-# INLINE createTextureFont #-}
foreign import ccall unsafe "ftglCreateExtrudeFont" fcreateExtrudeFont :: CString -> IO Font
{-# INLINE fcreateExtrudeFont #-}
-- | Create a 3D extruded font. This is the only way of creating 3D fonts
-- | within FTGL. Could be fun to use a geometry shader to get different
-- | effects by warping the otherwise square nature of the font. Polygonal.
-- | Scales without losing quality. Slower than all other fonts.
createExtrudeFont :: BS8.ByteString -> IO Font
createExtrudeFont = flip BS8.useAsCString fcreateExtrudeFont
{-# INLINE createExtrudeFont #-}
-- | Create a simple layout
foreign import ccall unsafe "ftglCreateSimpleLayout" createSimpleLayout :: IO Layout
{-# INLINE createSimpleLayout #-}
-- | Set the layout's font.
foreign import ccall unsafe "ftglSetLayoutFont" setLayoutFont :: Layout -> Font -> IO ()
{-# INLINE setLayoutFont #-}
-- | Get the embedded font from the Layout
foreign import ccall unsafe "ftglGetLayoutFont" getLayoutFont :: Layout -> IO Font
{-# INLINE getLayoutFont #-}
layoutFont :: MonadIO m => Layout -> StateVar m Font
layoutFont l = makeStateVar (liftIO $ getLayoutFont l) (liftIO . setLayoutFont l)
{-# INLINE layoutFont #-}
-- | Set the line length, I believe in OpenGL units, although I'm not sure.
foreign import ccall unsafe "ftglSetLayoutLineLength" setLayoutLineLength :: Layout -> CFloat -> IO ()
{-# INLINE setLayoutLineLength #-}
foreign import ccall unsafe "ftglGetLayoutLineLength" fgetLayoutLineLength :: Layout -> IO CFloat
{-# INLINE fgetLayoutLineLength #-}
layoutLineLength :: MonadIO m => Layout -> StateVar m CFloat
layoutLineLength l = makeStateVar
(liftIO $ realToFrac <$> fgetLayoutLineLength l)
(liftIO . setLayoutLineLength l)
{-# INLINE layoutLineLength #-}
foreign import ccall unsafe "ftglSetLayoutAlignment" fsetLayoutAlignment :: Layout -> CInt -> IO ()
{-# INLINE fsetLayoutAlignment #-}
foreign import ccall unsafe "ftglGetLayoutAlignement" fgetLayoutAlignment :: Layout -> IO CInt
{-# INLINE fgetLayoutAlignment #-}
layoutAlignment :: MonadIO m => Layout -> StateVar m TextAlignment
layoutAlignment l = makeStateVar
(liftIO $ toEnum . fromIntegral <$> fgetLayoutAlignment l)
(liftIO . fsetLayoutAlignment l . fromIntegral . fromEnum)
{-# INLINE layoutAlignment #-}
foreign import ccall unsafe "ftglSetLayoutLineSpacing" fsetLayoutLineSpacing :: Layout -> CFloat -> IO ()
{-# INLINE fsetLayoutLineSpacing #-}
layoutLineSpacing :: MonadIO m => Layout -> SettableStateVar m Float
layoutLineSpacing l = makeSettableStateVar $ liftIO . fsetLayoutLineSpacing l . realToFrac
{-# INLINE layoutLineSpacing #-}
-- | Destroy a font
foreign import ccall unsafe "ftglDestroyFont" destroyFont :: Font -> IO ()
{-# INLINE destroyFont #-}
foreign import ccall unsafe "ftglAttachFile" fattachFile :: Font -> CString -> IO ()
{-# INLINE fattachFile #-}
-- | Attach a metadata file to a font.
attachFile :: Font -> BS8.ByteString -> IO ()
attachFile font str = BS8.useAsCString str $ fattachFile font
{-# INLINE attachFile #-}
-- | Attach some external data (often kerning) to the font
foreign import ccall unsafe "ftglAttachData" attachData :: Font -> Ptr () -> IO ()
{-# INLINE attachData #-}
-- | Set the font's character map
foreign import ccall unsafe "ftglSetFontCharMap" fsetFontCharMap :: Font -> CInt -> IO ()
{-# INLINE fsetFontCharMap #-}
charMap :: MonadIO m => Font -> SettableStateVar m CharMap
charMap font = makeSettableStateVar $ \charmap -> liftIO $ fsetFontCharMap font (marshalCharMap charmap)
{-# INLINE charMap #-}
foreign import ccall unsafe "ftglGetFontCharMapCount" fgetFontCharMapCount :: Font -> IO CInt
{-# INLINE fgetFontCharMapCount #-}
-- | Get the number of characters loaded into the current charmap for the font.
getFontCharMapCount :: Font -> Int
getFontCharMapCount f = fromIntegral . unsafePerformIO $ fgetFontCharMapCount f
{-# INLINE getFontCharMapCount #-}
foreign import ccall unsafe "ftglGetFontCharMapList" fgetFontCharMapList :: Font -> IO (Ptr CInt)
{-# INLINE fgetFontCharMapList #-}
-- | Get the different character mappings available in this font.
getFontCharMapList :: Font -> Ptr CInt
getFontCharMapList f = unsafePerformIO $ fgetFontCharMapList f
{-# INLINE getFontCharMapList #-}
foreign import ccall unsafe "ftglSetFontFaceSize" fsetFontFaceSize :: Font -> CInt -> CInt -> IO CInt
{-# INLINE fsetFontFaceSize #-}
setFontFaceSize :: Font -> Int -> Int -> IO CInt
setFontFaceSize f s x = fsetFontFaceSize f (fromIntegral s) (fromIntegral x)
{-# INLINE setFontFaceSize #-}
foreign import ccall unsafe "ftglGetFontFaceSize" fgetFontFaceSize :: Font -> IO CInt
{-# INLINE fgetFontFaceSize #-}
-- | Get the current font face size in points.
fontFaceSize :: MonadIO m => Font -> GettableStateVar m Int
fontFaceSize f = makeGettableStateVar $ liftIO $ fromIntegral <$> fgetFontFaceSize f
{-# INLINE fontFaceSize #-}
foreign import ccall unsafe "ftglSetFontDepth" fsetFontDepth :: Font -> CFloat -> IO ()
{-# INLINE fsetFontDepth #-}
fontDepth :: MonadIO m => Font -> SettableStateVar m Float
fontDepth font = makeSettableStateVar $ \depth -> liftIO $ fsetFontDepth font (realToFrac depth)
{-# INLINE fontDepth #-}
foreign import ccall unsafe "ftglSetFontOutset" fsetFontOutset :: Font -> CFloat -> CFloat -> IO ()
{-# INLINE fsetFontOutset #-}
setFontOutset :: Font -> Float -> Float -> IO ()
setFontOutset font d o = fsetFontOutset font (realToFrac d) (realToFrac o)
{-# INLINE setFontOutset #-}
foreign import ccall unsafe "ftglGetFontBBox" fgetFontBBox :: Font -> CString -> Int -> Ptr CFloat -> IO ()
{-# INLINE fgetFontBBox #-}
-- | Get the text extents of a string as a list of (llx,lly,llz,urx,ury,urz)
getFontBBox :: Font -> BS8.ByteString -> IO [Float]
getFontBBox f s = allocaBytes 24 $ \pf -> do
BS8.useAsCString s $ \ps -> fgetFontBBox f ps (-1) pf
map realToFrac <$> peekArray 6 pf
{-# INLINE getFontBBox #-}
foreign import ccall unsafe "ftglGetFontAscender" fgetFontAscender :: Font -> CFloat
{-# INLINE fgetFontAscender #-}
-- | Get the global ascender height for the face.
getFontAscender :: Font -> Float
getFontAscender = realToFrac . fgetFontAscender
{-# INLINE getFontAscender #-}
foreign import ccall unsafe "ftglGetFontDescender" fgetFontDescender :: Font -> CFloat
{-# INLINE fgetFontDescender #-}
-- | Gets the global descender height for the face.
getFontDescender :: Font -> Float
getFontDescender = realToFrac . fgetFontDescender
{-# INLINE getFontDescender #-}
foreign import ccall unsafe "ftglGetFontLineHeight" fgetFontLineHeight :: Font -> CFloat
{-# INLINE fgetFontLineHeight #-}
-- | Gets the global line spacing for the face.
getFontLineHeight :: Font -> Float
getFontLineHeight = realToFrac . fgetFontLineHeight
{-# INLINE getFontLineHeight #-}
foreign import ccall unsafe "ftglGetFontAdvance" fgetFontAdvance :: Font -> CString -> IO CFloat
{-# INLINE fgetFontAdvance #-}
-- | Get the horizontal span of a string of text using the current font. Use this
-- | as the x coordinate in any translate operation.
getFontAdvance :: Font -> BS8.ByteString -> IO Float
getFontAdvance font str = realToFrac <$> (BS8.useAsCString str $ fgetFontAdvance font)
{-# INLINE getFontAdvance #-}
foreign import ccall unsafe "ftglRenderFont" frenderFont :: Font -> CString -> CInt -> IO ()
{-# INLINE frenderFont #-}
-- | Render a string of text in the current font.
renderFont :: Font -> RenderMode -> BS8.ByteString -> IO ()
renderFont font mode str = BS8.useAsCString str $ \p -> frenderFont font p (fromIntegral $ fromEnum mode)
{-# INLINE renderFont #-}
foreign import ccall unsafe "ftglGetFontError" fgetFontError :: Font -> IO CInt
{-# INLINE fgetFontError #-}
-- | Get any errors associated with loading a font. FIXME return should be a type, not an Int.
fontError :: MonadIO m => Font -> GettableStateVar m Int
fontError f = makeGettableStateVar $ liftIO $ fromIntegral <$> fgetFontError f
{-# INLINE fontError #-}
foreign import ccall unsafe "ftglDestroyLayout" destroyLayout :: Layout -> IO ()
{-# INLINE destroyLayout #-}
foreign import ccall unsafe "ftglRenderLayout" frenderLayout :: Layout -> CString -> IO ()
{-# INLINE frenderLayout #-}
-- | Render a string of text within a layout.
renderLayout :: Layout -> BS8.ByteString -> IO ()
renderLayout layout str = BS8.useAsCString str $ frenderLayout layout
{-# INLINE renderLayout #-}
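-- A minimal sketch of the layout API (illustrative only, not part of the
-- original binding; it assumes a "Font.ttf" file on disk and a current
-- OpenGL context):
--
-- > renderWrapped :: IO ()
-- > renderWrapped = do
-- >   font   <- createTextureFont "Font.ttf"
-- >   _      <- setFontFaceSize font 18 72
-- >   layout <- createSimpleLayout
-- >   setLayoutFont layout font
-- >   setLayoutLineLength layout 200
-- >   renderLayout layout "Text long enough to be wrapped at a 200 unit line length."
-- >   destroyLayout layout
-- >   destroyFont font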
foreign import ccall unsafe "ftglGetLayoutError" fgetLayoutError :: Layout -> IO CInt
{-# INLINE fgetLayoutError #-}
-- | Get any errors associated with a layout.
layoutError :: MonadIO m => Layout -> GettableStateVar m CInt
layoutError f = makeGettableStateVar $ liftIO $ fgetLayoutError f
{-# INLINE layoutError #-}
-- | In polygon or extrusion mode, selects which faces of the glyphs are rendered: front, back, side, or all.
data RenderMode = Front | Back | Side | All deriving (Show, Eq)
instance Enum RenderMode where
fromEnum Front = 0x0001
fromEnum Back = 0x0002
fromEnum Side = 0x0004
fromEnum All = 0xffff
{-# INLINE fromEnum #-}
toEnum 0x0001 = Front
toEnum 0x0002 = Back
toEnum 0x0004 = Side
toEnum 0xffff = All
toEnum x = error $ "Unknown RenderMode as " ++ show x
{-# INLINE toEnum #-}
-- | In a Layout directed render, the layout mode of the text
data TextAlignment = AlignLeft | AlignCenter | AlignRight | Justify
deriving (Show, Eq, Enum)
-- | An opaque type encapsulating a glyph in C. Currently the glyph functions are unimplemented in Haskell.
data Glyph_Opaque
-- | An opaque type encapsulating a font in C.
data Font_Opaque
-- | An opaque type encapsulating a layout in C
data Layout_Opaque
type Glyph = Ptr Glyph_Opaque
type Font = Ptr Font_Opaque
type Layout = Ptr Layout_Opaque
data CharMap =
EncodingNone
| EncodingMSSymbol
| EncodingUnicode
| EncodingSJIS
| EncodingGB2312
| EncodingBig5
| EncodingWanSung
| EncodingJohab
| EncodingAdobeStandard
| EncodingAdobeExpert
| EncodingAdobeCustom
| EncodingAdobeLatin1
| EncodingOldLatin2
| EncodingAppleRoman
encodeTag :: Char -> Char -> Char -> Char -> CInt
encodeTag a b c d =
(fromIntegral (ord a) `shift` 24)
.|. (fromIntegral (ord b) `shift` 16)
.|. (fromIntegral (ord c) `shift` 8)
.|. (fromIntegral (ord d))
marshalCharMap :: CharMap -> CInt
marshalCharMap EncodingNone          = 0
marshalCharMap EncodingMSSymbol      = encodeTag 's' 'y' 'm' 'b'
marshalCharMap EncodingUnicode       = encodeTag 'u' 'n' 'i' 'c'
marshalCharMap EncodingSJIS          = encodeTag 's' 'j' 'i' 's'
marshalCharMap EncodingGB2312        = encodeTag 'g' 'b' ' ' ' '
marshalCharMap EncodingBig5          = encodeTag 'b' 'i' 'g' '5'
marshalCharMap EncodingWanSung       = encodeTag 'w' 'a' 'n' 's'
marshalCharMap EncodingJohab         = encodeTag 'j' 'o' 'h' 'a'
marshalCharMap EncodingAdobeStandard = encodeTag 'A' 'D' 'O' 'B'
marshalCharMap EncodingAdobeExpert   = encodeTag 'A' 'D' 'B' 'E'
marshalCharMap EncodingAdobeCustom   = encodeTag 'A' 'D' 'B' 'C'
marshalCharMap EncodingAdobeLatin1   = encodeTag 'l' 'a' 't' '1'
marshalCharMap EncodingOldLatin2     = encodeTag 'l' 'a' 't' '2'
marshalCharMap EncodingAppleRoman    = encodeTag 'a' 'r' 'm' 'n'
|
seagull-kamome/haskell-FTGL
|
Graphics/Rendering/FTGL.hs
|
bsd-2-clause
| 15,759 | 0 | 13 | 2,655 | 2,795 | 1,502 | 1,293 | -1 | -1 |
module FRP.Ordrea.Pipes
( networkToPipe
, pipeToNetwork
) where
import Control.Applicative
import Control.Monad (forever)
import Data.Monoid (mconcat)
import FRP.Ordrea
import Pipes
import qualified Pipes.Prelude as P
-- | Convert an ordrea event network to a pipe.
networkToPipe
:: MonadIO m
=> (Event a -> SignalGen (Event b))
-- ^ Ordrea network
-> Pipe a b m r
networkToPipe network = do
inputEvent <- liftIO newExternalEvent
sample <- liftIO $ start $ do
ev <- network =<< externalE inputEvent
return $ eventToBehavior ev
P.mapM (go inputEvent sample) >-> P.concat
where
go ext sample a = liftIO $ do
triggerExternalEvent ext a
sample
-- | Convert a pipe to an ordrea event network.
pipeToNetwork
:: Pipe a b IO ()
-> Event a
-> SignalGen (Event b)
pipeToNetwork pipe ev = do
outputEvent <- liftIO newExternalEvent
pipeE <- generatorE $ go outputEvent <$> ev
outputE <- externalE outputEvent
return $ justE $ mconcat
[ Nothing <$ pipeE
, Just <$> outputE
]
where
go ext a = liftIO $ runEffect $
yield a >-> pipe >-> for cat (lift . triggerExternalEvent ext)
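-- A minimal usage sketch (illustrative, not part of the original module; it
-- assumes ordrea's 'Event' is a 'Functor', so @fmap (* 2)@ maps over event
-- occurrences):
--
-- > example :: IO ()
-- > example = runEffect $
-- >       P.each [1 .. 5 :: Int]
-- >   >-> networkToPipe (return . fmap (* 2))
-- >   >-> P.print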
|
maoe/ordrea-pipes
|
src/FRP/Ordrea/Pipes.hs
|
bsd-3-clause
| 1,150 | 0 | 13 | 266 | 369 | 182 | 187 | 35 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
module Bootstrap (runBootstrap) where
import Control.Monad
import System.Directory
import System.Environment
import System.Exit.Compat
import System.FilePath
import System.Posix.Files
import System.Process
import PackageSets
import PackageSets.Types
import Path
import Sandboxes
runBootstrap :: Path Bin -> Path Sandboxes -> PackageSetName -> IO ()
runBootstrap binDir sandboxesDir packageSetName = do
packageSet <- either die return (getPackageSet packageSetName)
when (null (packageNames packageSet)) $ do
die ("package set is empty :(\n" ++ show packageSet)
let sandboxDir = getSandbox sandboxesDir packageSetName
createDirectoryIfMissing True (toPath sandboxDir)
setCurrentDirectory (toPath sandboxDir)
callCommand "cabal sandbox init"
writeCabalConfig sandboxDir packageSet
setEnv "CABAL_SANDBOX_CONFIG" (toPath $ getCabalSandboxConfig sandboxDir)
callCommand ("cabal install --force-reinstalls " ++ unwords (packageNames packageSet))
createSymbolicLink
(toPath binDir </> "runstaskell")
(toPath $ mkProgramLink binDir packageSetName)
|
soenkehahn/runstaskell
|
src/Bootstrap.hs
|
bsd-3-clause
| 1,304 | 0 | 13 | 276 | 285 | 140 | 145 | 30 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE LiberalTypeSynonyms #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Distributed.Process.ManagedProcess.UnsafeClient
-- Copyright : (c) Tim Watson 2012 - 2017
-- License : BSD3 (see the file LICENSE)
--
-- Maintainer : Tim Watson <[email protected]>
-- Stability : experimental
-- Portability : non-portable (requires concurrency)
--
-- Unsafe variant of the /Managed Process Client API/. This module implements
-- the client portion of a Managed Process using the unsafe variants of cloud
-- haskell's messaging primitives. It relies on the -extras implementation of
-- @UnsafePrimitives@, which forces evaluation for types that provide an
-- @NFData@ instance. Direct use of the underlying unsafe primitives (from
-- the distributed-process library) without @NFData@ instances is unsupported.
--
-- IMPORTANT NOTE: As per the platform documentation, it is not possible to
-- /guarantee/ that an @NFData@ instance will force evaluation in the same way
-- that a @Binary@ instance would (when encoding to a byte string). Please read
-- the unsafe primitives documentation carefully and make sure you know what
-- you're doing. You have been warned.
--
-- See "Control.Distributed.Process.Extras".
-- See "Control.Distributed.Process.Extras.UnsafePrimitives".
-- See "Control.Distributed.Process.UnsafePrimitives".
-----------------------------------------------------------------------------
-- TODO: This module is basically cut+paste duplication of the /safe/ Client - fix
-- Caveats... we've got to support two different type constraints, somehow, so
-- that the correct implementation gets used depending on whether or not we're
-- passing NFData or just Binary instances...
module Control.Distributed.Process.ManagedProcess.UnsafeClient
( -- * Unsafe variants of the Client API
sendControlMessage
, shutdown
, call
, safeCall
, tryCall
, callTimeout
, flushPendingCalls
, callAsync
, cast
, callChan
, syncCallChan
, syncSafeCallChan
) where
import Control.Distributed.Process
( Process
, ProcessId
, ReceivePort
, newChan
, matchChan
, match
, die
, terminate
, receiveTimeout
, unsafeSendChan
, getSelfPid
, catchesExit
, handleMessageIf
)
import Control.Distributed.Process.Async
( Async
, async
, task
)
import Control.Distributed.Process.Extras
( awaitResponse
, Addressable
, Routable(..)
, NFSerializable
, ExitReason(..)
, Shutdown(..)
)
import Control.Distributed.Process.ManagedProcess.Internal.Types
( Message(CastMessage, ChanMessage)
, CallResponse(..)
, ControlPort(..)
, unsafeInitCall
, waitResponse
)
import Control.Distributed.Process.Extras.Time
( TimeInterval
, asTimeout
)
import Control.Distributed.Process.Serializable hiding (SerializableDict)
import Data.Maybe (fromJust)
-- | Send a control message over a 'ControlPort'. This version of
-- @sendControlMessage@ uses /unsafe primitives/.
--
sendControlMessage :: Serializable m => ControlPort m -> m -> Process ()
sendControlMessage cp m = unsafeSendChan (unPort cp) (CastMessage m)
-- | Send a signal instructing the process to terminate. This version of
-- @shutdown@ uses /unsafe primitives/.
shutdown :: ProcessId -> Process ()
shutdown pid = cast pid Shutdown
-- | Make a synchronous call - uses /unsafe primitives/.
call :: forall s a b . (Addressable s, NFSerializable a, NFSerializable b)
=> s -> a -> Process b
call sid msg = unsafeInitCall sid msg >>= waitResponse Nothing >>= decodeResult
where decodeResult (Just (Right r)) = return r
decodeResult (Just (Left err)) = die err
decodeResult Nothing {- the impossible happened -} = terminate
-- | Safe version of 'call' that returns information about the error
-- if the operation fails - uses /unsafe primitives/.
safeCall :: forall s a b . (Addressable s, NFSerializable a, NFSerializable b)
=> s -> a -> Process (Either ExitReason b)
safeCall s m = do
us <- getSelfPid
(fmap fromJust (unsafeInitCall s m >>= waitResponse Nothing) :: Process (Either ExitReason b))
`catchesExit` [\pid msg -> handleMessageIf msg (weFailed pid us)
(return . Left)]
where
weFailed a b (ExitOther _) = a == b
weFailed _ _ _ = False
-- | Version of 'safeCall' that returns 'Nothing' if the operation fails.
-- Uses /unsafe primitives/.
tryCall :: forall s a b . (Addressable s, NFSerializable a, NFSerializable b)
=> s -> a -> Process (Maybe b)
tryCall s m = unsafeInitCall s m >>= waitResponse Nothing >>= decodeResult
where decodeResult (Just (Right r)) = return $ Just r
decodeResult _ = return Nothing
-- | Make a synchronous call, but timeout and return @Nothing@ if a reply
-- is not received within the specified time interval - uses /unsafe primitives/.
--
callTimeout :: forall s a b . (Addressable s, NFSerializable a, NFSerializable b)
=> s -> a -> TimeInterval -> Process (Maybe b)
callTimeout s m d = unsafeInitCall s m >>= waitResponse (Just d) >>= decodeResult
where decodeResult :: (NFSerializable b)
=> Maybe (Either ExitReason b)
-> Process (Maybe b)
decodeResult Nothing = return Nothing
decodeResult (Just (Right result)) = return $ Just result
decodeResult (Just (Left reason)) = die reason
-- | Block for @TimeInterval@ waiting for any matching @CallResponse@
flushPendingCalls :: forall b . (NFSerializable b)
=> TimeInterval
-> (b -> Process b)
-> Process (Maybe b)
flushPendingCalls d proc =
receiveTimeout (asTimeout d) [
match (\(CallResponse (m :: b) _) -> proc m)
]
-- | Invokes 'call' /out of band/, and returns an "async handle."
-- Uses /unsafe primitives/.
--
callAsync :: forall s a b . (Addressable s, NFSerializable a, NFSerializable b)
=> s -> a -> Process (Async b)
callAsync server msg = async $ task $ call server msg
-- | Sends a /cast/ message to the server identified by @server@ - uses /unsafe primitives/.
--
cast :: forall a m . (Addressable a, NFSerializable m)
=> a -> m -> Process ()
cast server msg = unsafeSendTo server ((CastMessage msg) :: Message m ())
-- | Sends a /channel/ message to the server and returns a @ReceivePort@ - uses /unsafe primitives/.
callChan :: forall s a b . (Addressable s, NFSerializable a, NFSerializable b)
=> s -> a -> Process (ReceivePort b)
callChan server msg = do
(sp, rp) <- newChan
unsafeSendTo server ((ChanMessage msg sp) :: Message a b)
return rp
-- | A synchronous version of 'callChan'.
syncCallChan :: forall s a b . (Addressable s, NFSerializable a, NFSerializable b)
=> s -> a -> Process b
syncCallChan server msg = do
r <- syncSafeCallChan server msg
case r of
Left e -> die e
Right r' -> return r'
-- | A safe version of 'syncCallChan', which returns @Left ExitReason@ if the
-- call fails.
syncSafeCallChan :: forall s a b . (Addressable s, NFSerializable a, NFSerializable b)
=> s -> a -> Process (Either ExitReason b)
syncSafeCallChan server msg = do
rp <- callChan server msg
awaitResponse server [ matchChan rp (return . Right) ]
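-- A hypothetical client session (illustrative only, not part of the original
-- module; @serverPid@ and the request/reply types are assumptions, and every
-- type passed through these functions needs 'NFData' and 'Binary' instances):
--
-- > client :: ProcessId -> Process Int
-- > client serverPid = do
-- >   cast serverPid ("reset" :: String)
-- >   n <- call serverPid ("count" :: String)
-- >   shutdown serverPid
-- >   return (n :: Int)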
|
haskell-distributed/distributed-process-client-server
|
src/Control/Distributed/Process/ManagedProcess/UnsafeClient.hs
|
bsd-3-clause
| 7,463 | 0 | 13 | 1,616 | 1,530 | 841 | 689 | 117 | 3 |
{-|
Module :
Description :
Copyright : (c) Alexander Vieth, 2015
Licence : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (GHC only)
-}
{-# LANGUAGE AutoDeriveTypeable #-}
module Database.Relational.Not (
NOT(..)
) where
data NOT term = NOT term
|
avieth/Relational
|
Database/Relational/Not.hs
|
bsd-3-clause
| 318 | 0 | 6 | 73 | 28 | 19 | 9 | 4 | 0 |
{-# LANGUAGE RankNTypes #-}
-- |A parser for RDF in N-Triples format
-- <http://www.w3.org/TR/rdf-testcases/#ntriples>.
module Text.RDF.RDF4H.NTriplesParser(
NTriplesParser(NTriplesParser),
parseNTriplesRDF
) where
-- TODO: switch to OverloadedStrings and use ByteString literals (?).
import Data.RDF
import Data.Char(isLetter, isDigit, isLower)
import qualified Data.Map as Map
import Text.Parsec
import Text.Parsec.ByteString.Lazy
import Data.ByteString.Lazy.Char8(ByteString)
import qualified Data.ByteString.Lazy.Char8 as B
-- |NTriplesParser is an 'RdfParser' implementation for parsing RDF in the
-- NTriples format. It requires no configuration options. To use this parser,
-- pass an 'NTriplesParser' value as the first argument to any of the
-- 'parseString', 'parseFile', or 'parseURL' methods of the 'RdfParser' type
-- class.
data NTriplesParser = NTriplesParser
-- |'NTriplesParser' is an instance of 'RdfParser'.
instance RdfParser NTriplesParser where
parseString _ bs = handleParse mkRdf (runParser nt_ntripleDoc () "" bs)
parseFile _ path = B.readFile path >>= return . runParser nt_ntripleDoc () path >>= return . handleParse mkRdf
parseURL p url = parseURL' (parseString p) url
parseNTriplesRDF :: forall rdf. (RDF rdf)
=> ByteString -- ^ The contents to parse
-> Either ParseFailure rdf -- ^ The RDF representation of the triples or ParseFailure
parseNTriplesRDF bs = handleParse mkRdf (runParser nt_ntripleDoc () "" bs)
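-- An illustrative use (not part of the original module; 'TriplesGraph' is
-- assumed to be one of the 'RDF' instances shipped with this version of the
-- library):
--
-- > exampleGraph :: Either ParseFailure TriplesGraph
-- > exampleGraph = parseNTriplesRDF
-- >   (B.pack "<http://example.org/s> <http://example.org/p> \"o\" .\n")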
-- We define or redefine all here using same names as the spec, but with an
-- 'nt_' prefix in order to avoid name clashes (e.g., ntripleDoc becomes
-- nt_ntripleDoc).
-- |nt_ntripleDoc is simply zero or more lines.
nt_ntripleDoc :: GenParser ByteString () [Maybe Triple]
nt_ntripleDoc = manyTill nt_line eof
nt_line :: GenParser ByteString () (Maybe Triple)
nt_line =
skipMany nt_space >>
(nt_comment <|> nt_triple <|> nt_empty) >>=
\res -> nt_eoln >> return res
-- A comment consists of an initial # character, followed by any number of
-- characters except cr or lf. The spec is redundant in specifying that
-- comment is hash followed by "character - (cr | lf)", since character
-- is already defined as the range #x0020-#x007E, so cr #x000D and
-- lf #x000A are both already excluded. This returns Nothing as we are
-- ignoring comments for now.
nt_comment :: GenParser ByteString () (Maybe Triple)
nt_comment = char '#' >> skipMany nt_character >> return Nothing
-- A triple consists of whitespace-delimited subject, predicate, and object,
-- followed by optional whitespace and a period, and possibly more
-- whitespace.
nt_triple :: GenParser ByteString () (Maybe Triple)
nt_triple =
do
subj <- nt_subject
skipMany1 nt_space
pred <- nt_predicate
skipMany1 nt_space
obj <- nt_object
skipMany nt_space
char '.'
many nt_space
return $ Just (Triple subj pred obj)
-- A literal is either a language literal (with optional language
-- specified) or a datatype literal (with required datatype
-- specified). The literal value is always enclosed in double
-- quotes. A language literal may have '@' after the closing quote,
-- followed by a language specifier. A datatype literal follows
-- the closing quote with ^^ followed by the URI of the datatype.
nt_literal :: GenParser ByteString () LValue
nt_literal =
do lit_str <- between_chars '"' '"' inner_literal
(char '@' >> nt_language >>= return . plainLL lit_str) <|>
(count 2 (char '^') >> nt_uriref >>= return . typedL lit_str . mkFastString) <|>
(return $ plainL lit_str)
where inner_literal = (manyTill inner_string (lookAhead $ char '"') >>= return . B.concat)
-- A language specifier of a language literal is any number of lowercase
-- letters followed by any number of blocks consisting of a hyphen followed
-- by one or more lowercase letters or digits.
nt_language :: GenParser ByteString () ByteString
nt_language =
do str <- fmap B.pack (many (satisfy (\ c -> c == '-' || isLower c)))
if B.null str || B.last str == '-' || B.head str == '-'
then fail ("Invalid language string: '" ++ B.unpack str ++ "'")
else return str
-- nt_empty is a line that isn't a comment or a triple. They appear in the
-- parsed output as Nothing, whereas a real triple appears as (Just triple).
nt_empty :: GenParser ByteString () (Maybe Triple)
nt_empty = skipMany nt_space >> return Nothing
-- A subject is either a URI reference for a resource or a node id for a
-- blank node.
nt_subject :: GenParser ByteString () Node
nt_subject = fmap unode nt_uriref
<|>fmap bnode nt_nodeID
-- A predicate may only be a URI reference to a resource.
nt_predicate :: GenParser ByteString () Node
nt_predicate = fmap unode nt_uriref
-- An object may be either a resource (represented by a URI reference),
-- a blank node (represented by a node id), or an object literal.
nt_object :: GenParser ByteString () Node
nt_object = fmap unode nt_uriref
<|>fmap bnode nt_nodeID
<|>fmap LNode nt_literal
-- A URI reference is one or more non-'>' characters inside angle brackets.
nt_uriref :: GenParser ByteString () ByteString
nt_uriref = between_chars '<' '>' (fmap B.pack (many (satisfy (/= '>'))))
-- A node id is "_:" followed by a name.
nt_nodeID :: GenParser ByteString () ByteString
nt_nodeID = char '_' >> char ':' >> nt_name >>= \n ->
return ('_' `B.cons'` (':' `B.cons'` n))
-- A name is a letter followed by any number of alpha-numeric characters.
nt_name :: GenParser ByteString () ByteString
nt_name =
do init <- letter
rest <- many (satisfy isLetterOrDigit)
return $ B.pack (init:rest)
isLetterOrDigit :: Char -> Bool
isLetterOrDigit c = isLetter c || isDigit c
-- An nt_character is any character except a double quote character.
nt_character :: GenParser ByteString () Char
nt_character = satisfy is_nonquote_char
-- A character is any Unicode value from ASCII space to decimal 126 (tilde).
is_character :: Char -> Bool
is_character c = c >= '\x0020' && c <= '\x007E'
-- A non-quote character is a character that isn't the double-quote character.
is_nonquote_char :: Char -> Bool
is_nonquote_char c = is_character c && c/= '"'
-- End-of-line consists of either lf or crlf.
-- We also test for eof and consider that to match as well.
nt_eoln :: GenParser ByteString () ()
nt_eoln = eof
<|> (nt_cr >> nt_lf >> return ())
<|> (nt_lf >> return ())
-- Whitespace is either a space or tab character. We must avoid using the
-- built-in space combinator here, because it includes newline.
nt_space :: GenParser ByteString () Char
nt_space = char ' ' <|> nt_tab
-- Carriage return is \r.
nt_cr :: GenParser ByteString () Char
nt_cr = char '\r'
-- Line feed is \n.
nt_lf :: GenParser ByteString () Char
nt_lf = char '\n'
-- Tab is \t.
nt_tab :: GenParser ByteString () Char
nt_tab = char '\t'
-- An inner_string is a fragment of a string (this is used inside double
-- quotes), and consists of the non-quote characters allowed and the
-- standard escapes for a backslash (\\), a tab (\t), a carriage return (\r),
-- a newline (\n), a double-quote (\"), a 4-digit Unicode escape (\uxxxx
-- where x is a hexadecimal digit), and an 8-digit Unicode escape
-- (\Uxxxxxxxx where x is a hexadecimaldigit).
inner_string :: GenParser ByteString () ByteString
inner_string =
try (char '\\' >>
((char 't' >> return b_tab) <|>
(char 'r' >> return b_ret) <|>
(char 'n' >> return b_nl) <|>
(char '\\' >> return b_slash) <|>
(char '"' >> return b_quote) <|>
(char 'u' >> count 4 hexDigit >>= \cs -> return $ B.pack ('\\':'u':cs)) <|>
(char 'U' >> count 8 hexDigit >>= \cs -> return $ B.pack ('\\':'U':cs))))
<|> fmap B.pack (many (satisfy (\ c -> is_nonquote_char c && c /= '\\')))
b_tab = B.singleton '\t'
b_ret = B.singleton '\r'
b_nl = B.singleton '\n'
b_slash = B.singleton '\\'
b_quote = B.singleton '"'
between_chars :: Char -> Char -> GenParser ByteString () ByteString -> GenParser ByteString () ByteString
between_chars start end parser = char start >> parser >>= \res -> char end >> return res
handleParse :: forall rdf. (RDF rdf) => (Triples -> Maybe BaseUrl -> PrefixMappings -> rdf) ->
Either ParseError [Maybe Triple] ->
Either ParseFailure rdf
handleParse _mkRdf result
-- | B.length rem /= 0 = (Left $ ParseFailure $ "Invalid Document. Unparseable end of document: " ++ B.unpack rem)
| otherwise =
case result of
Left err -> Left $ ParseFailure $ "Parse failure: \n" ++ show err
Right ts -> Right $ _mkRdf (conv ts) Nothing (PrefixMappings Map.empty)
where
conv [] = []
conv (Nothing:ts) = conv ts
conv ((Just t):ts) = t : conv ts
_test :: GenParser ByteString () a -> String -> IO a
_test p str =
case result of
(Left err) -> putStr "ParseError: '" >> putStr (show err) >> putStr "\n" >> error ""
(Right a) -> return a
where result = runParser p () "" (B.pack str)
|
amccausl/RDF4H
|
src/Text/RDF/RDF4H/NTriplesParser.hs
|
bsd-3-clause
| 9,200 | 0 | 18 | 1,983 | 2,064 | 1,057 | 1,007 | 133 | 4 |
module Data.Type.Vec where
import Control.Applicative
import Data.Foldable
import Data.Monoid
import Data.Typeable
import Data.Traversable
import Data.Type.BasicFunctors
import Data.Type.Equality
import Data.Type.Fin
import Data.Type.Nat
import Prelude hiding (length, replicate, zipWith, unzip, (++), (!!))
type role Vec nominal representational
data Vec :: Nat -> * -> * where
Nil :: Vec Zero a
(:*) :: a -> Vec n a -> Vec (Suc n) a
infixr 5 :*
deriving instance Eq a => Eq (Vec n a)
deriving instance Ord a => Ord (Vec n a)
deriving instance Show a => Show (Vec n a)
deriving instance Typeable Vec
instance (SNatI n, Monoid a) => Monoid (Vec n a) where
mempty = replicate mempty
mappend = zipWith mappend
head :: Vec (Suc n) a -> a
head (x :* xs) = x
tail :: Vec (Suc n) a -> Vec n a
tail (x :* xs) = xs
length :: Vec n a -> SNat n
length Nil = SZero
length (x :* xs) = SSuc (length xs)
instance Foldable (Vec n) where
foldMap :: (Monoid m) => (a -> m) -> Vec n a -> m
foldMap f Nil = mempty
foldMap f (x :* xs) = f x <> foldMap f xs
zipWith :: (a -> b -> c) -> Vec n a -> Vec n b -> Vec n c
zipWith op Nil Nil = Nil
zipWith op (x :* xs) (y :* ys) = op x y :* zipWith op xs ys
zip :: Vec n a -> Vec n b -> Vec n (a, b)
zip = zipWith (,)
unzip :: Vec n (a, b) -> (Vec n a, Vec n b)
unzip Nil = (Nil, Nil)
unzip ((x , y) :* xys) = let (xs, ys) = unzip xys in (x :* xs, y :* ys)
cata :: forall a r n.
r Zero
-> (forall n. a -> r n -> r (Suc n))
-> Vec n a
-> r n
cata nil cons = go
where
go :: forall n. Vec n a -> r n
go Nil = nil
go (x :* xs) = cons x (go xs)
catal :: forall a r n.
r Zero
-> (forall n. r n -> a -> r (Suc n))
-> Vec n a
-> r n
catal nil cons Nil = nil
catal nil cons (x :* xs) = unShift (catal (Shift (cons nil x)) (\ (Shift r) x -> Shift (cons r x)) xs)
instance Functor (Vec n) where
fmap :: (a -> b) -> Vec n a -> Vec n b
fmap f Nil = Nil
fmap f (x :* xs) = f x :* fmap f xs
instance SNatI n => Applicative (Vec n) where
pure = replicate
(<*>) = zipWith ($)
instance Traversable (Vec n) where
traverse :: Applicative i => (a -> i b) -> Vec n a -> i (Vec n b)
traverse f Nil = pure Nil
traverse f (x :* xs) = (:*) <$> f x <*> traverse f xs
replicate' :: SNat n -> a -> Vec n a
replicate' SZero x = Nil
replicate' (SSuc n) x = x :* replicate' n x
replicate :: SNatI n => a -> Vec n a
replicate = replicate' sNat
(++) :: Vec m a -> Vec n a -> Vec (m + n) a
Nil ++ ys = ys
(x :* xs) ++ ys = x :* (xs ++ ys)
reverse' :: Vec n a -> Vec n a
reverse' xs = unFlip $ catal (Flip Nil) (\ (Flip acc) x -> Flip (x :* acc)) xs
reverse :: Vec n a -> Vec n a
reverse xs = gcastWith (thmPlusZero (length xs)) $ go xs Nil
where
go :: Vec p a -> Vec q a -> Vec (p + q) a
go Nil acc = acc
go (x :* xs) acc =
gcastWith (thmPlusSuc (length xs) (length acc)) $
go xs (x :* acc)
(!!) :: Vec n a -> Fin n -> a
(x :* xs) !! FZero = x
(x :* xs) !! FSuc i = xs !! i
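-- Illustrative GHCi-style examples (not part of the original module):
--
-- >>> toList ((1 :* 2 :* Nil) ++ (3 :* Nil))
-- [1,2,3]
--
-- >>> (1 :* 2 :* 3 :* Nil) !! FSuc FZero
-- 2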
tabulate' :: SNat n -> (Fin n -> a) -> Vec n a
tabulate' SZero f = Nil
tabulate' (SSuc s) f = f FZero :* tabulate' s (f . FSuc)
tabulate :: SNatI n => (Fin n -> a) -> Vec n a
tabulate = tabulate' sNat
allFin' :: SNat n -> Vec n (Fin n)
allFin' n = tabulate' n id
allFin :: SNatI n => Vec n (Fin n)
allFin = allFin' sNat
splitAt' :: SNat m -> Vec (m + n) a -> (Vec m a, Vec n a)
splitAt' SZero xs = (Nil, xs)
splitAt' (SSuc s) (x :* xs) = let (ys, zs) = splitAt' s xs in (x :* ys, zs)
data VecSumPair n a where
VecSumPair :: Vec m a -> Vec n a -> VecSumPair (m + n) a
partition :: (a -> Bool) -> Vec n a -> VecSumPair n a
partition p Nil = VecSumPair Nil Nil
partition p (x :* xs)
| p x = case partition p xs of
VecSumPair ys zs -> VecSumPair (x :* ys) zs
| otherwise = case partition p xs of
VecSumPair ys zs -> gcastWith (thmPlusSuc (length ys) (length zs)) (VecSumPair ys (x :* zs))
|
kosmikus/tilt
|
src/Data/Type/Vec.hs
|
bsd-3-clause
| 4,075 | 0 | 14 | 1,217 | 2,206 | 1,120 | 1,086 | -1 | -1 |
module Main where
{-@ total :: Nat -> Nat @-}
total :: Int -> Int
total 0 = 0
total n = 1 + total (n-1)
|
ssaavedra/liquidhaskell
|
tests/pos/recursion0.hs
|
bsd-3-clause
| 107 | 0 | 8 | 29 | 44 | 24 | 20 | 4 | 1 |
{-# Language OverloadedStrings #-}
{-# Language RankNTypes #-}
{-# Language FlexibleContexts #-}
module Chat where
import Control.Monad.State
import Control.Monad.Reader
import Control.Monad.Writer
import Data.Text (Text)
import Data.Map.Strict (Map)
import Data.Monoid ((<>))
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Data.Maybe (maybeToList)
default (Text)
type SentenceState = Map Text [Text]
newtype Normalized = Normalized [Text]
-- newtype SentenceStateB a = SentenceStateB {unSentenceStateB :: State (Map Text [Text]) a}
-- deriving (Functor, Applicative, Monad, MonadState (Map Text [Text]))
type SentenceStateB a = forall m. MonadState (Map Text [Text]) m => m a
type Ctx = Map Text [Text]
-- |Describe different matching types
data Matcher = WordMatch !Text -- ^ The most basic matcher, matches the word under cursor
| Star -- ^ Matches any word and is recorded under '*' in the state
| NamedStar !Text -- ^ Matches any word and is recorded under the name given in constructor
deriving (Show, Eq, Ord)
{-|
MatchTree m is the foundation for matching content. It is basically a trie,
with matchers as the leaves. The leaves should be tried against normalized words.
|-}
data MatchTree m = Root [MatchTree m]
| Leaf !Matcher [MatchTree m] (Maybe (Ctx -> m ()))
-- |Match a matcher against a normalized word.
match :: Matcher -> Text -> SentenceStateB Bool
match (WordMatch x) w | x == w = return True
| otherwise = return False
match Star w = modify (M.insertWith (<>) "*" [w]) >> return True
match (NamedStar n) w = modify (M.insertWith (<>) n [w]) >> return True
{-|
Combine tries together. The trie in the second argument is tried against all
the tries in the first argument. If there is a match, the subtrees are joined
recursively, otherwise the new trie is prepended to the list of tries.
|-}
-- XXX: Can this be converted into MatchTree m -> MatchTree m -> MatchTree m
add :: Functor f => [MatchTree f] -> MatchTree f -> [MatchTree f]
add [] l = [l]
add (Root cs : _) l = [Root (add cs l)]
add (o@(Leaf w cs f) : xs) l@(Leaf w' cs' _)
| w == w' = Leaf w (concatMap (add cs) cs') f : xs
| otherwise = o : add xs l
add l (Root cs) = concatMap (add l) cs
{-|
Match a normalized wordlist against a ready tree. Return a list of actions
while building the sentence state at the same time.
|-}
matchTree :: Monad m => Normalized -> MatchTree m -> SentenceStateB [Ctx -> m ()]
matchTree (Normalized xs) = go xs
where
go :: Monad m => [Text] -> MatchTree m -> SentenceStateB [Ctx -> m ()]
go [] (Leaf _ _ f) = return (maybeToList f)
go ws (Root cs) = concat <$> mapM (go ws) cs
go [w] (Leaf m _ f) = match m w >>=
\matched -> if matched then return (maybeToList f) else return []
go (w:ws) (Leaf m cs _) = match m w >>=
\matched -> if matched then concat <$> mapM (go ws) cs else return []
build :: Monad m => [(Ctx -> m (), Text)] -> MatchTree m
build ws =
case foldr (flip add . uncurry fromText) [] ws of
[Root cs] -> Root cs
_ -> Root []
{-|
Build a tree *spine* from a sentence.
The given sentence is uppercased and parsed for some tokens. Tokens '*' and
':variablename:' are understood, creating Star and NamedStar respectively
|-}
fromText :: Monad m => (Ctx -> m ()) -> Text -> MatchTree m
fromText f = Root . addAction . go . T.words . T.toUpper
where
go ("*":ws) = [Leaf Star (go ws) Nothing]
go (w:ws) | isVar w = [Leaf (NamedStar w) (go ws) Nothing]
| otherwise = [Leaf (WordMatch w) (go ws) Nothing]
go [] = []
isVar w = all ($ w) [(":" `T.isSuffixOf`), (":" `T.isPrefixOf`)]
addAction [Leaf w [] _] = [Leaf w [] (Just f)]
addAction [Leaf w cs _] = [Leaf w (addAction cs) Nothing]
addAction x = x
-- |Run the matcher against input text. The text is normalized.
runMatcher :: Monad m => Text -> MatchTree m -> m ()
runMatcher sentence tree =
let (fs,s) = runState (matchTree normalizedSentence tree) M.empty
normalizedSentence = Normalized . T.words . T.toUpper $ sentence
in mapM_ (\f -> f s) fs
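-- An illustrative use (not part of the original module; the pattern, the input
-- sentence and the printing action are arbitrary examples):
--
-- > example :: IO ()
-- > example = runMatcher "Hello John" $
-- >   build [(print . M.toList, "hello :name:")]
-- >
-- > -- prints the captured context, i.e. [(":NAME:",["JOHN"])]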
|
MasseR/FreeIrc
|
src/Chat.hs
|
bsd-3-clause
| 4,242 | 0 | 14 | 1,021 | 1,403 | 728 | 675 | 71 | 6 |
-- | Type-safe client-server communication framework for Haste.
--
-- In addition to the Haste.App extras, this module exports the same API as
-- "Haste", modified slightly to work better with the automatic program
-- slicing Haste.App provides. This means that you should import either this
-- module *or* Haste, but *not* both.
module Haste.App (
MonadIO, Remotable, App, Server, Remote, Done,
Sessions, SessionID,
liftServerIO, forkServerIO, remote, runApp,
(<.>), getSessionID, getActiveSessions, onSessionEnd,
AppCfg, cfgHost, cfgPort, mkConfig,
Client,
runClient, onServer, liftIO,
JSString, JSAny, URL, alert, prompt, eval, writeLog, catJSStr, fromJSStr,
module Haste.DOM.Core,
module Haste.Random,
module Haste.Prim.JSType,
module Haste.Hash,
module Haste.Binary
) where
import Haste.App.Client
import Haste.App.Monad
import Haste.Binary (Binary (..))
import Haste.DOM.Core
import Haste.Random
import Haste.Prim.JSType
import Haste.Hash
import Haste
import Control.Monad.IO.Class
|
jtojnar/haste-compiler
|
libraries/haste-lib/src/Haste/App.hs
|
bsd-3-clause
| 1,050 | 0 | 6 | 183 | 205 | 139 | 66 | 23 | 0 |
module Mosh (
module Mosh,
OcbKey,
buildOcbParams,
)
where
import Mosh.Crypto.Key
import Mosh.Crypto.Params
-- aeson
import qualified Data.Aeson as J
-- base
import Control.Applicative
import Control.Monad
import Data.Bits
import Data.List
import Data.Monoid
import Data.Word
import Debug.Trace
-- bytestring
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
-- cereal
import Data.Serialize as DS
-- cipher-aes128
--import Crypto.Cipher.AES128
-- lens
import Control.Lens
import Data.Bits.Lens
-- transformers
import Control.Monad.Trans.State.Strict as S
getRemainingByteString :: Get ByteString
getRemainingByteString = getBytes =<< remaining
data Peer = Server | Client deriving Eq
instance J.ToJSON Peer where
toJSON Server = J.String "server"
toJSON Client = J.String "client"
newtype MoshNonce = MoshNonce Word64
moshNonce :: Peer -> Word64 -> Maybe MoshNonce
moshNonce peer counter = do
guard $ counter .&. bit 63 == 0
return . MoshNonce . (bitAt 63 .~ (peer == Client)) $ counter
nonceCounter :: Getter MoshNonce Word64
nonceCounter = to $ \(MoshNonce n) -> bitAt 63 .~ False $ n
nonceDest :: Getter MoshNonce Peer
nonceDest = to $ \(MoshNonce n) -> if n ^. bitAt 63 then Client else Server
instance Serialize MoshNonce where
put (MoshNonce n) = putWord64be n
get = MoshNonce <$> getWord64be
instance J.ToJSON MoshNonce where
toJSON n = J.object ["dest" J..= (n ^. nonceDest),
"counter" J..= (n ^. nonceCounter)]
$(declareLenses [d|
data Packet = Packet {
packetNonce :: MoshNonce
, packetPayload :: ByteString
}
|])
instance Serialize Packet where
put the = do
DS.put (the ^. packetNonce)
putByteString (the ^. packetPayload)
get = Packet <$> DS.get <*> getRemainingByteString
-- | In the original mosh, the timers are not considered part of the fragment.
$(declareLenses [d|
data Fragment = Fragment {
fragmentSender'sTimer :: Word16
, fragmentLastTimerSenderReceived :: Word16
, fragmentInstructionID :: Word64
, fragmentID :: Word16
, fragmentIsFinal :: Bool
, fragmentPayload :: ByteString
}
|])
instance Serialize Fragment where
put the = do
putWord16be (the ^. fragmentSender'sTimer)
putWord16be (the ^. fragmentLastTimerSenderReceived)
putWord64be (the ^. fragmentInstructionID)
putWord16be ((the ^. fragmentID)
.|. (if the ^. fragmentIsFinal then 0x8000 else 0))
putByteString (the ^. fragmentPayload)
get = do
tSender <- getWord16be
tReceived <- getWord16be
iid <- getWord64be
fid <- getWord16be
payload <- getRemainingByteString
return $ Fragment tSender tReceived iid (fid .&. 0x7fff)
(testBit fid 15) payload
instance J.ToJSON Fragment where
toJSON the
= J.object ["senders-timer"
J..= (the ^. fragmentSender'sTimer),
"last-timer-sender-received"
J..= (the ^. fragmentLastTimerSenderReceived),
"instruction-id"
J..= (the ^. fragmentInstructionID),
"id"
J..= (the ^. fragmentID),
"is-final"
J..= (the ^. fragmentIsFinal),
"payload-length"
J..= B.length (the ^. fragmentPayload)]
-- OCB encryption is described in RFC 7253.
-- | Hardcoding:
-- * key length = 128 bits
-- * nonce length = 96 bits
-- * method of expanding nonce from 64 bits
-- * no associated data
-- * tag length = 128 bits
ocbAesEncrypt :: OcbParams -> ByteString -> ByteString
ocbAesEncrypt param plaintext = let
(plains, plainStar) = slicePlaintext plaintext
(ciphers, cipherStar, tag)
= flip evalState (ocbOffset0 param, zeroes)
$ (,,) <$> zipWithM round' plains [1..]
<*> finalRound plainStar
<*> computeTag
in mconcat ciphers <> cipherStar <> tag
where
round' plainblock i = do
(prevOffset, prevChecksum) <- S.get
let nextOffset = traceBS ("Offset_" ++ show i) $
prevOffset `xorBS` (ocbLAt key (ntz i))
cipherblock = nextOffset
`xorBS` ocbEncryptBlock param
(plainblock `xorBS` nextOffset)
nextChecksum = traceBS ("Checksum_" ++ show i) $
prevChecksum `xorBS` plainblock
S.put (nextOffset, nextChecksum)
return cipherblock
finalRound plainStar | B.null plainStar = return ""
| otherwise = do
(prevOffset, prevChecksum) <- S.get
let nextOffset = traceBS "Offset_*" $
prevOffset `xorBS` ocbLStar key
pad = ocbEncryptBlock param nextOffset
cipherStar = plainStar `xorBS` pad
nextChecksum = traceBS "Checksum_*" $
prevChecksum
`xorBS` (plainStar <> B.cons 128 zeroes)
S.put (nextOffset, nextChecksum)
return cipherStar
computeTag = do
(offset, checksum) <- S.get
return . ocbEncryptBlock param
$ checksum `xorBS` offset `xorBS` ocbLDollar key
key = ocbKey param
ocbAesDecrypt :: OcbParams -> ByteString -> Maybe ByteString
ocbAesDecrypt param ciphertext = do
(ciphers, cipherStar, givenTag) <- sliceCiphertext ciphertext
let (plains, plainStar, computedTag)
= flip evalState (ocbOffset0 param, zeroes)
$ (,,) <$> zipWithM round' ciphers [1..]
<*> finalRound cipherStar
<*> computeTag
guard $ traceBS "tag" computedTag == givenTag
return . traceBS "plaintext" $ mconcat plains <> plainStar
where
round' cipherblock i = do
(prevOffset, prevChecksum) <- S.get
let nextOffset = traceBS ("Offset_" ++ show i) $
prevOffset `xorBS` (ocbLAt key (ntz i))
plainblock = nextOffset
`xorBS` ocbDecryptBlock param
(cipherblock `xorBS` nextOffset)
nextChecksum = traceBS ("Checksum_" ++ show i) $
prevChecksum `xorBS` plainblock
S.put (nextOffset, nextChecksum)
return plainblock
finalRound cipherblock | B.null cipherblock = return ""
| otherwise = do
(prevOffset, prevChecksum) <- S.get
let nextOffset = traceBS "Offset_*" $
prevOffset `xorBS` ocbLStar key
pad = ocbEncryptBlock param nextOffset
plainStar = cipherblock `xorBS` pad
nextChecksum = traceBS "Checksum_*" $
prevChecksum
`xorBS` (plainStar <> B.cons 128 zeroes)
S.put (nextOffset, nextChecksum)
return plainStar
computeTag = do
(offset, checksum) <- S.get
return . ocbEncryptBlock param
$ checksum `xorBS` offset `xorBS` ocbLDollar key
key = ocbKey param
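-- An illustrative round-trip property (not from the original source; @params@
-- stands for any 'OcbParams' value built with 'buildOcbParams'):
--
-- > roundTrips :: OcbParams -> ByteString -> Bool
-- > roundTrips params plaintext =
-- >   ocbAesDecrypt params (ocbAesEncrypt params plaintext) == Just plaintext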
-- | number of trailing zero bits
ntz :: Int -> Int
ntz n = f 0 where
f i | n ^. bitAt i = i
| otherwise = f (i + 1)
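-- Illustrative values (not in the original source): ntz 1 == 0, ntz 4 == 2,
-- ntz 12 == 2. Note that ntz 0 would never terminate.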
nonceFromMoshNonce :: MoshNonce -> ByteString
nonceFromMoshNonce nonce64 = B.pack (replicate 4 0) <> encode nonce64
sliceCiphertext :: ByteString -> Maybe ([ByteString], ByteString, ByteString)
sliceCiphertext full = do
let cb = B.length full - cbTag
guard $ 0 <= cb
let (nonTag, tag) = B.splitAt cb full
(fullSized, leftOvers) = slicePlaintext nonTag
return (fullSized, leftOvers, tag)
slicePlaintext :: ByteString -> ([ByteString], ByteString)
slicePlaintext full
| B.length full < cbBlock = ([], full)
| otherwise = (begin : rest, final) where
(begin, cont) = B.splitAt cbBlock full
(rest, final) = slicePlaintext cont
trace' :: String -> a -> a
trace' | tracing = trace
| otherwise = const id
tracing :: Bool
tracing = False
traceVar :: Show a => String -> a -> a
traceVar msg x = trace' (msg ++ " = " ++ show x) x
traceBS :: String -> ByteString -> ByteString
traceBS message bs = trace' (message ++ showBS bs) bs
showBS :: ByteString -> String
showBS = concatMap ("\n\t" ++)
. map (intercalate " ")
. group' 4
. map (intercalate ".")
. group' 4
. map showByte
. B.unpack
showByte :: Word8 -> String
showByte byte = [hex high, hex low] where
hex x | 0 <= x && x <= 9 = toEnum $ 48 + fromIntegral x
| 10 <= x && x <= 15 = toEnum $ 55 + fromIntegral x
| otherwise = '-'
(high, low) = byte `divMod` 16
group' :: Int -> [a] -> [[a]]
group' _ [] = []
group' c xs = let (ys, zs) = splitAt c xs in ys : group' c zs
|
dave4420/mosh-clone
|
src/Mosh.hs
|
bsd-3-clause
| 9,856 | 0 | 17 | 3,708 | 2,538 | 1,314 | 1,224 | -1 | -1 |
{- |
Module : Web.RBB.Crawler
Description : Implementation of a metadata collector for the blog entry
repository
Copyright : (c) Sebastian Witte
License : BSD3
Maintainer : [email protected]
Stability : experimental
-}
module Web.RBB.Crawler.Repository
where
import Control.Applicative
import Control.Concurrent.STM
import Control.Lens
import Control.Monad
import Control.Monad.State
import Control.Monad.Trans.Except
import Data.FileStore (Change (..), FileStore,
Revision (..), darcsFileStore,
gitFileStore, mercurialFileStore)
import qualified Data.FileStore as FS
import Data.IxSet
import qualified Data.IxSet as IxSet
import Data.List (foldl')
import Data.Maybe
import Data.Monoid
import Data.Time
import System.Directory
import System.FilePath
import Web.RBB.Config
import Web.RBB.Crawler.MetaCombiner
import Web.RBB.Crawler.MetaParser
import Web.RBB.Types as E
import Web.RBB.Types.Blog
import Web.RBB.Util
-- | Initialize the 'Blog' state by providing a path inside a repository.
initBlog :: (Functor io, MonadIO io) => BlogConfig m -> ExceptT String io (Blog m)
initBlog bcfg = do
b <- initialBlog
collectEntryData Nothing b
where
initialBlog = do
(rp, crp, fs) <- initializeFileStore (entryPath bcfg)
Blog <$> pure 1
<*> pure mempty
<*> pure (EntryUpdate (UTCTime (ModifiedJulianDay 0) 0) "")
<*> liftIO getCurrentTime
<*> pure fs
<*> pure mempty
<*> (liftIO . atomically) newTChan
<*> pure rp
<*> pure crp
<*> pure bcfg
-- | Update the entries in the 'Blog' state.
updateBlog :: (Functor io, MonadIO io) => Blog m -> ExceptT String io (Blog m)
updateBlog blog = collectEntryData (Just (blog^.lastEntryUpdate)) blog
collectEntryData :: (Functor io, MonadIO io)
=> Maybe EntryUpdate -- initial (Nothing) or update?
-> Blog m
-> ExceptT String io (Blog m)
collectEntryData eu blog =
let interval = FS.TimeRange (entryUpdateTime <$> eu) Nothing
fs = blog^.fileStore
hist = FS.history fs
notLatestKnownEntry = case entryRevisionId <$> eu of
Nothing -> const True
Just commit -> not . FS.idsMatch fs commit . revId
in foldr collect blog . takeWhile notLatestKnownEntry -- . sortBy (compare `on` revDateTime)
<$> liftIO (hist [blog^.contentRelativePath] interval Nothing)
collect :: Revision -> Blog m -> Blog m
collect r blog = foldl' go blog (revChanges r)
where
go b (Added fp) = maybe b (addEntry r b fp) $ fileTypeFromExtension fp
go b (Modified fp) = maybe b (modEntry r b fp) $ fileTypeFromExtension fp
go b (Deleted fp) = b & entries %~ IxSet.deleteIx (RelativePath fp)
& lastEntryUpdate .~ EntryUpdate (revDateTime r) (revId r)
metaFromRevision :: Revision -> [Meta]
metaFromRevision = either (const []) id . parseMeta . revDescription
addEntry :: Revision -> Blog m -> FilePath -> FileType -> Blog m
addEntry r blog fp ft =
let meta = metaFromRevision r
eu = EntryUpdate (revDateTime r) (revId r)
newEntry = Entry
{ _entryId = blog^.nextEntryId
, E._title = (pack . takeBaseName . dropExtensions) fp
, _author = (pack . FS.authorName . revAuthor) r
, _authorEmail = (pack . FS.authorEmail . revAuthor) r
, E._tags = mempty
, _fileType = ft
, _relativePath = fp
, _fullPath = blog^.repositoryPath </> fp
, _updates = fromList [eu]
, _lastUpdate = eu
}
in blog & nextEntryId %~ succ
& entries %~ contract (Just fp) meta . IxSet.insert newEntry
& lastEntryUpdate .~ eu
modEntry :: Revision -> Blog m -> FilePath -> FileType -> Blog m
modEntry r blog fp _ =
let meta = metaFromRevision r
eu = EntryUpdate (revDateTime r) (revId r)
insertUpdateTime = ixSetModifyIx (RelativePath fp) $ \e ->
e & updates %~ IxSet.insert eu
& lastUpdate .~ eu
in blog & entries %~ (contract (Just fp) meta . insertUpdateTime)
& lastEntryUpdate .~ eu
-- | Initialize a 'FileStore' object for the given directory. This function
-- should automatically detect the underlying repository type and traverse into
-- parent directories if necessary. The result is the associated 'FileStore'
-- object together with the relative path relative to the repository for the
-- blog content.
--
-- The return value is a triplet containing:
-- * The absolute path to the repository
-- * The content relative path inside the repository
-- * The associated 'FileStore' object for the repository
initializeFileStore :: (Functor io, MonadIO io)
=> FilePath
-> ExceptT String io (FilePath, FilePath, FileStore)
initializeFileStore dir = do
cd <- liftIO $ canonicalizePath dir
d <- liftIO $ doesDirectoryExist cd
unless d $ throwE $ "The directory '" ++ cd ++ "' does not exist."
fileStores <- catMaybes `liftM` sequence
[ lift (maybeGit cd)
, lift (maybeDarcs cd)
, lift (maybeMercurial cd)
]
when (Prelude.null fileStores) $ throwE $ concat
[ "The directory '", dir, "' which has been canonicalized to '"
, cd, "' points to an unsupported repository "
, "(includes no repository)."
]
return $ head fileStores
where
maybeGit = maybeFileStore gitFileStore ".git"
maybeDarcs = maybeFileStore darcsFileStore "_darcs"
maybeMercurial = maybeFileStore mercurialFileStore ".hg"
maybeFileStore :: (Functor io, MonadIO io)
=> (FilePath -> FileStore)
-> FilePath
-> FilePath
-> io (Maybe (FilePath, FilePath, FileStore))
maybeFileStore f qry cd =
fmap (\p -> (cd, makeRelative p cd, f p)) <$> findDirInParents cd qry
-- | Search for a directory named as the second argument to this function.
-- Traverse the directory tree up to the root if the directory cannot be found
-- in one of the starting directory's parent directories.
findDirInParents :: (MonadIO io) => FilePath -> FilePath -> io (Maybe FilePath)
findDirInParents dir qry = do
adir <- normalise `liftM` liftIO (canonicalizePath dir)
containsQry . takeWhile (not . isDrive) $ iterate takeDirectory adir
where
containsQry [] = return Nothing
containsQry (d:ds) = do
p <- liftIO $ doesDirectoryExist (d </> qry)
case () of
_ | p -> return $ Just d
_ -> containsQry ds
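-- An illustrative call (hypothetical paths, not from the original source):
--
-- > demo :: IO (Maybe FilePath)
-- > demo = findDirInParents "/home/user/blog/entries" ".git"
-- >   -- Just "/home/user/blog" when that directory holds a ".git"; the walk
-- >   -- stops with Nothing once it reaches the drive root without a match.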
|
saep/repo-based-blog
|
library/Web/RBB/Crawler/Repository.hs
|
bsd-3-clause
| 7,197 | 0 | 23 | 2,307 | 1,783 | 924 | 859 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Control.Monad
import Data.Binary
import Network.HTTP.Client (newManager)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Servant.Client
import System.Directory
import Text.Printf (printf)
import Text.Show.Pretty
import qualified Data.Map as Map
import qualified Data.ByteString.Lazy as BS.L
import qualified Data.Text as T
import Servant.ChinesePod.API
import Servant.ChinesePod.Client
import qualified Servant.ChinesePod.Vocab.V2 as Vocab
import Options
-- | Download the full lesson index
downloadIndex :: ChinesePodAPI -> RespLogin -> ClientM ()
downloadIndex ChinesePodAPI{..} RespLogin{..} = do
liftIO $ createDirectoryIfMissing True "./index"
go 0
where
go :: Int -> ClientM ()
go page = do
pageDownloaded <- liftIO $ doesFileExist pageFile
if pageDownloaded
then do
liftIO $ putStrLn $ "Skipping page " ++ show page
go (page + 1)
else do
liftIO $ putStrLn $ "Downloading page " ++ show page
respGetLatestLessons <- cpodGetLatestLessons ReqGetLatestLessons {
reqGetLatestLessonsAccessToken = respLoginAccessToken
, reqGetLatestLessonsUserId = respLoginUserId
, reqGetLatestLessonsPage = Just page
, reqGetLatestLessonsCount = Just resultsPerPage
, reqGetLatestLessonsLang = Nothing
, reqGetLatestLessonsLevelId = Nothing
}
if null (searchResults respGetLatestLessons)
then
liftIO $ putStrLn "Done"
else do
liftIO $ encodeFile pageFile respGetLatestLessons
go (page + 1)
where
pageFile = "./index/" ++ printf "%04d" page
resultsPerPage :: Int
resultsPerPage = 10
-- | Download lesson content
downloadContent :: ChinesePodAPI -> RespLogin -> ClientM ()
downloadContent ChinesePodAPI{..} RespLogin{..} = do
liftIO $ createDirectoryIfMissing True "./content"
goPage 0
where
goPage :: Int -> ClientM ()
goPage pageNum = do
pageExists <- liftIO $ doesFileExist pageFile
if not pageExists
then liftIO $ putStrLn "Done"
else do
page <- liftIO $ decodeFile pageFile
mapM_ (goLesson . snd) (Map.toList (searchResults page))
goPage (pageNum + 1)
where
pageFile = "./index/" ++ printf "%04d" pageNum
goLesson :: Lesson -> ClientM ()
goLesson Lesson{lessonV3Id = V3Id lessonId} = do
lessonExists <- liftIO $ doesFileExist lessonFile
if lessonExists
then do
liftIO $ putStrLn $ "Skipping lesson " ++ lessonId
else do
liftIO $ putStrLn $ "Downloading lesson " ++ lessonId
content <- cpodGetLesson ReqGetLesson {
reqGetLessonAccessToken = respLoginAccessToken
, reqGetLessonUserId = respLoginUserId
, reqGetLessonV3Id = V3Id lessonId
, reqGetLessonType = Nothing
}
liftIO $ encodeFile lessonFile content
where
lessonFile = "./content/" ++ lessonId
-- | Export all vocabulary to a single file
exportVocab :: IO ()
exportVocab = do
lessonFiles <- filter (not . hidden) <$> getDirectoryContents "./content"
lessons <- mapM decodeFile $ map ("./content/" ++) lessonFiles
let (skipped, vocab) = Vocab.extractVocab lessons
encodeFile "./vocab" vocab
forM_ skipped $ \Vocab.Skipped{..} -> putStrLn $ concat [
"Skipped " ++ v3IdString skippedV3Id
, " (" ++ skippedTitle ++ "): "
, T.unpack skippedReason
]
where
hidden :: FilePath -> Bool
hidden ('.':_) = True
hidden _ = False
exec :: Command -> IO ()
exec (CommandSearch optsCPod optsCmd) =
withCPod optsCPod $ \ChinesePodAPI{..} respLogin -> do
respSearchLessons <- cpodSearchLessons $ fromLogin respLogin optsCmd
liftIO $ putStrLn $ dumpStr respSearchLessons
exec (CommandLatest optsCPod optsCmd) =
withCPod optsCPod $ \ChinesePodAPI{..} respLogin -> do
respGetLatestLessons <- cpodGetLatestLessons $ fromLogin respLogin optsCmd
liftIO $ putStrLn $ dumpStr respGetLatestLessons
exec (CommandGetLesson optsCPod optsCmd) =
withCPod optsCPod $ \ChinesePodAPI{..} respLogin -> do
respGetLesson <- cpodGetLesson $ fromLogin respLogin optsCmd
liftIO $ putStrLn $ dumpStr respGetLesson
exec (CommandDownloadIndex optsCPod) =
withCPod optsCPod $ downloadIndex
exec (CommandDownloadContent optsCPod) =
withCPod optsCPod $ downloadContent
exec CommandExportVocab =
exportVocab
withCPod :: OptionsCPod
-> (ChinesePodAPI -> RespLogin -> ClientM ())
-> IO ()
withCPod OptionsCPod{..} handler = do
mgr <- newManager tlsManagerSettings
mRes <- runClientM runHandler (ClientEnv mgr optionsBaseUrl)
case mRes of
Left err -> logServantError err
Right () -> return ()
where
runHandler :: ClientM ()
runHandler = do
respLogin <- cpodLogin optionsReqLogin
handler cpodAPI respLogin
OK <- cpodLogout $ fromLogin respLogin ()
return ()
cpodAPI@ChinesePodAPI{..} = chinesePodAPI
logServantError :: ServantError -> IO ()
logServantError err = do
let (err', mBody) = body err
case mBody of
Nothing -> return ()
Just bs -> BS.L.writeFile "responseBody.servant" bs
print err'
where
body :: ServantError -> (ServantError, Maybe BS.L.ByteString)
body (FailureResponse r) = ( FailureResponse r { responseBody = omitted } , Just (responseBody r) )
body (DecodeFailure txt r) = ( DecodeFailure txt r { responseBody = omitted } , Just (responseBody r) )
body (UnsupportedContentType typ r) = ( UnsupportedContentType typ r { responseBody = omitted } , Just (responseBody r) )
body (InvalidContentTypeHeader r) = ( InvalidContentTypeHeader r { responseBody = omitted } , Just (responseBody r) )
body (ConnectionError e ) = ( ConnectionError e , Nothing )
omitted :: BS.L.ByteString
omitted = "<<written to responseBody.servant>>"
main :: IO ()
main = do
Options{..} <- getOptions
exec optionsCommand
|
edsko/ChinesePodAPI
|
src-main/Main.hs
|
bsd-3-clause
| 6,441 | 0 | 17 | 1,802 | 1,732 | 869 | 863 | -1 | -1 |
module Main where
import qualified Compiler
main :: IO ()
main = either print putStrLn =<< Compiler.compile =<< getContents
|
letsbreelhere/egg
|
app/EggcExe.hs
|
bsd-3-clause
| 126 | 0 | 7 | 21 | 38 | 21 | 17 | 4 | 1 |
{-# LANGUAGE TupleSections, OverloadedStrings, QuasiQuotes, TemplateHaskell, TypeFamilies, RecordWildCards,
DeriveGeneric ,MultiParamTypeClasses ,FlexibleInstances #-}
module Protocol.ROC.PointTypes.PointType172 where
import GHC.Generics
import qualified Data.ByteString as BS
import Data.Word
import Data.Binary
import Data.Binary.Get
import Protocol.ROC.Utils
data PointType172 = PointType172 {
pointType172RemoteRTUTag :: !PointType172RemoteRTUTag
,pointType172ROCDeviceID :: !PointType172ROCDeviceID
,pointType172ComissionListIndex :: !PointType172ComissionListIndex
,pointType172ComissionFlag :: !PointType172ComissionFlag
} deriving (Eq, Show, Generic)
type PointType172RemoteRTUTag = BS.ByteString
type PointType172ROCDeviceID = Word32
type PointType172ComissionListIndex = Word8
type PointType172ComissionFlag = Word8
pointType172Parser :: Get PointType172
pointType172Parser = do
remoteRTUTag <- getByteString 20
rOCDeviceID <- getWord32le
comissionListIndex <- getWord8
comissionFlag <- getWord8
return $ PointType172 remoteRTUTag rOCDeviceID comissionListIndex comissionFlag
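-- A minimal usage sketch (hypothetical input, not part of the original
-- module): the parser consumes 26 bytes (a 20-byte tag, one Word32 and two
-- Word8s) and can be run with 'runGet' from "Data.Binary.Get" on a lazy
-- ByteString.
--
-- > import qualified Data.ByteString.Lazy as BL
-- > example :: PointType172
-- > example = runGet pointType172Parser (BL.replicate 26 0)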
|
jqpeterson/roc-translator
|
src/Protocol/ROC/PointTypes/PointType172.hs
|
bsd-3-clause
| 1,385 | 0 | 9 | 394 | 182 | 103 | 79 | 34 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
module Network.Sock.Types.Session
( Session(..)
, SessionStatus(..)
, SessionID
) where
------------------------------------------------------------------------------
import Control.Concurrent.Lifted (ThreadId)
import Control.Concurrent.MVar.Lifted (MVar)
import Control.Concurrent.STM.TMChan (TMChan)
import Control.Concurrent.Timer.Lifted (Timer)
import Control.Monad.Trans.Control (MonadBaseControl)
------------------------------------------------------------------------------
import qualified Data.ByteString.Lazy as BL (ByteString)
import qualified Data.Conduit as C (ResourceT)
import qualified Data.Text as TS (Text)
------------------------------------------------------------------------------
import Network.Sock.Types.Protocol
------------------------------------------------------------------------------
-- | Session
data Session where
Session ::
{ sessionID :: SessionID
        , sessionStatus :: MVar SessionStatus          -- ^ The status can be "fresh", "opened" or "closed"; if the MVar is empty, it should be interpreted as "currently being used"/"waiting".
, sessionIncomingBuffer :: TMChan BL.ByteString -- ^ This buffer is filled with incoming messages (parsed from request body or from WS' receive).
, sessionOutgoingBuffer :: TMChan Protocol -- ^ This buffer is filled with outgoing messages which are then sent (as a response or with WS' sendSink).
, sessionApplicationThread :: MVar (Maybe ThreadId) -- ^ If the MVar is empty, some thread is already trying to fork application.
                                                       -- If it contains Nothing, no one is forking nor has anyone forked yet.
-- If it contains Just a value, application was already forked.
-- * Timers. They are restarted with every request made by the client to this session.
--, sessionTimeoutTimer :: Timer (C.ResourceT IO) -- ^ Timer that every 5 seconds closes the session. If the session is already closed, it removes it from the session map.
--, sessionHeartbeatTimer :: Timer (C.ResourceT IO) -- ^ Timer that every 25 seconds sends a heartbeat frame.
} -> Session
-- | SessionID
type SessionID = TS.Text
-- | SessionStatus
data SessionStatus
= SessionFresh -- ^ Right after creation, Session is "Fresh"
| SessionOpened -- ^ Right after we send opening frame, Session is "Opened". We also start the timeout & heartbeat timer at this point.
| SessionClosed Int BL.ByteString -- ^ Right after we send closing frame, Session is "Closed".
|
Palmik/wai-sockjs
|
src/Network/Sock/Types/Session.hs
|
bsd-3-clause
| 2,973 | 0 | 10 | 850 | 243 | 164 | 79 | 28 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Tests.Readers.LaTeX (tests) where
import Text.Pandoc.Definition
import Test.Framework
import Tests.Helpers
import Tests.Arbitrary()
import Text.Pandoc.Builder
import Text.Pandoc
latex :: String -> Pandoc
latex = readLaTeX defaultParserState
infix 5 =:
(=:) :: ToString c
=> String -> (String, c) -> Test
(=:) = test latex
tests :: [Test]
tests = [ testGroup "basic"
[ "simple" =:
"word" =?> para "word"
, "space" =:
"some text" =?> para ("some text")
, "emphasized" =:
"\\emph{emphasized}" =?> para (emph "emphasized")
]
, testGroup "headers"
[ "level 1" =:
"\\section{header}" =?> header 1 "header"
, "level 2" =:
"\\subsection{header}" =?> header 2 "header"
, "level 3" =:
"\\subsubsection{header}" =?> header 3 "header"
, "emph" =:
"\\section{text \\emph{emph}}" =?>
header 1 ("text" +++ space +++ emph "emph")
, "link" =:
"\\section{text \\href{/url}{link}}" =?>
header 1 ("text" +++ space +++ link "/url" "" "link")
]
, testGroup "space and comments"
[ "blank lines + space at beginning" =:
"\n \n hi" =?> para "hi"
, "blank lines + space + comments" =:
"% my comment\n\n \n % another\n\nhi" =?> para "hi"
, "comment in paragraph" =:
"hi % this is a comment\nthere\n" =?> para "hi there"
]
, testGroup "citations"
[ natbibCitations
, biblatexCitations
]
]
baseCitation :: Citation
baseCitation = Citation{ citationId = "item1"
, citationPrefix = []
, citationSuffix = []
, citationMode = AuthorInText
, citationNoteNum = 0
, citationHash = 0 }
natbibCitations :: Test
natbibCitations = testGroup "natbib"
[ "citet" =: "\\citet{item1}"
=?> para (cite [baseCitation] empty)
, "suffix" =: "\\citet[p.~30]{item1}"
=?> para
(cite [baseCitation{ citationSuffix = toList $ text "p.\160\&30" }] empty)
, "suffix long" =: "\\citet[p.~30, with suffix]{item1}"
=?> para (cite [baseCitation{ citationSuffix =
toList $ text "p.\160\&30, with suffix" }] empty)
, "multiple" =: "\\citeauthor{item1} \\citetext{\\citeyear{item1}; \\citeyear[p.~30]{item2}; \\citealp[see also][]{item3}}"
=?> para (cite [baseCitation{ citationMode = AuthorInText }
,baseCitation{ citationMode = SuppressAuthor
, citationSuffix = [Str "p.\160\&30"]
, citationId = "item2" }
,baseCitation{ citationId = "item3"
, citationPrefix = [Str "see",Space,Str "also"]
, citationMode = NormalCitation }
] empty)
, "group" =: "\\citetext{\\citealp[see][p.~34--35]{item1}; \\citealp[also][chap. 3]{item3}}"
=?> para (cite [baseCitation{ citationMode = NormalCitation
, citationPrefix = [Str "see"]
, citationSuffix = [Str "p.\160\&34",EnDash,Str "35"] }
,baseCitation{ citationMode = NormalCitation
, citationId = "item3"
, citationPrefix = [Str "also"]
, citationSuffix = [Str "chap.",Space,Str "3"] }
] empty)
, "suffix and locator" =: "\\citep[pp.~33, 35--37, and nowhere else]{item1}"
=?> para (cite [baseCitation{ citationMode = NormalCitation
, citationSuffix = [Str "pp.\160\&33,",Space,Str "35",EnDash,Str "37,",Space,Str "and",Space,Str "nowhere",Space, Str "else"] }] empty)
, "suffix only" =: "\\citep[and nowhere else]{item1}"
=?> para (cite [baseCitation{ citationMode = NormalCitation
, citationSuffix = toList $ text "and nowhere else" }] empty)
, "no author" =: "\\citeyearpar{item1}, and now Doe with a locator \\citeyearpar[p.~44]{item2}"
=?> para (cite [baseCitation{ citationMode = SuppressAuthor }] empty +++
text ", and now Doe with a locator " +++
cite [baseCitation{ citationMode = SuppressAuthor
, citationSuffix = [Str "p.\160\&44"]
, citationId = "item2" }] empty)
, "markup" =: "\\citep[\\emph{see}][p. \\textbf{32}]{item1}"
=?> para (cite [baseCitation{ citationMode = NormalCitation
, citationPrefix = [Emph [Str "see"]]
, citationSuffix = [Str "p.",Space,
Strong [Str "32"]] }] empty)
]
biblatexCitations :: Test
biblatexCitations = testGroup "biblatex"
[ "textcite" =: "\\textcite{item1}"
=?> para (cite [baseCitation] empty)
, "suffix" =: "\\textcite[p.~30]{item1}"
=?> para
(cite [baseCitation{ citationSuffix = toList $ text "p.\160\&30" }] empty)
, "suffix long" =: "\\textcite[p.~30, with suffix]{item1}"
=?> para (cite [baseCitation{ citationSuffix =
toList $ text "p.\160\&30, with suffix" }] empty)
, "multiple" =: "\\textcites{item1}[p.~30]{item2}[see also][]{item3}"
=?> para (cite [baseCitation{ citationMode = AuthorInText }
,baseCitation{ citationMode = NormalCitation
, citationSuffix = [Str "p.\160\&30"]
, citationId = "item2" }
,baseCitation{ citationId = "item3"
, citationPrefix = [Str "see",Space,Str "also"]
, citationMode = NormalCitation }
] empty)
, "group" =: "\\autocites[see][p.~34--35]{item1}[also][chap. 3]{item3}"
=?> para (cite [baseCitation{ citationMode = NormalCitation
, citationPrefix = [Str "see"]
, citationSuffix = [Str "p.\160\&34",EnDash,Str "35"] }
,baseCitation{ citationMode = NormalCitation
, citationId = "item3"
, citationPrefix = [Str "also"]
, citationSuffix = [Str "chap.",Space,Str "3"] }
] empty)
, "suffix and locator" =: "\\autocite[pp.~33, 35--37, and nowhere else]{item1}"
=?> para (cite [baseCitation{ citationMode = NormalCitation
, citationSuffix = [Str "pp.\160\&33,",Space,Str "35",EnDash,Str "37,",Space,Str "and",Space,Str "nowhere",Space, Str "else"] }] empty)
, "suffix only" =: "\\autocite[and nowhere else]{item1}"
=?> para (cite [baseCitation{ citationMode = NormalCitation
, citationSuffix = toList $ text "and nowhere else" }] empty)
, "no author" =: "\\autocite*{item1}, and now Doe with a locator \\autocite*[p.~44]{item2}"
=?> para (cite [baseCitation{ citationMode = SuppressAuthor }] empty +++
text ", and now Doe with a locator " +++
cite [baseCitation{ citationMode = SuppressAuthor
, citationSuffix = [Str "p.\160\&44"]
, citationId = "item2" }] empty)
, "markup" =: "\\autocite[\\emph{see}][p. \\textbf{32}]{item1}"
=?> para (cite [baseCitation{ citationMode = NormalCitation
, citationPrefix = [Emph [Str "see"]]
, citationSuffix = [Str "p.",Space,
Strong [Str "32"]] }] empty)
, "parencite" =: "\\parencite{item1}"
=?> para (cite [baseCitation{ citationMode = NormalCitation }] empty)
]
|
Lythimus/lptv
|
sites/all/modules/jgm-pandoc-8be6cc2/src/Tests/Readers/LaTeX.hs
|
gpl-2.0
| 7,965 | 0 | 17 | 2,765 | 1,705 | 947 | 758 | 144 | 1 |
-- LANGUAGE: Haskell
-- AUTHOR: Brent Scheppmann
-- GITHUB: https://github.com/bareon
-- COMPILE: ghc --make hello_world_bareon.hs -o hello
module Main where
main :: IO ()
main = putStrLn "Hello World!!"
|
bikoheke/hacktoberfest
|
scripts/hello_world_bareon.hs
|
gpl-3.0
| 206 | 0 | 6 | 31 | 26 | 16 | 10 | 3 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CloudWatchLogs.DescribeLogGroups
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns all the log groups that are associated with the AWS account making
-- the request. The list returned in the response is ASCII-sorted by log group
-- name.
--
-- By default, this operation returns up to 50 log groups. If there are more
-- log groups to list, the response would contain a 'nextToken' value in the
-- response body. You can also limit the number of log groups returned in the
-- response by specifying the 'limit' parameter in the request.
--
-- <http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_DescribeLogGroups.html>
module Network.AWS.CloudWatchLogs.DescribeLogGroups
(
-- * Request
DescribeLogGroups
-- ** Request constructor
, describeLogGroups
-- ** Request lenses
, dlgLimit
, dlgLogGroupNamePrefix
, dlgNextToken
-- * Response
, DescribeLogGroupsResponse
-- ** Response constructor
, describeLogGroupsResponse
-- ** Response lenses
, dlgrLogGroups
, dlgrNextToken
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CloudWatchLogs.Types
import qualified GHC.Exts
data DescribeLogGroups = DescribeLogGroups
{ _dlgLimit :: Maybe Nat
, _dlgLogGroupNamePrefix :: Maybe Text
, _dlgNextToken :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'DescribeLogGroups' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dlgLimit' @::@ 'Maybe' 'Natural'
--
-- * 'dlgLogGroupNamePrefix' @::@ 'Maybe' 'Text'
--
-- * 'dlgNextToken' @::@ 'Maybe' 'Text'
--
describeLogGroups :: DescribeLogGroups
describeLogGroups = DescribeLogGroups
{ _dlgLogGroupNamePrefix = Nothing
, _dlgNextToken = Nothing
, _dlgLimit = Nothing
}
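-- A minimal usage sketch (hypothetical values, not part of the original
-- module; '&' and '?~' are assumed to come from the lens package): build a
-- request listing up to 10 log groups whose names start with a given prefix.
--
-- > describeLogGroups & dlgLogGroupNamePrefix ?~ "/aws/lambda/"
-- >                   & dlgLimit              ?~ 10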
-- | The maximum number of items returned in the response. If you don't specify a
-- value, the request would return up to 50 items.
dlgLimit :: Lens' DescribeLogGroups (Maybe Natural)
dlgLimit = lens _dlgLimit (\s a -> s { _dlgLimit = a }) . mapping _Nat
dlgLogGroupNamePrefix :: Lens' DescribeLogGroups (Maybe Text)
dlgLogGroupNamePrefix =
lens _dlgLogGroupNamePrefix (\s a -> s { _dlgLogGroupNamePrefix = a })
-- | A string token used for pagination that points to the next page of results.
-- It must be a value obtained from the response of the previous 'DescribeLogGroups' request.
dlgNextToken :: Lens' DescribeLogGroups (Maybe Text)
dlgNextToken = lens _dlgNextToken (\s a -> s { _dlgNextToken = a })
data DescribeLogGroupsResponse = DescribeLogGroupsResponse
{ _dlgrLogGroups :: List "logGroups" LogGroup
, _dlgrNextToken :: Maybe Text
} deriving (Eq, Read, Show)
-- | 'DescribeLogGroupsResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dlgrLogGroups' @::@ ['LogGroup']
--
-- * 'dlgrNextToken' @::@ 'Maybe' 'Text'
--
describeLogGroupsResponse :: DescribeLogGroupsResponse
describeLogGroupsResponse = DescribeLogGroupsResponse
{ _dlgrLogGroups = mempty
, _dlgrNextToken = Nothing
}
dlgrLogGroups :: Lens' DescribeLogGroupsResponse [LogGroup]
dlgrLogGroups = lens _dlgrLogGroups (\s a -> s { _dlgrLogGroups = a }) . _List
dlgrNextToken :: Lens' DescribeLogGroupsResponse (Maybe Text)
dlgrNextToken = lens _dlgrNextToken (\s a -> s { _dlgrNextToken = a })
instance ToPath DescribeLogGroups where
toPath = const "/"
instance ToQuery DescribeLogGroups where
toQuery = const mempty
instance ToHeaders DescribeLogGroups
instance ToJSON DescribeLogGroups where
toJSON DescribeLogGroups{..} = object
[ "logGroupNamePrefix" .= _dlgLogGroupNamePrefix
, "nextToken" .= _dlgNextToken
, "limit" .= _dlgLimit
]
instance AWSRequest DescribeLogGroups where
type Sv DescribeLogGroups = CloudWatchLogs
type Rs DescribeLogGroups = DescribeLogGroupsResponse
request = post "DescribeLogGroups"
response = jsonResponse
instance FromJSON DescribeLogGroupsResponse where
parseJSON = withObject "DescribeLogGroupsResponse" $ \o -> DescribeLogGroupsResponse
<$> o .:? "logGroups" .!= mempty
<*> o .:? "nextToken"
|
kim/amazonka
|
amazonka-cloudwatch-logs/gen/Network/AWS/CloudWatchLogs/DescribeLogGroups.hs
|
mpl-2.0
| 5,240 | 0 | 12 | 1,106 | 683 | 408 | 275 | 74 | 1 |
-- | Maintainer: Félix Sipma <[email protected]>
module Propellor.Property.DebianMirror
( DebianPriority (..)
, showPriority
, mirror
, RsyncExtra (..)
, Method (..)
, DebianMirror
, debianMirrorHostName
, debianMirrorSuites
, debianMirrorArchitectures
, debianMirrorSections
, debianMirrorSourceBool
, debianMirrorPriorities
, debianMirrorMethod
, debianMirrorKeyring
, debianMirrorRsyncExtra
, mkDebianMirror
) where
import Propellor.Base
import qualified Propellor.Property.File as File
import qualified Propellor.Property.Apt as Apt
import qualified Propellor.Property.Cron as Cron
import qualified Propellor.Property.User as User
import Data.List
data DebianPriority = Essential | Required | Important | Standard | Optional | Extra
deriving (Show, Eq)
showPriority :: DebianPriority -> String
showPriority Essential = "essential"
showPriority Required = "required"
showPriority Important = "important"
showPriority Standard = "standard"
showPriority Optional = "optional"
showPriority Extra = "extra"
data RsyncExtra = Doc | Indices | Tools | Trace
deriving (Show, Eq)
showRsyncExtra :: RsyncExtra -> String
showRsyncExtra Doc = "doc"
showRsyncExtra Indices = "indices"
showRsyncExtra Tools = "tools"
showRsyncExtra Trace = "trace"
data Method = Ftp | Http | Https | Rsync | MirrorFile
showMethod :: Method -> String
showMethod Ftp = "ftp"
showMethod Http = "http"
showMethod Https = "https"
showMethod Rsync = "rsync"
showMethod MirrorFile = "file"
-- | To get a new DebianMirror and set options, use:
--
-- > mkDebianMirror mymirrordir mycrontimes
-- > . debianMirrorHostName "otherhostname"
-- > . debianMirrorSourceBool True
data DebianMirror = DebianMirror
{ _debianMirrorHostName :: HostName
, _debianMirrorDir :: FilePath
, _debianMirrorSuites :: [DebianSuite]
, _debianMirrorArchitectures :: [Architecture]
, _debianMirrorSections :: [Apt.Section]
, _debianMirrorSourceBool :: Bool
, _debianMirrorPriorities :: [DebianPriority]
, _debianMirrorMethod :: Method
, _debianMirrorKeyring :: FilePath
, _debianMirrorRsyncExtra :: [RsyncExtra]
, _debianMirrorCronTimes :: Cron.Times
}
mkDebianMirror :: FilePath -> Cron.Times -> DebianMirror
mkDebianMirror dir crontimes = DebianMirror
{ _debianMirrorHostName = "deb.debian.org"
, _debianMirrorDir = dir
, _debianMirrorSuites = []
, _debianMirrorArchitectures = []
, _debianMirrorSections = []
, _debianMirrorSourceBool = False
, _debianMirrorPriorities = []
, _debianMirrorMethod = Http
, _debianMirrorKeyring = "/usr/share/keyrings/debian-archive-keyring.gpg"
, _debianMirrorRsyncExtra = [Trace]
, _debianMirrorCronTimes = crontimes
}
debianMirrorHostName :: HostName -> DebianMirror -> DebianMirror
debianMirrorHostName hn m = m { _debianMirrorHostName = hn }
debianMirrorSuites :: [DebianSuite] -> DebianMirror -> DebianMirror
debianMirrorSuites s m = m { _debianMirrorSuites = s }
debianMirrorArchitectures :: [Architecture] -> DebianMirror -> DebianMirror
debianMirrorArchitectures a m = m { _debianMirrorArchitectures = a }
debianMirrorSections :: [Apt.Section] -> DebianMirror -> DebianMirror
debianMirrorSections s m = m { _debianMirrorSections = s }
debianMirrorSourceBool :: Bool -> DebianMirror -> DebianMirror
debianMirrorSourceBool s m = m { _debianMirrorSourceBool = s }
debianMirrorPriorities :: [DebianPriority] -> DebianMirror -> DebianMirror
debianMirrorPriorities p m = m { _debianMirrorPriorities = p }
debianMirrorMethod :: Method -> DebianMirror -> DebianMirror
debianMirrorMethod me m = m { _debianMirrorMethod = me }
debianMirrorKeyring :: FilePath -> DebianMirror -> DebianMirror
debianMirrorKeyring k m = m { _debianMirrorKeyring = k }
debianMirrorRsyncExtra :: [RsyncExtra] -> DebianMirror -> DebianMirror
debianMirrorRsyncExtra r m = m { _debianMirrorRsyncExtra = r }
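-- A hypothetical sketch (the placeholder values 'mycrontimes', 'mysuites' and
-- 'myarches' are not part of this module): the record built with
-- 'mkDebianMirror' and the setters above is what 'mirror' below consumes,
-- e.g. in a host's property list:
--
-- > & mirror ( debianMirrorSuites mysuites
-- >          . debianMirrorArchitectures myarches
-- >          $ mkDebianMirror "/srv/debian" mycrontimes )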
mirror :: DebianMirror -> Property DebianLike
mirror mirror' = propertyList ("Debian mirror " ++ dir) $ props
& Apt.installed ["debmirror"]
& User.accountFor (User "debmirror")
& File.dirExists dir
& File.ownerGroup dir (User "debmirror") (Group "debmirror")
& check (not . and <$> mapM suitemirrored suites)
(cmdProperty "debmirror" args)
`describe` "debmirror setup"
& Cron.niceJob ("debmirror_" ++ dir) (_debianMirrorCronTimes mirror') (User "debmirror") "/"
(unwords ("/usr/bin/debmirror" : args))
where
dir = _debianMirrorDir mirror'
suites = _debianMirrorSuites mirror'
suitemirrored suite = doesDirectoryExist $ dir </> "dists" </> Apt.showSuite suite
architecturearg = intercalate ","
suitearg = intercalate "," $ map Apt.showSuite suites
priorityRegex pp = "(" ++ intercalate "|" (map showPriority pp) ++ ")"
rsyncextraarg [] = "none"
rsyncextraarg res = intercalate "," $ map showRsyncExtra res
args =
[ "--dist" , suitearg
, "--arch", architecturearg $ map architectureToDebianArchString (_debianMirrorArchitectures mirror')
, "--section", intercalate "," $ _debianMirrorSections mirror'
, "--limit-priority", "\"" ++ priorityRegex (_debianMirrorPriorities mirror') ++ "\""
]
++
(if _debianMirrorSourceBool mirror' then [] else ["--nosource"])
++
[ "--host", _debianMirrorHostName mirror'
, "--method", showMethod $ _debianMirrorMethod mirror'
, "--rsync-extra", rsyncextraarg $ _debianMirrorRsyncExtra mirror'
, "--keyring", _debianMirrorKeyring mirror'
, dir
]
|
ArchiveTeam/glowing-computing-machine
|
src/Propellor/Property/DebianMirror.hs
|
bsd-2-clause
| 5,363 | 32 | 15 | 782 | 1,330 | 747 | 583 | 121 | 3 |
module Usage.Usage where
import qualified Definition.Definition as Definition
test :: Int
test = Definition.s<caret>even + 1
|
charleso/intellij-haskforce
|
tests/gold/codeInsight/QualifiedImportMultipleLevels/Usage/Usage.hs
|
apache-2.0
| 129 | 0 | 8 | 20 | 38 | 23 | 15 | -1 | -1 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Control.Concurrent (module M) where
import "base" Control.Concurrent as M
|
Ye-Yong-Chi/codeworld
|
codeworld-base/src/Control/Concurrent.hs
|
apache-2.0
| 751 | 0 | 4 | 136 | 23 | 17 | 6 | 4 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.CycleWS
-- Copyright : (c) Joachim Breitner <[email protected]>,
-- Nelson Elhage <[email protected]> (`toggleWS' function)
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Joachim Breitner <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- Provides bindings to cycle forward or backward through the list of
-- workspaces, to move windows between workspaces, and to cycle
-- between screens. More general combinators provide ways to cycle
-- through workspaces in various orders, to only cycle through some
-- subset of workspaces, and to cycle by more than one workspace at a
-- time.
--
-- Note that this module now subsumes the functionality of the former
-- @XMonad.Actions.RotView@. Former users of @rotView@ can simply replace
-- @rotView True@ with @moveTo Next NonEmptyWS@, and so on.
--
-- If you want to exactly replicate the action of @rotView@ (cycling
-- through workspace in order lexicographically by tag, instead of in
-- the order specified in the config), it can be implemented as:
--
-- > rotView b = do t <- findWorkspace getSortByTag (bToDir b) NonEmptyWS 1
-- > windows . greedyView $ t
-- > where bToDir True = Next
-- > bToDir False = Prev
--
-----------------------------------------------------------------------------
module XMonad.Actions.CycleWS (
-- * Usage
-- $usage
-- * Moving between workspaces
-- $moving
nextWS
, prevWS
, shiftToNext
, shiftToPrev
-- * Toggling the previous workspace
-- $toggling
, toggleWS
, toggleWS'
, toggleOrView
-- * Moving between screens (xinerama)
, nextScreen
, prevScreen
, shiftNextScreen
, shiftPrevScreen
, swapNextScreen
, swapPrevScreen
-- * Moving between workspaces, take two!
-- $taketwo
, Direction1D(..)
, WSType(..)
, shiftTo
, moveTo
, doTo
-- * The mother-combinator
, findWorkspace
, toggleOrDoSkip
, skipTags
, screenBy
) where
import Control.Monad ( unless )
import Data.List ( findIndex )
import Data.Maybe ( isNothing, isJust )
import XMonad hiding (workspaces)
import XMonad.StackSet hiding (filter)
import XMonad.Util.Types
import XMonad.Util.WorkspaceCompare
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@ file:
--
-- > import XMonad.Actions.CycleWS
-- >
-- > -- a basic CycleWS setup
-- >
-- > , ((modm, xK_Down), nextWS)
-- > , ((modm, xK_Up), prevWS)
-- > , ((modm .|. shiftMask, xK_Down), shiftToNext)
-- > , ((modm .|. shiftMask, xK_Up), shiftToPrev)
-- > , ((modm, xK_Right), nextScreen)
-- > , ((modm, xK_Left), prevScreen)
-- > , ((modm .|. shiftMask, xK_Right), shiftNextScreen)
-- > , ((modm .|. shiftMask, xK_Left), shiftPrevScreen)
-- > , ((modm, xK_z), toggleWS)
--
-- If you want to follow the moved window, you can use both actions:
--
-- > , ((modm .|. shiftMask, xK_Down), shiftToNext >> nextWS)
-- > , ((modm .|. shiftMask, xK_Up), shiftToPrev >> prevWS)
--
-- You can also get fancier with 'moveTo', 'shiftTo', and 'findWorkspace'.
-- For example:
--
-- > , ((modm , xK_f), moveTo Next EmptyWS) -- find a free workspace
-- > , ((modm .|. controlMask, xK_Right), -- a crazy keybinding!
-- > do t <- findWorkspace getSortByXineramaRule Next NonEmptyWS 2
-- > windows . view $ t )
--
-- For detailed instructions on editing your key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
{- $moving
The following commands for moving the view and windows between
workspaces are somewhat inflexible, but are very simple and probably
Do The Right Thing for most users.
All of the commands in this section cycle through workspaces in the
order in which they are given in your config.
-}
-- | Switch to the next workspace.
nextWS :: X ()
nextWS = switchWorkspace 1
-- | Switch to the previous workspace.
prevWS :: X ()
prevWS = switchWorkspace (-1)
-- | Move the focused window to the next workspace.
shiftToNext :: X ()
shiftToNext = shiftBy 1
-- | Move the focused window to the previous workspace.
shiftToPrev :: X ()
shiftToPrev = shiftBy (-1)
-- $toggling
-- | Toggle to the workspace displayed previously.
toggleWS :: X ()
toggleWS = toggleWS' []
-- | Toggle to the previous workspace while excluding some workspaces.
--
-- > -- Ignore the scratchpad workspace while toggling:
-- > ("M-b", toggleWS' ["NSP"])
toggleWS' :: [WorkspaceId] -> X ()
toggleWS' skips = do
hs' <- cleanHiddens skips
unless (null hs') (windows . view . tag $ head hs')
-- | 'XMonad.StackSet.greedyView' a workspace, or if already there, view
-- the previously displayed workspace ala weechat. Change @greedyView@ to
-- @toggleOrView@ in your workspace bindings as in the 'XMonad.StackSet.view'
-- faq at <http://haskell.org/haskellwiki/Xmonad/Frequently_asked_questions>.
-- For more flexibility see 'toggleOrDoSkip'.
toggleOrView :: WorkspaceId -> X ()
toggleOrView = toggleOrDoSkip [] greedyView
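-- A minimal sketch (hypothetical keybinding; 'modm' and 'myWorkspaces' are
-- the usual names from a user's config, not defined in this module):
--
-- > , ((modm, xK_1), toggleOrView (myWorkspaces !! 0))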
-- | Allows ignoring listed workspace tags (such as scratchpad's \"NSP\"), and
-- running other actions such as view, shift, etc. For example:
--
-- > import qualified XMonad.StackSet as W
-- > import XMonad.Actions.CycleWS
-- >
-- > -- toggleOrView for people who prefer view to greedyView
-- > toggleOrView' = toggleOrDoSkip [] W.view
-- >
-- > -- toggleOrView ignoring scratchpad and named scratchpad workspace
-- > toggleOrViewNoSP = toggleOrDoSkip ["NSP"] W.greedyView
toggleOrDoSkip :: [WorkspaceId] -> (WorkspaceId -> WindowSet -> WindowSet)
-> WorkspaceId -> X ()
toggleOrDoSkip skips f toWS = do
hs' <- cleanHiddens skips
cur <- gets (currentTag . windowset)
if toWS == cur
then unless (null hs') (windows . f . tag $ head hs')
else windows (f toWS)
-- | List difference ('\\') for workspaces and tags. Removes workspaces
-- matching listed tags from the given workspace list.
skipTags :: (Eq i) => [Workspace i l a] -> [i] -> [Workspace i l a]
skipTags wss ids = filter ((`notElem` ids) . tag) wss
cleanHiddens :: [WorkspaceId] -> X [WindowSpace]
cleanHiddens skips = gets $ (flip skipTags) skips . hidden . windowset
switchWorkspace :: Int -> X ()
switchWorkspace d = wsBy d >>= windows . greedyView
shiftBy :: Int -> X ()
shiftBy d = wsBy d >>= windows . shift
wsBy :: Int -> X (WorkspaceId)
wsBy = findWorkspace getSortByIndex Next AnyWS
{- $taketwo
A few more general commands are also provided, which allow cycling
through subsets of workspaces.
For example,
> moveTo Next EmptyWS
will move to the first available workspace with no windows, and
> shiftTo Prev (WSIs $ return (('p' `elem`) . tag))
will move the focused window backwards to the first workspace containing
the letter 'p' in its name. =)
-}
-- | What type of workspaces should be included in the cycle?
data WSType = EmptyWS -- ^ cycle through empty workspaces
| NonEmptyWS -- ^ cycle through non-empty workspaces
| HiddenWS -- ^ cycle through non-visible workspaces
| HiddenNonEmptyWS -- ^ cycle through non-empty non-visible workspaces
| AnyWS -- ^ cycle through all workspaces
| WSTagGroup Char
-- ^ cycle through workspaces in the same group, the
-- group name is all characters up to the first
-- separator character or the end of the tag
| WSIs (X (WindowSpace -> Bool))
-- ^ cycle through workspaces satisfying
-- an arbitrary predicate
-- | Convert a WSType value to a predicate on workspaces.
wsTypeToPred :: WSType -> X (WindowSpace -> Bool)
wsTypeToPred EmptyWS = return (isNothing . stack)
wsTypeToPred NonEmptyWS = return (isJust . stack)
wsTypeToPred HiddenWS = do hs <- gets (map tag . hidden . windowset)
return (\w -> tag w `elem` hs)
wsTypeToPred HiddenNonEmptyWS = do ne <- wsTypeToPred NonEmptyWS
hi <- wsTypeToPred HiddenWS
return (\w -> hi w && ne w)
wsTypeToPred AnyWS = return (const True)
wsTypeToPred (WSTagGroup sep) = do cur <- (groupName.workspace.current) `fmap` gets windowset
return $ (cur ==).groupName
where groupName = takeWhile (/=sep).tag
wsTypeToPred (WSIs p) = p
-- | View the next workspace in the given direction that satisfies
-- the given condition.
moveTo :: Direction1D -> WSType -> X ()
moveTo dir t = doTo dir t getSortByIndex (windows . greedyView)
-- | Move the currently focused window to the next workspace in the
-- given direction that satisfies the given condition.
shiftTo :: Direction1D -> WSType -> X ()
shiftTo dir t = doTo dir t getSortByIndex (windows . shift)
-- | Using the given sort, find the next workspace in the given
-- direction of the given type, and perform the given action on it.
doTo :: Direction1D -> WSType -> X WorkspaceSort -> (WorkspaceId -> X ()) -> X ()
doTo dir t srt act = findWorkspace srt dir t 1 >>= act
-- | Given a function @s@ to sort workspaces, a direction @dir@, a
-- predicate @p@ on workspaces, and an integer @n@, find the tag of
-- the workspace which is @n@ away from the current workspace in
-- direction @dir@ (wrapping around if necessary), among those
-- workspaces, sorted by @s@, which satisfy @p@.
--
-- For some useful workspace sorting functions, see
-- "XMonad.Util.WorkspaceCompare".
--
-- For ideas of what to do with a workspace tag once obtained, note
-- that 'moveTo' and 'shiftTo' are implemented by applying @(>>=
-- (windows . greedyView))@ and @(>>= (windows . shift))@, respectively,
-- to the output of 'findWorkspace'.
findWorkspace :: X WorkspaceSort -> Direction1D -> WSType -> Int -> X WorkspaceId
findWorkspace s dir t n = findWorkspaceGen s (wsTypeToPred t) (maybeNegate dir n)
where
maybeNegate Next d = d
maybeNegate Prev d = (-d)
findWorkspaceGen :: X WorkspaceSort -> X (WindowSpace -> Bool) -> Int -> X WorkspaceId
findWorkspaceGen _ _ 0 = gets (currentTag . windowset)
findWorkspaceGen sortX wsPredX d = do
wsPred <- wsPredX
sort <- sortX
ws <- gets windowset
let cur = workspace (current ws)
sorted = sort (workspaces ws)
pivoted = let (a,b) = span ((/= (tag cur)) . tag) sorted in b ++ a
ws' = filter wsPred pivoted
mCurIx = findWsIndex cur ws'
d' = if d > 0 then d - 1 else d
next = if null ws'
then cur
else case mCurIx of
Nothing -> ws' !! (d' `mod` length ws')
Just ix -> ws' !! ((ix + d) `mod` length ws')
return $ tag next
findWsIndex :: WindowSpace -> [WindowSpace] -> Maybe Int
findWsIndex ws wss = findIndex ((== tag ws) . tag) wss
-- | View next screen
nextScreen :: X ()
nextScreen = switchScreen 1
-- | View prev screen
prevScreen :: X ()
prevScreen = switchScreen (-1)
switchScreen :: Int -> X ()
switchScreen d = do s <- screenBy d
mws <- screenWorkspace s
case mws of
Nothing -> return ()
Just ws -> windows (view ws)
{- | Get the 'ScreenId' /d/ places over. Example usage is a variation of the
the default screen keybindings:
> -- mod-{w,e}, Switch to previous/next Xinerama screen
> -- mod-shift-{w,e}, Move client to previous/next Xinerama screen
> --
> [((m .|. modm, key), sc >>= screenWorkspace >>= flip whenJust (windows . f))
> | (key, sc) <- zip [xK_w, xK_e] [(screenBy (-1)),(screenBy 1)]
> , (f, m) <- [(W.view, 0), (W.shift, shiftMask)]]
-}
screenBy :: Int -> X (ScreenId)
screenBy d = do ws <- gets windowset
--let ss = sortBy screen (screens ws)
let now = screen (current ws)
return $ (now + fromIntegral d) `mod` fromIntegral (length (screens ws))
-- | Swap current screen with next screen
swapNextScreen :: X ()
swapNextScreen = swapScreen 1
-- | Swap current screen with previous screen
swapPrevScreen :: X ()
swapPrevScreen = swapScreen (-1)
swapScreen :: Int -> X ()
swapScreen d = do s <- screenBy d
mws <- screenWorkspace s
case mws of
Nothing -> return ()
Just ws -> windows (greedyView ws)
-- | Move focused window to workspace on next screen
shiftNextScreen :: X ()
shiftNextScreen = shiftScreenBy 1
-- | Move focused window to workspace on prev screen
shiftPrevScreen :: X ()
shiftPrevScreen = shiftScreenBy (-1)
shiftScreenBy :: Int -> X ()
shiftScreenBy d = do s <- screenBy d
mws <- screenWorkspace s
case mws of
Nothing -> return ()
Just ws -> windows (shift ws)
|
markus1189/xmonad-contrib-710
|
XMonad/Actions/CycleWS.hs
|
bsd-3-clause
| 14,169 | 0 | 19 | 4,344 | 2,164 | 1,170 | 994 | 148 | 4 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ur-PK">
<title>Directory List v1.0</title>
<maps>
<homeID>directorylistv1</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/directorylistv1/src/main/javahelp/help_ur_PK/helpset_ur_PK.hs
|
apache-2.0
| 976 | 78 | 66 | 157 | 412 | 209 | 203 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE BangPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Foreign
-- Copyright : (c) The University of Glasgow, 2008-2011
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable
--
-- Foreign marshalling support for CStrings with configurable encodings
--
-----------------------------------------------------------------------------
module GHC.Foreign (
-- * C strings with a configurable encoding
-- conversion of C strings into Haskell strings
--
peekCString,
peekCStringLen,
-- conversion of Haskell strings into C strings
--
newCString,
newCStringLen,
-- conversion of Haskell strings into C strings using temporary storage
--
withCString,
withCStringLen,
withCStringsLen,
charIsRepresentable,
) where
import Foreign.Marshal.Array
import Foreign.C.Types
import Foreign.Ptr
import Foreign.Storable
import Data.Word
-- Imports for the locale-encoding version of marshallers
import Data.Tuple (fst)
import GHC.Show ( show )
import Foreign.Marshal.Alloc
import Foreign.ForeignPtr
import GHC.Debug
import GHC.List
import GHC.Num
import GHC.Base
import GHC.IO
import GHC.IO.Exception
import GHC.IO.Buffer
import GHC.IO.Encoding.Types
c_DEBUG_DUMP :: Bool
c_DEBUG_DUMP = False
putDebugMsg :: String -> IO ()
putDebugMsg | c_DEBUG_DUMP = debugLn
| otherwise = const (return ())
-- These definitions are identical to those in Foreign.C.String, but copied in here to avoid a cycle:
type CString = Ptr CChar
type CStringLen = (Ptr CChar, Int)
-- exported functions
-- ------------------
-- | Marshal a NUL terminated C string into a Haskell string.
--
peekCString :: TextEncoding -> CString -> IO String
peekCString enc cp = do
sz <- lengthArray0 nUL cp
peekEncodedCString enc (cp, sz * cCharSize)
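-- A minimal usage sketch (hypothetical; assumes a valid NUL-terminated
-- 'CString' @cstr@ and an encoding such as 'GHC.IO.Encoding.utf8'):
--
-- > do s <- peekCString utf8 cstr
-- >    putStrLn s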
-- | Marshal a C string with explicit length into a Haskell string.
--
peekCStringLen :: TextEncoding -> CStringLen -> IO String
peekCStringLen = peekEncodedCString
-- | Marshal a Haskell string into a NUL terminated C string.
--
-- * the Haskell string may /not/ contain any NUL characters
--
-- * new storage is allocated for the C string and must be
-- explicitly freed using 'Foreign.Marshal.Alloc.free' or
-- 'Foreign.Marshal.Alloc.finalizerFree'.
--
newCString :: TextEncoding -> String -> IO CString
newCString enc = liftM fst . newEncodedCString enc True
-- | Marshal a Haskell string into a C string (ie, character array) with
-- explicit length information.
--
-- * new storage is allocated for the C string and must be
-- explicitly freed using 'Foreign.Marshal.Alloc.free' or
-- 'Foreign.Marshal.Alloc.finalizerFree'.
--
newCStringLen :: TextEncoding -> String -> IO CStringLen
newCStringLen enc = newEncodedCString enc False
-- | Marshal a Haskell string into a NUL terminated C string using temporary
-- storage.
--
-- * the Haskell string may /not/ contain any NUL characters
--
-- * the memory is freed when the subcomputation terminates (either
-- normally or via an exception), so the pointer to the temporary
-- storage must /not/ be used after this.
--
withCString :: TextEncoding -> String -> (CString -> IO a) -> IO a
withCString enc s act = withEncodedCString enc True s $ \(cp, _sz) -> act cp
-- | Marshal a Haskell string into a C string (ie, character array)
-- in temporary storage, with explicit length information.
--
-- * the memory is freed when the subcomputation terminates (either
-- normally or via an exception), so the pointer to the temporary
-- storage must /not/ be used after this.
--
withCStringLen :: TextEncoding -> String -> (CStringLen -> IO a) -> IO a
withCStringLen enc = withEncodedCString enc False
-- | Marshal a list of Haskell strings into an array of NUL terminated C strings
-- using temporary storage.
--
-- * the Haskell strings may /not/ contain any NUL characters
--
-- * the memory is freed when the subcomputation terminates (either
-- normally or via an exception), so the pointer to the temporary
-- storage must /not/ be used after this.
--
withCStringsLen :: TextEncoding
-> [String]
-> (Int -> Ptr CString -> IO a)
-> IO a
withCStringsLen enc strs f = go [] strs
where
go cs (s:ss) = withCString enc s $ \c -> go (c:cs) ss
go cs [] = withArrayLen (reverse cs) f
-- | Determines whether a character can be accurately encoded in a
-- 'Foreign.C.String.CString'.
--
-- Pretty much anyone who uses this function is in a state of sin because
-- whether or not a character is encodable will, in general, depend on the
-- context in which it occurs.
charIsRepresentable :: TextEncoding -> Char -> IO Bool
-- We force enc explicitly because `catch` is lazy in its
-- first argument. We would probably like to force c as well,
-- but unfortunately worker/wrapper produces very bad code for
-- that.
--
-- TODO If this function is performance-critical, it would probably
-- pay to use a single-character specialization of withCString. That
-- would allow worker/wrapper to actually eliminate Char boxes, and
-- would also get rid of the completely unnecessary cons allocation.
charIsRepresentable !enc c =
withCString enc [c]
(\cstr -> do str <- peekCString enc cstr
case str of
[ch] | ch == c -> pure True
_ -> pure False)
`catch`
\(_ :: IOException) -> pure False
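-- A minimal usage sketch (hypothetical; 'utf8' is from "GHC.IO.Encoding"):
--
-- > ok <- charIsRepresentable utf8 '€'   -- expected to be True, since UTF-8
-- >                                      -- can encode any Unicode character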
-- auxiliary definitions
-- ----------------------
-- C's end of string character
nUL :: CChar
nUL = 0
-- Size of a CChar in bytes
cCharSize :: Int
cCharSize = sizeOf (undefined :: CChar)
{-# INLINE peekEncodedCString #-}
peekEncodedCString :: TextEncoding -- ^ Encoding of CString
-> CStringLen
-> IO String -- ^ String in Haskell terms
peekEncodedCString (TextEncoding { mkTextDecoder = mk_decoder }) (p, sz_bytes)
= bracket mk_decoder close $ \decoder -> do
let chunk_size = sz_bytes `max` 1 -- Decode buffer chunk size in characters: one iteration only for ASCII
from0 <- fmap (\fp -> bufferAdd sz_bytes (emptyBuffer fp sz_bytes ReadBuffer)) $ newForeignPtr_ (castPtr p)
to <- newCharBuffer chunk_size WriteBuffer
let go !iteration from = do
(why, from', to') <- encode decoder from to
if isEmptyBuffer from'
then
-- No input remaining: @why@ will be InputUnderflow, but we don't care
withBuffer to' $ peekArray (bufferElems to')
else do
-- Input remaining: what went wrong?
putDebugMsg ("peekEncodedCString: " ++ show iteration ++ " " ++ show why)
(from'', to'') <- case why of InvalidSequence -> recover decoder from' to' -- These conditions are equally bad because
InputUnderflow -> recover decoder from' to' -- they indicate malformed/truncated input
OutputUnderflow -> return (from', to') -- We will have more space next time round
putDebugMsg ("peekEncodedCString: from " ++ summaryBuffer from ++ " " ++ summaryBuffer from' ++ " " ++ summaryBuffer from'')
putDebugMsg ("peekEncodedCString: to " ++ summaryBuffer to ++ " " ++ summaryBuffer to' ++ " " ++ summaryBuffer to'')
to_chars <- withBuffer to'' $ peekArray (bufferElems to'')
fmap (to_chars++) $ go (iteration + 1) from''
go (0 :: Int) from0
{-# INLINE withEncodedCString #-}
withEncodedCString :: TextEncoding -- ^ Encoding of CString to create
-> Bool -- ^ Null-terminate?
-> String -- ^ String to encode
-> (CStringLen -> IO a) -- ^ Worker that can safely use the allocated memory
-> IO a
withEncodedCString (TextEncoding { mkTextEncoder = mk_encoder }) null_terminate s act
= bracket mk_encoder close $ \encoder -> withArrayLen s $ \sz p -> do
from <- fmap (\fp -> bufferAdd sz (emptyBuffer fp sz ReadBuffer)) $ newForeignPtr_ p
let go !iteration to_sz_bytes = do
putDebugMsg ("withEncodedCString: " ++ show iteration)
allocaBytes to_sz_bytes $ \to_p -> do
mb_res <- tryFillBufferAndCall encoder null_terminate from to_p to_sz_bytes act
case mb_res of
Nothing -> go (iteration + 1) (to_sz_bytes * 2)
Just res -> return res
-- If the input string is ASCII, this value will ensure we only allocate once
go (0 :: Int) (cCharSize * (sz + 1))
{-# INLINE newEncodedCString #-}
newEncodedCString :: TextEncoding -- ^ Encoding of CString to create
-> Bool -- ^ Null-terminate?
-> String -- ^ String to encode
-> IO CStringLen
newEncodedCString (TextEncoding { mkTextEncoder = mk_encoder }) null_terminate s
= bracket mk_encoder close $ \encoder -> withArrayLen s $ \sz p -> do
from <- fmap (\fp -> bufferAdd sz (emptyBuffer fp sz ReadBuffer)) $ newForeignPtr_ p
let go !iteration to_p to_sz_bytes = do
putDebugMsg ("newEncodedCString: " ++ show iteration)
mb_res <- tryFillBufferAndCall encoder null_terminate from to_p to_sz_bytes return
case mb_res of
Nothing -> do
let to_sz_bytes' = to_sz_bytes * 2
to_p' <- reallocBytes to_p to_sz_bytes'
go (iteration + 1) to_p' to_sz_bytes'
Just res -> return res
-- If the input string is ASCII, this value will ensure we only allocate once
let to_sz_bytes = cCharSize * (sz + 1)
to_p <- mallocBytes to_sz_bytes
go (0 :: Int) to_p to_sz_bytes
tryFillBufferAndCall :: TextEncoder dstate -> Bool -> Buffer Char -> Ptr Word8 -> Int
-> (CStringLen -> IO a) -> IO (Maybe a)
tryFillBufferAndCall encoder null_terminate from0 to_p to_sz_bytes act = do
to_fp <- newForeignPtr_ to_p
go (0 :: Int) (from0, emptyBuffer to_fp to_sz_bytes WriteBuffer)
where
go !iteration (from, to) = do
(why, from', to') <- encode encoder from to
putDebugMsg ("tryFillBufferAndCall: " ++ show iteration ++ " " ++ show why ++ " " ++ summaryBuffer from ++ " " ++ summaryBuffer from')
if isEmptyBuffer from'
then if null_terminate && bufferAvailable to' == 0
then return Nothing -- We had enough for the string but not the terminator: ask the caller for more buffer
else do
-- Awesome, we had enough buffer
let bytes = bufferElems to'
withBuffer to' $ \to_ptr -> do
when null_terminate $ pokeElemOff to_ptr (bufR to') 0
fmap Just $ act (castPtr to_ptr, bytes) -- NB: the length information is specified as being in *bytes*
else case why of -- We didn't consume all of the input
InputUnderflow -> recover encoder from' to' >>= go (iteration + 1) -- These conditions are equally bad
InvalidSequence -> recover encoder from' to' >>= go (iteration + 1) -- since the input was truncated/invalid
OutputUnderflow -> return Nothing -- Oops, out of buffer during decoding: ask the caller for more
|
sdiehl/ghc
|
libraries/base/GHC/Foreign.hs
|
bsd-3-clause
| 11,587 | 0 | 25 | 2,993 | 2,201 | 1,149 | 1,052 | 151 | 5 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeInType #-}
module T15308 where
import Data.Kind
data Foo (a :: Type) :: forall b. (a -> b -> Type) -> Type where
MkFoo :: Foo a f
f :: Foo a f -> String
f = show
|
sdiehl/ghc
|
testsuite/tests/dependent/should_fail/T15308.hs
|
bsd-3-clause
| 244 | 0 | 9 | 53 | 75 | 45 | 30 | -1 | -1 |