{-# LANGUAGE OverloadedStrings #-}
module Data.Attoparsec.Text.Machine where
import Data.Attoparsec.Machine (processParserWith, streamParserWith)
import Data.Attoparsec.Text (Parser, parse, takeWhile)
import Data.Machine (ProcessT, asParts, auto, (<~))
import Data.Text (Text)
asLines :: Monad m => ProcessT m Text Text
asLines = asParts <~ auto unpackLine <~ streamParser ((Data.Attoparsec.Text.takeWhile $ \w -> w /= '\n') <* "\n")
where
unpackLine (Right txt) = [txt]
unpackLine (Left _) = []
processParser :: Monad m => Parser a -> ProcessT m Text (Either String (Text, a))
processParser p = processParserWith $ parse p
streamParser :: Monad m => Parser a -> ProcessT m Text (Either String a)
streamParser p = streamParserWith $ parse p
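-- A minimal usage sketch (not part of the original module); it assumes
-- 'run' and 'source' from Data.Machine behave as in the machines package:
--
-- > run (asLines <~ source ["ab\ncd\n" :: Text])
--
-- which should split the streamed chunks on newlines, yielding roughly
-- ["ab", "cd"].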
-- Source: aloiscochard/sarsi, src/Data/Attoparsec/Text/Machine.hs (apache-2.0)
-- |
-- Module : Test.QuickCheck.Util.Combinator
--
-- Copyright : (C) 2010-2012 Joachim Fasting
-- License : BSD-style (see COPYING)
-- Maintainer : Joachim Fasting <[email protected]>
--
-- Additional combinators for QuickCheck.
module Test.QuickCheck.Util.Combinator
( pairOf
, tripleOf
, possibly
) where
import Control.Applicative
import Test.QuickCheck (Gen)
import qualified Test.QuickCheck as QC
-- | Create a pair generator.
pairOf :: Applicative m => m a -> m (a, a)
pairOf m = (,) <$> m <*> m
-- | Create a triple generator.
tripleOf :: Applicative m => m a -> m (a, a, a)
tripleOf m = (,,) <$> m <*> m <*> m
-- | Turn a value generator into a generator that _might_ generate a value.
--
-- Example:
--
-- @possibly $ tripleOf negative@
possibly :: Gen a -> Gen (Maybe a)
possibly m = QC.oneof [ Just <$> m , pure Nothing ]
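-- Illustrative GHCi use (not part of the original module); 'QC.sample'
-- and 'QC.choose' come from Test.QuickCheck:
--
-- > QC.sample (possibly (pairOf (QC.choose (0, 9 :: Int))))
--
-- prints a mixture of 'Nothing' and 'Just (x, y)' values.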
-- Source: joachifm/QuickCheck-util, Test/QuickCheck/Util/Combinator.hs (bsd-2-clause)
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
module YX.Shell
( findShell
, shellExePaths
, ExecuteShellException(..)
, module YX.Type.Shell
-- * Utility functions
, findExecutables
)
where
import Control.Applicative (pure)
import Control.Exception (Exception, throwIO)
import Control.Monad ((>>=))
import Data.Foldable (foldlM)
import Data.Function (($), (.), const, id)
import Data.Functor (fmap)
import Data.Maybe (Maybe(Just, Nothing), maybe)
import Data.String (String)
import System.IO (FilePath, IO)
import Text.Show (Show)
import System.Directory
( doesFileExist
, executable
, findFileWith
, getPermissions
)
import System.FilePath (splitSearchPath)
import Data.Bool.Lifted ((<&&>))
import YX.Type.Shell
data ExecuteShellException = UnableToFindShellExecutable
deriving Show
instance Exception ExecuteShellException
shellExePaths :: Shell -> [FilePath]
shellExePaths = \case
Bash ->
[ "/bin/bash"
, "/usr/bin/bash"
, "/usr/local/bin/bash" -- E.g. FreeBSD
, "bash" -- Try to locate it in "$PATH".
]
findShell
:: Maybe String
-- ^ Search path.
-> Maybe FilePath
-- ^ Preferred shell executable.
-> [FilePath]
-- ^ Absolute paths and executable names of shells to try, in order.
-> IO FilePath
findShell pathVar shellVar otherShells =
findExecutables path shellPaths >>= reportError
where
path = maybe [] splitSearchPath pathVar
shellPaths = maybe id (:) shellVar $ otherShells
reportError :: Maybe FilePath -> IO FilePath
reportError = maybe (throwIO UnableToFindShellExecutable) pure
findExecutables :: [FilePath] -> [FilePath] -> IO (Maybe FilePath)
findExecutables path = go $ \case
r@(Just _) -> const $ pure r
Nothing -> \case
fp@('/' : _) -> do
isExe <- doesFileExist fp <&&> isExecutable fp
pure $ if isExe then Just fp else Nothing
fp -> findFileWith isExecutable path fp
where
go :: (Maybe FilePath -> FilePath -> IO (Maybe FilePath))
-> [FilePath]
-> IO (Maybe FilePath)
go f = foldlM f Nothing
isExecutable = fmap executable . getPermissions
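-- Illustrative call (the search path below is hypothetical):
--
-- > findShell (Just "/usr/local/bin:/usr/bin:/bin") Nothing (shellExePaths Bash)
--
-- returns the first candidate that exists and is executable, and throws
-- 'UnableToFindShellExecutable' if none is found.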
-- Source: trskop/yx, src/YX/Shell.hs (bsd-3-clause)
{-# LANGUAGE FlexibleContexts #-}
module Opaleye.Manipulation (module Opaleye.Manipulation,
U.Unpackspec) where
import qualified Opaleye.Internal.Sql as Sql
import qualified Opaleye.Internal.Print as Print
import qualified Opaleye.RunQuery as RQ
import qualified Opaleye.Internal.RunQuery as IRQ
import qualified Opaleye.Table as T
import qualified Opaleye.Internal.Table as TI
import Opaleye.Internal.Column (Column(Column))
import Opaleye.Internal.Helpers ((.:), (.:.), (.::), (.::.))
import qualified Opaleye.Internal.Unpackspec as U
import Opaleye.PGTypes (PGBool)
import qualified Opaleye.Internal.HaskellDB.Sql as HSql
import qualified Opaleye.Internal.HaskellDB.Sql.Print as HPrint
import qualified Opaleye.Internal.HaskellDB.Sql.Default as SD
import qualified Opaleye.Internal.HaskellDB.Sql.Generate as SG
import qualified Database.PostgreSQL.Simple as PGS
import qualified Data.Profunctor.Product.Default as D
import Data.Int (Int64)
import Data.String (fromString)
import qualified Data.List.NonEmpty as NEL
arrangeInsert :: T.Table columns a -> columns -> HSql.SqlInsert
arrangeInsert t c = arrangeInsertMany t (return c)
arrangeInsertSql :: T.Table columns a -> columns -> String
arrangeInsertSql = show . HPrint.ppInsert .: arrangeInsert
runInsert :: PGS.Connection -> T.Table columns columns' -> columns -> IO Int64
runInsert conn = PGS.execute_ conn . fromString .: arrangeInsertSql
arrangeInsertMany :: T.Table columns a -> NEL.NonEmpty columns -> HSql.SqlInsert
arrangeInsertMany (T.Table tableName (TI.TableProperties writer _)) columns = insert
where (columnExprs, columnNames) = TI.runWriter' writer columns
insert = SG.sqlInsert SD.defaultSqlGenerator
tableName columnNames columnExprs
arrangeInsertManySql :: T.Table columns a -> NEL.NonEmpty columns -> String
arrangeInsertManySql = show . HPrint.ppInsert .: arrangeInsertMany
runInsertMany :: PGS.Connection
-> T.Table columns columns'
-> [columns]
-> IO Int64
runInsertMany conn table columns = case NEL.nonEmpty columns of
-- Inserting the empty list is just the same as returning 0
Nothing -> return 0
Just columns' -> (PGS.execute_ conn . fromString .: arrangeInsertManySql) table columns'
arrangeUpdate :: T.Table columnsW columnsR
-> (columnsR -> columnsW) -> (columnsR -> Column PGBool)
-> HSql.SqlUpdate
arrangeUpdate (TI.Table tableName (TI.TableProperties writer (TI.View tableCols)))
update cond =
SG.sqlUpdate SD.defaultSqlGenerator tableName [condExpr] (update' tableCols)
where update' = map (\(x, y) -> (y, x))
. TI.runWriter writer
. update
Column condExpr = cond tableCols
arrangeUpdateSql :: T.Table columnsW columnsR
-> (columnsR -> columnsW) -> (columnsR -> Column PGBool)
-> String
arrangeUpdateSql = show . HPrint.ppUpdate .:. arrangeUpdate
runUpdate :: PGS.Connection -> T.Table columnsW columnsR
-> (columnsR -> columnsW) -> (columnsR -> Column PGBool)
-> IO Int64
runUpdate conn = PGS.execute_ conn . fromString .:. arrangeUpdateSql
arrangeDelete :: T.Table a columnsR -> (columnsR -> Column PGBool) -> HSql.SqlDelete
arrangeDelete (TI.Table tableName (TI.TableProperties _ (TI.View tableCols)))
cond =
SG.sqlDelete SD.defaultSqlGenerator tableName [condExpr]
where Column condExpr = cond tableCols
arrangeDeleteSql :: T.Table a columnsR -> (columnsR -> Column PGBool) -> String
arrangeDeleteSql = show . HPrint.ppDelete .: arrangeDelete
runDelete :: PGS.Connection -> T.Table a columnsR -> (columnsR -> Column PGBool)
-> IO Int64
runDelete conn = PGS.execute_ conn . fromString .: arrangeDeleteSql
arrangeInsertReturning :: U.Unpackspec returned ignored
-> T.Table columnsW columnsR
-> columnsW
-> (columnsR -> returned)
-> Sql.Returning HSql.SqlInsert
arrangeInsertReturning unpackspec table columns returningf =
Sql.Returning insert returningSEs
where insert = arrangeInsert table columns
TI.Table _ (TI.TableProperties _ (TI.View columnsR)) = table
returningPEs = U.collectPEs unpackspec (returningf columnsR)
returningSEs = Sql.ensureColumnsGen id (map Sql.sqlExpr returningPEs)
arrangeInsertReturningSql :: U.Unpackspec returned ignored
-> T.Table columnsW columnsR
-> columnsW
-> (columnsR -> returned)
-> String
arrangeInsertReturningSql = show
. Print.ppInsertReturning
.:: arrangeInsertReturning
runInsertReturningExplicit :: RQ.QueryRunner returned haskells
-> PGS.Connection
-> T.Table columnsW columnsR
-> columnsW
-> (columnsR -> returned)
-> IO [haskells]
runInsertReturningExplicit qr conn t w r = PGS.queryWith_ parser conn
(fromString
(arrangeInsertReturningSql u t w r))
where IRQ.QueryRunner u _ _ = qr
parser = IRQ.prepareRowParser qr (r v)
TI.Table _ (TI.TableProperties _ (TI.View v)) = t
-- This method of getting hold of the return type feels a bit
-- suspect. I haven't checked it for validity.
-- | @runInsertReturning@'s use of the 'D.Default' typeclass means that the
-- compiler will have trouble inferring types. It is strongly
-- recommended that you provide full type signatures when using
-- @runInsertReturning@.
runInsertReturning :: (D.Default RQ.QueryRunner returned haskells)
=> PGS.Connection
-> T.Table columnsW columnsR
-> columnsW
-> (columnsR -> returned)
-> IO [haskells]
runInsertReturning = runInsertReturningExplicit D.def
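-- As a sketch only (the table and result type below are hypothetical), a
-- fully annotated call might read:
--
-- > ids <- runInsertReturning conn userTable userRow userId :: IO [Int]
--
-- where 'userTable' is a 'T.Table', 'userRow' a row of write columns and
-- 'userId' a projection of the read columns.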
arrangeUpdateReturning :: U.Unpackspec returned ignored
-> T.Table columnsW columnsR
-> (columnsR -> columnsW)
-> (columnsR -> Column PGBool)
-> (columnsR -> returned)
-> Sql.Returning HSql.SqlUpdate
arrangeUpdateReturning unpackspec table updatef cond returningf =
Sql.Returning update returningSEs
where update = arrangeUpdate table updatef cond
TI.Table _ (TI.TableProperties _ (TI.View columnsR)) = table
returningPEs = U.collectPEs unpackspec (returningf columnsR)
returningSEs = Sql.ensureColumnsGen id (map Sql.sqlExpr returningPEs)
arrangeUpdateReturningSql :: U.Unpackspec returned ignored
-> T.Table columnsW columnsR
-> (columnsR -> columnsW)
-> (columnsR -> Column PGBool)
-> (columnsR -> returned)
-> String
arrangeUpdateReturningSql = show
. Print.ppUpdateReturning
.::. arrangeUpdateReturning
runUpdateReturningExplicit :: RQ.QueryRunner returned haskells
-> PGS.Connection
-> T.Table columnsW columnsR
-> (columnsR -> columnsW)
-> (columnsR -> Column PGBool)
-> (columnsR -> returned)
-> IO [haskells]
runUpdateReturningExplicit qr conn t update cond r =
PGS.queryWith_ parser conn
(fromString (arrangeUpdateReturningSql u t update cond r))
where IRQ.QueryRunner u _ _ = qr
parser = IRQ.prepareRowParser qr (r v)
TI.Table _ (TI.TableProperties _ (TI.View v)) = t
runUpdateReturning :: (D.Default RQ.QueryRunner returned haskells)
=> PGS.Connection
-> T.Table columnsW columnsR
-> (columnsR -> columnsW)
-> (columnsR -> Column PGBool)
-> (columnsR -> returned)
-> IO [haskells]
runUpdateReturning = runUpdateReturningExplicit D.def
-- Source: danse/haskell-opaleye, src/Opaleye/Manipulation.hs (bsd-3-clause)
{-# LANGUAGE TemplateHaskell #-}
module Horbits.UI.Camera.Internal where
import Control.Lens
import Data.Fixed
import Data.List.NonEmpty as NE
import Linear
import Horbits.UI.Camera.Zoom
-- data type
data OrthoCamera a = OrthoCamera { _orthoCameraCenter :: V3 a
, _orthoCameraColatitude :: a
, _orthoCameraLongitude :: a
, _orthoCameraScale :: a
, _orthoCameraViewportWidth :: Int
, _orthoCameraViewportHeight :: Int
, _orthoCameraZoomModel :: ZoomModel a
} deriving (Show, Eq)
makeLenses ''OrthoCamera
initOrthoCamera :: Num a => ZoomModel a -> OrthoCamera a
initOrthoCamera z@(ZoomModel zs) = OrthoCamera zero 0 0 (NE.last zs) 1 1 z
-- transform matrices
orthoCameraMatrix :: (RealFloat a, Epsilon a) => OrthoCamera a -> M44 a
orthoCameraMatrix cam = scale cam !*! rotateColat cam !*! rotateLong cam !*! translate cam
invOrthoCameraMatrix :: (RealFloat a, Epsilon a) => OrthoCamera a -> M44 a
invOrthoCameraMatrix cam = invTranslate cam !*! invRotateLong cam !*! invRotateColat cam !*! invScale cam
orthoCameraZIndex :: (RealFloat a, Epsilon a) => OrthoCamera a -> V3 a -> a
orthoCameraZIndex c = negate . dot (orthoCameraMatrix c ^. _z . _xyz)
-- update API
addColatitude :: RealFloat a => a -> OrthoCamera a -> OrthoCamera a
addColatitude a cam = cam & orthoCameraColatitude %~ addClamped
where addClamped b = min pi $ max 0 $ a + b
addLongitude :: RealFloat a => a -> OrthoCamera a -> OrthoCamera a
addLongitude a cam = cam & orthoCameraLongitude %~ addWrapped
where addWrapped b = mod' (a + b) (2 * pi)
addTranslation :: (RealFloat a, Epsilon a) => V2 a -> OrthoCamera a -> OrthoCamera a
addTranslation v cam = cam & orthoCameraCenter %~ (^-^ v')
where v' = (invOrthoCameraMatrix cam !* (zero & _xy .~ v)) ^. _xyz
zoomOut :: (Ord a, Num a) => OrthoCamera a -> OrthoCamera a
zoomOut cam = cam & orthoCameraScale %~ zoomModelOut (cam ^. orthoCameraZoomModel)
where
zoomModelOut (ZoomModel zooms) z = NE.head $ foldr NE.cons (NE.last zooms :| []) (NE.dropWhile (<= z) zooms)
zoomIn :: (Ord a, Num a) => OrthoCamera a -> OrthoCamera a
zoomIn cam = cam & orthoCameraScale %~ zoomModelIn (cam ^. orthoCameraZoomModel)
where zoomModelIn (ZoomModel zooms) z = NE.last $ NE.head zooms :| NE.takeWhile (< z) zooms
-- transformation parts
orthoCameraAspectRatio :: (RealFloat a) => OrthoCamera a -> a
orthoCameraAspectRatio cam =
realToFrac (cam ^. orthoCameraViewportWidth) / realToFrac (cam ^. orthoCameraViewportHeight)
translate :: Num a => OrthoCamera a -> M44 a
translate = translate' . negate . view orthoCameraCenter
scale :: RealFloat a => OrthoCamera a -> M44 a
scale = scaling f
where
f sz ar mz = V4 0 0 0 1 & _xy .~ (1 / sz) *^ rs & _z .~ 1 / mz
where
rs = if ar > 1 then V2 (1/ar) (-1) else V2 1 (-ar)
rotateLong :: (Epsilon a, Floating a) => OrthoCamera a -> M44 a
rotateLong = rotateZ . view orthoCameraLongitude
rotateColat :: (Epsilon a, Floating a) => OrthoCamera a -> M44 a
rotateColat = rotateX . views orthoCameraColatitude (pi -)
-- inverse transformation parts
invTranslate :: Num a => OrthoCamera a -> M44 a
invTranslate = translate' . view orthoCameraCenter
invScale :: RealFloat a => OrthoCamera a -> M44 a
invScale = scaling f
where
f sz ar mz = V4 0 0 0 1 & _xy .~ sz *^ rs & _z .~ mz
where
rs = if ar > 1 then V2 ar (-1) else V2 1 (-1 / ar)
invRotateLong :: (Epsilon a, Floating a) => OrthoCamera a -> M44 a
invRotateLong = rotateZ . negate . view orthoCameraLongitude
invRotateColat :: (Epsilon a, Floating a) => OrthoCamera a -> M44 a
invRotateColat = rotateX . negate . views orthoCameraColatitude (pi -)
-- primitive transformations
rotateZ :: (Epsilon a, Floating a) => a -> M44 a
rotateZ a = mkTransformation (axisAngle (V3 0 0 1) a) zero
rotateX :: (Epsilon a, Floating a) => a -> M44 a
rotateX a = mkTransformation (axisAngle (V3 1 0 0) a) zero
translate' :: (Num a) => V3 a -> M44 a
translate' v = identity & column _w . _xyz .~ v
scaling :: (RealFloat a) =>
(a -> a -> a -> V4 a) -> OrthoCamera a -> M44 a
scaling f = do
sz <- view orthoCameraScale
ar <- orthoCameraAspectRatio
mz <- view $ orthoCameraZoomModel . maxZoom
return $ scaled $ f sz ar mz
-- Source: chwthewke/horbits, src/horbits/Horbits/UI/Camera/Internal.hs (bsd-3-clause)
module FP.Parser.SExp where
import FP.Prelude
import FP.Parser.Parser
import FP.Pretty
import qualified Prelude
data SNumber =
SNInteger β€
| SNDouble π»
deriving (Eq,Ord)
makePrettySum ''SNumber
data SLit =
SLNumber SNumber
| SLString π
deriving (Eq,Ord)
makePrettySum ''SLit
data SToken =
STLParen
| STRParen
| STLit SLit
| STSymbol π
| STWhitespace π
deriving (Eq,Ord)
makePrettySum ''SToken
makePrisms ''SToken
lparenTok β· Parser β ()
lparenTok = pRender darkGray $ void $ pLit '('
rparenTok β· Parser β ()
rparenTok = pRender darkGray $ void $ pLit ')'
litTok β· Parser β SLit
litTok = pRender darkRed $ mconcat
[ SLNumber ^$ pError "number" numberTok
, SLString ^$ pError "string" stringTok
]
where
numberTok β· Parser β SNumber
numberTok = mconcat
[ SNInteger ^$ pError "integer" integerTok
, SNDouble ^$ pError "double" doubleTok
]
where
integerTok β· Parser β β€
integerTok = do
sign β signTok
digits β π€ ^$ pOneOrMoreGreedy $ pSatisfies "digit" isDigit
return $ Prelude.read $ chars $ sign β§Ί digits
doubleTok β· Parser β π»
doubleTok = do
sign β signTok
digitsBefore β π€ ^$ pOneOrMoreGreedy $ pSatisfies "digit" isDigit
dec β π€ ^$ mapM pLit $ chars "."
digitsAfter β π€ ^$ pOneOrMoreGreedy $ pSatisfies "digit" isDigit
return $ Prelude.read $ chars $ sign β§Ί digitsBefore β§Ί dec β§Ί digitsAfter
signTok β· Parser β π
signTok = mconcat
[ π€ ^$ mapM pLit $ chars "-"
, return ""
]
stringTok β· Parser β π
stringTok = do
void $ pLit '"'
s β concat ^$ pManyGreedy $ mconcat
[ π€ β single ^$ pSatisfies "anything but '\"' or '\\'" $ \ c β not $ c β '"' β¨ c β '\\'
, pAppendError "escape sequence" $ do
bslash β π€ β single ^$ pLit '\\'
c β π€ β single ^$ pLit '\\' <β§Ί> pLit 'n'
return $ bslash β§Ί c
]
void $ pLit '"'
return s
symbolTok β· Parser β π
symbolTok = π€ ^$ pOneOrMoreGreedy $ pSatisfies "letter" isLetter
whitespaceTok β· Parser β π
whitespaceTok = π€ ^$ pOneOrMoreGreedy $ pSatisfies "space" isSpace
tok β· Parser β SToken
tok = mconcat
[ const STLParen ^$ pError "lparen" lparenTok
, const STRParen ^$ pError "rparen" rparenTok
, STLit ^$ pError "lit" litTok
, STSymbol ^$ pError "symbol" symbolTok
, STWhitespace ^$ pError "whitespace" whitespaceTok
]
testSExpTokenizerSuccess β· IO ()
testSExpTokenizerSuccess = tokenizeIOMain tok $ tokens "((-1-2-1.42(\"astringwith\\\\stuff\\n\" ( "
testSExpTokenizerFailure1 β· IO ()
testSExpTokenizerFailure1 = tokenizeIOMain tok $ tokens "((foo-1and0.01+bar"
testSExpTokenizerFailure2 β· IO ()
testSExpTokenizerFailure2 = tokenizeIOMain tok $ tokens "()foo-1\"astring\\badescape\""
data FullContext t = FullContext
{ fullContextCaptured β· ParserContext t
, fullContextFutureInput β· ParserInput t
}
instance Pretty (FullContext t) where
pretty (FullContext (ParserContext pre _ display _ _) (ParserInput ss _)) = concat
[ ppPun "β¬"
, ppAlign $ pre β§Ί (ppUT '^' green display) β§Ί concat (map tokenRender ss)
, ppPun "β"
]
data SAtom =
SALit SLit
| SASymbol π
makePrettySum ''SAtom
data TaggedFix t (f β· β
β β
) = TaggedFix
{ taggedFixContext β· FullContext t
, taggedFixValue β· f (TaggedFix t f)
}
makePrettySum ''TaggedFix
data PreSExp e =
SEAtom SAtom
| SEExp [e]
makePrettySum ''PreSExp
type SExp = TaggedFix SToken PreSExp
atomPar β· Parser SToken SAtom
atomPar = pError "atom" $ mconcat
[ SALit ^$ litPar
, SASymbol ^$ symbolPar
]
litPar β· Parser SToken SLit
litPar = pShaped "lit" $ view sTLitL
symbolPar β· Parser SToken π
symbolPar = pShaped "symbol" $ view sTSymbolL
preSExpPar β· Parser SToken (PreSExp SExp)
preSExpPar = mconcat
[ SEAtom ^$ atomPar
, SEExp ^$ inParensPar
]
inParensPar β· Parser SToken [SExp]
inParensPar = do
void $ pLit STLParen
es β sexpsPar
void $ pLit STRParen
return es
sexpsPar β· Parser SToken [SExp]
sexpsPar = do
void $ pOptionalGreedy $ pSatisfies "whitespace" $ shape sTWhitespaceL
xs β pManySepByGreedy (void $ pOptionalGreedy $ pSatisfies "whitespace" $ shape sTWhitespaceL) sexpPar
void $ pOptionalGreedy $ pSatisfies "whitespace" $ shape sTWhitespaceL
return xs
sexpPar β· Parser SToken SExp
sexpPar = do
(s,cc) β pCapture $ preSExpPar
pin β getL parserStateInputL
return $ TaggedFix (FullContext cc pin) s
testSExpParserSuccess β· IO ()
testSExpParserSuccess = do
toks β tokenizeIO tok input
parseIOMain sexpsPar toks
where
input β· Stream (Token β)
input = tokens " x y ( -1-2) 0.0"
-- Source: davdar/darailude, src/FP/Parser/SExp.hs (bsd-3-clause)
{-# LANGUAGE OverloadedStrings, RecursiveDo, ScopedTypeVariables, FlexibleContexts, TypeFamilies, ConstraintKinds #-}
module Frontend.Properties.R53
(
r53Properties
) where
import Prelude hiding (mapM, mapM_, all, sequence)
import qualified Data.Map as Map
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import Reflex
import Reflex.Dom.Core
------
import AWSFunction
--------------------------------------------------------------------------
-- Route53 Properties
-------
calcR53ZoneF :: Text -> Text -> Text
calcR53ZoneF hz tf = toText $ hosZ (readDouble hz) + (50.0 * (readDouble tf))
where
hosZ hz'
| hz' <= 25 = 0.50 * hz'
| otherwise = (0.50 * 25) + (0.10 * (hz' - 25))
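-- Worked example of the formula above (input values are illustrative):
-- with 30 hosted zones and 2 traffic-flow policies,
-- hosZ 30 = 0.50*25 + 0.10*(30-25) = 13.0, so the result is
-- 13.0 + 50.0*2 = 113.0.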
calcR53Queries :: Text -> Int -> Text -> Int -> Text -> Int -> Text
calcR53Queries sq skey lbrq lkey gdq gkey =
toText $ (standardQ (readDouble sq) skey)
+ (latencyQ (readDouble lbrq) lkey)
+ (geoDnsQ (readDouble gdq) gkey)
where
standardQ i k1 = case k1 of
1 -> 30 * standardQ' i
2 -> 4 * standardQ' i
_ -> standardQ' i
where
standardQ' sq'
| sq' <= 1000 = 0.400 * sq'
| otherwise = (0.400 * 1000) + (0.200 * (sq' - 1000))
latencyQ i k2 = case k2 of
1 -> 30 * latencyQ' i
2 -> 4 * latencyQ' i
_ -> latencyQ' i
where
latencyQ' lq'
| lq' <= 1000 = 0.600 * lq'
| otherwise = (0.600 * 1000) + (0.300 * (lq' - 1000))
geoDnsQ i k3 = case k3 of
1 -> 30 * geoDnsQ' i
2 -> 4 * geoDnsQ' i
_ -> geoDnsQ' i
where
geoDnsQ' gq'
| gq' <= 1000 = 0.700 * gq'
| otherwise = (0.700 * 1000) + (0.350 * (gq' - 1000))
-------------------------------------
r53Properties :: (Reflex t, MonadWidget t m) => Dynamic t (Map.Map T.Text T.Text) -> m (Dynamic t Text)
r53Properties dynAttrs = do
result <- elDynAttr "div" ((constDyn $ idAttrs R53) <> dynAttrs <> rightClassAttrs) $ do
rec
r53HostZ <- r53HostedZone evReset
evReset <- button "Reset"
return $ r53HostZ
return $ result
r53HostedZone :: (Reflex t, MonadWidget t m) => Event t a -> m (Dynamic t Text)
r53HostedZone evReset = do
rec
let
resultR53ZoneF = calcR53ZoneF <$> (value r53Hz)
<*> (value r53Tf)
resultR53Queries = calcR53Queries <$> (value r53Sq)
<*> (value ddR53Sq)
<*> (value r53Lbrq)
<*> (value ddR53Lbrq)
<*> (value r53GeoDQ)
<*> (value ddR53GeoDQ)
resultR53HZone = (+) <$> (readDouble <$> resultR53ZoneF)
<*> (readDouble <$> resultR53Queries)
el "h4" $ text "Hosted Zone: "
el "p" $ text "Hosted Zone:"
r53Hz <- textInput $ def & textInputConfig_inputType .~ "number"
& textInputConfig_initialValue .~ "0"
& setValue .~ (leftmost ["0" <$ evReset])
el "p" $ text "Traffic Flow:"
r53Tf <- textInput $ def & textInputConfig_inputType .~ "number"
& textInputConfig_initialValue .~ "0"
& setValue .~ (leftmost ["0" <$ evReset])
el "p" $ text "Standard Queries"
el "p" $ text "(in Million Queries):"
r53Sq <- textInput $ def & textInputConfig_inputType .~ "number"
& textInputConfig_initialValue .~ "0"
& setValue .~ (leftmost ["0" <$ evReset])
ddR53Sq <- dropdown 3 (constDyn ddPerMonth) def
el "p" $ text "Latency Based"
el "p" $ text "Routing Queries"
el "p" $ text "(in Million Queries):"
r53Lbrq <- textInput $ def & textInputConfig_inputType .~ "number"
& textInputConfig_initialValue .~ "0"
& setValue .~ (leftmost ["0" <$ evReset])
ddR53Lbrq <- dropdown 3 (constDyn ddPerMonth) def
el "p" $ text "Geo DNS Queries"
el "p" $ text "(in Million Queries):"
r53GeoDQ <- textInput $ def & textInputConfig_inputType .~ "number"
& textInputConfig_initialValue .~ "0"
& setValue .~ (leftmost ["0" <$ evReset])
ddR53GeoDQ <- dropdown 3 (constDyn ddPerMonth) def
return $ toDynText resultR53HZone
-- Source: Rizary/awspi, Lib/Frontend/Properties/R53.hs (bsd-3-clause)
{-# LANGUAGE FlexibleInstances #-}
module Eval (
runEval
) where
import Control.Monad.State
import Control.Monad.Writer (WriterT, runWriterT, tell)
import Data.List
import Data.Maybe
import qualified Data.Map as Map
import Text.PrettyPrint
import Pretty
import Syntax
-- Values
data Value
= VInt Integer
| VBool Bool
| VClosure String Expr (Eval.Scope)
instance Pretty Value where
ppr _ (VInt x) = text $ show x
ppr _ (VBool x) = text $ show x
ppr p (VClosure v x env) =
text "\\" <> text v <+> text "." <+> ppr (p+1) x
<> if null env
then text ""
else text " |" <+> (hsep $ map (ppr 0) (Map.assocs env))
instance Pretty (String, Value) where
ppr p (n, v) = text "(" <> text n <> text "," <+> ppr 0 v <> text ")"
type Scope = Map.Map String Value
emptyScope = Map.empty
type Step = (Int, Expr) -- (depth, partially evaluated expression)
type Eval a = WriterT [Step] (State EvalState) a
-- State and logging of evaluation
data EvalState = EvalState
{ depth :: Int
} deriving (Show)
inc :: Eval a -> Eval a
inc m = do
modify $ \s -> s { depth = depth s + 1}
out <- m
modify $ \s -> s { depth = depth s - 1}
return out
record :: Expr -> Eval ()
record x = do
d <- gets depth
tell [(d,x)]
return ()
-- Evaluation
eval :: Eval.Scope -> Expr -> Eval Value
eval scope x = case x of
Lam n _ body -> inc $ do
return $ VClosure n body scope
App a b -> inc $ do
x <- eval scope a
record a
y <- eval scope b
record b
appl x y
Var n -> do
record x
return $ scope Map.! n
Lit (LInt a) -> return $ VInt (fromIntegral a)
Lit (LBool a) -> return $ VBool a
appl :: Value -> Value -> Eval Value
appl (VClosure n e scope) x = do
eval (Map.insert n x scope) e
appl _ _ = error "Tried to apply non-closure"
-- Interface
runEval :: Expr -> (Value, [Step])
runEval x = evalState (runWriterT (eval emptyScope x)) (EvalState 0)
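-- Illustrative evaluation (not part of the original module; 'ty' stands
-- in for whatever annotation the Syntax module's 'Lam' expects):
--
-- > runEval (App (Lam "x" ty (Var "x")) (Lit (LInt 1)))
--
-- reduces the application to 'VInt 1' and records each step with its depth.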
-- Source: zanesterling/haskell-compiler, src/Eval.hs (bsd-3-clause)
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module HTIG.IRCServer.Core
( IRCState(..)
, IRCM
, runIRCM
, runIRCM'
, getConn
, getGlobal
, setGlobal
, modifyGlobal
, modifyGlobal'
, getLocal
, setLocal
, modifyLocal
, modifyLocal'
, liftIO
) where
import Control.Applicative (Applicative(pure, (<*>)))
import Control.Concurrent.STM (TVar, atomically, readTVar, writeTVar)
import Control.Monad.Trans (MonadIO, liftIO)
import Control.Monad.Reader (ReaderT, MonadReader, runReaderT, asks)
import HTIG.IRCServer.Connection (Connection)
data IRCState g l = IRCState { ircGlobal :: TVar g
, ircLocal :: TVar l
, ircConn :: Connection
}
newtype IRCM g l a = IRCM { unIRCM :: ReaderT (IRCState g l) IO a }
deriving (Monad, Functor, MonadIO, MonadReader (IRCState g l))
instance Applicative (IRCM g l) where
pure = return
f <*> x = do
f' <- f
x' <- x
return $ f' x'
runIRCM :: IRCM g l a -> TVar g -> TVar l -> Connection -> IO a
runIRCM m g l conn = runIRCM' m $ IRCState g l conn
runIRCM' :: IRCM g l a -> IRCState g l -> IO a
runIRCM' m s = runReaderT (unIRCM m) s
getConn :: IRCM g l Connection
getConn = asks ircConn
getGlobal :: IRCM g l g
getGlobal = mkGet ircGlobal
setGlobal :: g -> IRCM g l ()
setGlobal g = mkSet ircGlobal g
modifyGlobal :: (g -> g) -> IRCM g l ()
modifyGlobal f = mkModify ircGlobal $ \g -> (f g, ())
modifyGlobal' :: (g -> (g, a)) -> IRCM g l a
modifyGlobal' f = mkModify ircGlobal f
getLocal :: IRCM g l l
getLocal = mkGet ircLocal
setLocal :: l -> IRCM g l ()
setLocal l = mkSet ircLocal l
modifyLocal :: (l -> l) -> IRCM g l ()
modifyLocal f = mkModify ircLocal $ \l -> (f l, ())
modifyLocal' :: (l -> (l, a)) -> IRCM g l a
modifyLocal' f = mkModify ircLocal f
mkGet :: (IRCState g l -> TVar a) -> IRCM g l a
mkGet f = liftIO . atomically . readTVar =<< asks f
mkSet :: (IRCState g l -> TVar a) -> a -> IRCM g l ()
mkSet f v = liftIO . atomically . flip writeTVar v =<< asks f
mkModify :: (IRCState g l -> TVar a) -> (a -> (a, b)) -> IRCM g l b
mkModify f f' = do
tv <- asks f
liftIO $ atomically $ do
v <- readTVar tv
let (v', r) = f' v
writeTVar tv v'
return r
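-- Usage sketch (assuming a numeric global state type):
--
-- > old <- modifyGlobal' (\g -> (g + 1, g))
--
-- atomically bumps the shared global value and returns what it was before.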
-- Source: nakamuray/htig, HTIG/IRCServer/Core.hs (bsd-3-clause)
{- |
Module : Skel
Description : Description
Copyright : 2014, Peter Harpending.
License : BSD3
Maintainer : Peter Harpending <[email protected]>
Stability : experimental
Portability : archlinux
-}
module Skel where
-- Source: pharpend/flogger, skel/Skel.hs (bsd-3-clause)
{-
Copyright (c) 2014-2015, Johan Nordlander, Jonas Duregård, Michał Pałka,
Patrik Jansson and Josef Svenningsson
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Chalmers University of Technology nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module ARText where
import Data.Map
data Package = Package {
packagename :: QualName,
imports :: [Import],
typedefsApp :: Map TypeName Type,
typedefsImpl :: Map TypeName Type,
mappingSets :: Map MapName Mapping,
constraints :: Map ConstrName Constraint,
constants :: Map ConstName Constant,
interfaces :: Map IfaceName Interface,
components :: Map CompName Component,
behaviors :: Map BehName Behavior,
implementations :: Map ImpName Implementation,
modegroups :: Map GroupName ModeGroup,
compositions :: Map CompName Composition,
root :: CompName
}
type Name = Int
type TypeName = Name
type MapName = Name
type ConstrName = Name
type ConstName = Name
type IfaceName = Name
type CompName = Name
type BehName = Name
type ImpName = Name
type InstName = Name
type ProtName = Name
type GroupName = Name
type ModeName = Name
type PortName = Name
type ErrName = Name
type OpName = Name
type ElemName = Name
type VarName = Name
type FieldName = Name
type EnumName = Name
type ParName = Name
type ExclName = Name
type RunName = Name
type ShortName = Name
type ParNameOrStar = Name
type ElemNameOrStar = Name
type OpNameOrStar = Name
type QualName = [Name]
data Import = Import QualName
| ImportAll QualName
-- Types --------------------------------------------------------------------------------
data Type = TBool InvalidValue Extends
| TInt Min Max Unit ConstraintRef InvalidValue Extends
| TReal Min Max Encoding AllowNaN Unit InvalidValue Extends ConstraintRef
| TString Length Encoding InvalidValue
| TArray TypeName Int
| TRecord (Map FieldName TypeName)
| TEnum Min Max (Map EnumName (Maybe Int))
| TFixed Slope Bias Min Max Unit ConstraintRef InvalidValue Extends
data Min = Min Value Interval
data Max = Max Value Interval
data Length = Length Int
data Interval = Closed
| Open
| Infinite
data Unit = Unit QualName
| NoUnit
data ConstraintRef = ConstraintRef ConstrName
| NoConstraint
data InvalidValue = InvalidValue Value
| NoInvalid
data Extends = Extends QualName
| NoExtends
data Encoding = EncodingDouble
| EncodingSingle
| Encoding String
| NoEncoding
data AllowNaN = AllowNaN
| NoAllowNaN
data Slope = Slope Double
data Bias = Bias Double
| NoBias
-- Mappings -------------------------------------------------------------------------------
data Mapping = MapT (Map TypeName TypeName)
| MapG (Map TypeName GroupName)
-- Constraints ----------------------------------------------------------------------------
data Constraint = Constraint [Rule]
data Rule = Rule PhysInt Min Max Unit
data PhysInt = Physical
| Internal
-- Constants ------------------------------------------------------------------------------
data Constant = Const TypeName Value
data Value = Void --
| VBool Bool
| VInt Int
| VReal Double
| VString String
| VArray [Value]
-- | VArray TypeName ArrayValue
| VRecord TypeName (Map FieldName Value)
| VEnum EnumName
| VRef ConstName
deriving (Eq,Ord,Show)
data ArrayValue = Init [Value]
| InitAll Value
deriving (Eq,Ord,Show)
-- Interfaces ----------------------------------------------------------------------------
data Interface = SenderReceiver Service (Map ElemName Data)
| ClientServer Service (Map ErrName Int) (Map OpName Operation)
| Param Service (Map ParName Param)
| ModeSwitch Service (Map ProtName GroupName)
data Service = IsService
| NotService
data Data = Data TypeName Queued InitValue
data Queued = Queued
| UnQueued
data InitValue = InitValue Value
| NoInitValue
data Operation = Operation [ErrName] (Map ParName Argument)
data Argument = In TypeName Policy
| InOut TypeName Policy
| Out TypeName Policy
data Policy = UseArgumentType
| UseArrayBaseType
| UseVoid
| NoPolicy
data Param = TypeName String InitValue
-- Components ---------------------------------------------------------------------------
data Component = Application (Map PortName Port)
| SensorActuator (Map PortName Port) Hw
| Service (Map PortName Port)
| Parameter (Map PortName Port)
data Port = SenderProvides IfaceName (Map ElemName ComSpecS)
| ReceiverRequires IfaceName (Map ElemName ComSpecR)
| ClientRequires IfaceName (Map OpName ComSpec0)
| ServerProvides IfaceName (Map OpName ComSpec1)
| ParamProvides IfaceName
| ParamRequires IfaceName
data ComSpecS = QueuedComSpecS CanInvalidate InitValue E2EProtection OutOfRange
| UnQueuedComSpecS E2EProtection OutOfRange
data ComSpecR = QueuedComSpecR Length E2EProtection OutOfRange
| UnQueuedComSpecR TimeOut ResyncTime InvalidType InitValue
EnableUpdate NeverReceived E2EProtection OutOfRange
data ComSpec0 = ComSpec0
data ComSpec1 = ComSpec1 Length
data CanInvalidate = CanInvalidate
| CannotInvalidate
data E2EProtection = UsesEndToEndProtection
| NoEndToEndProtection
data OutOfRange = NONE
| IGNORE
| SATURATE
| DEFAULT
| INVALID
data TimeOut = TimeOut Double
| NoTimeOut
data ResyncTime = ResyncTime Double
| NoResyncTime
data InvalidType = HandleInvalidTypeKeep
| HandleInvalidTypeReplace
| NoHandleInvalidType
data EnableUpdate = EnableUpdate Bool
| NoEnableUpdate
data NeverReceived = HandleNeverReceived
| NoHandleNeverReceived
data Hw = Hw QualName
data Behavior = InternalBehavior {
supportsMultipleInstantiation :: Bool,
forComponent :: CompName,
dataTypeMappings :: [MapName],
exclusiveAreas :: [ExclName],
interRunnableVariables :: Map VarName Variable,
calibrationParams :: Map ParName CalParam,
perInstanceMemories :: Map Name PerInstMem,
portAPIOptions :: [PortAPIOption],
runnables :: Map RunName Runnable
}
data Variable = Var TypeName Explicit InitValue
data Explicit = Explicit
| Implicit
data CalParam = InstanceParam TypeName String
| SharedParam TypeName String
data PerInstMem = PerInstanceMemory String String
data PortAPIOption = PortAPIOption IndirectAPI TakeAddress PortName [(Type,Value)]
data IndirectAPI = IndirectAPI
| NoIndirectAPI
data TakeAddress = EnableTakeAddress
| DisableTakeAddress
data Runnable = Runnable {
concurrent :: Bool,
minimumStartInterval :: Double,
inExclusiveAreas :: [ExclName],
usesExclusiveAreas :: [ExclName],
symbol :: Maybe String,
readVariables :: [VarName],
writtenVariables :: [VarName],
events :: [Event],
parameterAccesses :: [ParamAccess],
dataReadAccesses :: [DataRdAccess],
dataReceivePoints :: [DataRcvPt],
dataSendPoints :: [DataSndPt],
dataWriteAccesses :: [DataWrAccess],
modeSwitchPoints :: [ModeSwitchPt],
modeAccessPoints :: [ModeAccessPt],
serverCallPoints :: [ServerCallPt],
waitPoints :: [WaitPt]
}
data Event = DataReceivedEvent PortName ElemName As Dis
| OperationInvokedEvent PortName OpName As Dis
| ModeSwitchEvent Activation PortName GroupName ModeName As Dis
| InitEvent
| BackgroundEvent
| TimingEvent Double As Dis
| DataSendCompletedEvent ShortName As Dis
| DataWriteCompletedEvent PortName ElemName
| AsynchronousServerCallReturnsEvent ServerCallPt
| ModeSwitchAckEvent PortName GroupName
| ReceiveErrorEvent PortName ElemName As Dis
| ModeManagerErrorEvent
| ExternalTriggerOccurredEvent
| InternalTriggerOccurredEvent
{-
data WPEvent = DataSendCompleted -- Rte_Feedback(PortName,ElemName)
| DataReceived -- Rte_Receive(PortName,ElemName,...)
| AsynchronousServerCallReturns -- Rte_Result(PortName,OpName,...)
| ModeSwitchAck -- Rte_SwitchAck(PortName,GroupName)
-}
data ParamAccess = ParameterAccess ParName As
| ParamPortAccess PortName ParNameOrStar As
data DataRdAccess = DataReadAccess PortName ElemNameOrStar As
data DataRcvPt = DataReceivePoint PortName ElemNameOrStar As
data DataSndPt = DataSendPoint PortName ElemNameOrStar As
data DataWrAccess = DataWriteAccess PortName ElemNameOrStar As
data ModeSwitchPt = ModeSwitchPoint PortName ProtName As
data ModeAccessPt = ModeAccessPoint Activation PortName GroupName As
data ServerCallPt = ServerCallPoint SyncOrAsync TimeOut PortName OpNameOrStar As
data WaitPt = WaitPoint ShortName TimeOut [ShortName]
data SyncOrAsync = Synchronous
| Asynchronous
data Activation = Entry
| Exit
data As = As ShortName
| NoName
data Dis = DisabledFor PortName GroupName ModeName
| NoDis
data Implementation = Implementation {
forBehavior :: BehName,
language :: Language,
codeDescriptor :: String,
codeGenerator :: Maybe String,
requiredRTEVendor :: RTEVendor,
compilers :: [Compiler]
}
data Language = C
| Cpp
| Java
data RTEVendor = RTEVendor String SwVersion VendorId
| NoRTEVendor
data SwVersion = SwVersion Int
| NoSwVersion
data VendorId = VendorId Int
| NoVendorId
data Compiler = Compiler {
compilerName :: Name,
vendor :: String,
version :: String
}
-- ModeGroups ------------------------------------------------------------------------------
data ModeGroup = ModeGroup Initial [ModeName]
data Initial = Initial ModeName
| NoInitial
-- Compositions ----------------------------------------------------------------------------
data Composition = Composition {
subcomponents :: Map InstName CompPrototype,
delegations :: Map PortName Delegation,
connectors :: [Connector]
}
data CompPrototype = Prototype CompName
data Delegation = DelegateRequires IfaceName [(InstName,PortName)]
| DelegateProvides IfaceName [(InstName,PortName)]
deriving (Eq)
data Connector = Connect (InstName,PortName) (InstName,PortName)
| AutoConnect InstName InstName
deriving (Eq)
-- Source: josefs/autosar, oldARSim/ARText.hs (bsd-3-clause)
import Control.Monad
import Control.Concurrent
import Control.Concurrent.STM
import System.Posix.IO
import GHC.Event
main_ :: IO ()
main_ = do
Just em <- getSystemEventManager
registerTimeout em 1000000 (print 888)
registerFd em (\k e -> getLine >>= print >> print k >> print e) stdInput evtRead
threadDelay 2000000
return ()
main :: IO ()
main = do
c <- atomically newTChan
Just em <- getSystemEventManager
registerTimeout em 1000000 (print 888)
forkIO . void $ registerFd em
(\k e -> void $ print k >> print e >> atomically (writeTChan c ()))
stdInput evtRead
atomically $ readTChan c
getLine >>= print
threadDelay 2000000
-- Source: YoshikuniJujo/xmpipe, test/testPolling.hs (bsd-3-clause)
{-# LANGUAGE TemplateHaskell, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, TypeFamilies #-}
module WormLikeChain where
import Control.Applicative
import Control.Monad
import qualified Data.Vector.Unboxed as V
import qualified Data.Vector.Generic
import qualified Data.Vector.Generic.Mutable
import Data.Vector.Unboxed.Deriving
import Data.VectorSpace
import Data.Cross
import Data.AffineSpace
import Data.AffineSpace.Point
import Control.Newtype
import Data.Random
import Data.Random.Distribution.Bernoulli
import Data.Number.LogFloat hiding (realToFrac)
derivingUnbox "Point"
[t| (V.Unbox a) => Point (a,a,a) -> (a,a,a) |]
[| \(P x)->x |]
[| P |]
data WormLikeChain = WLC { lp :: Double
, links :: Int
}
type Angle = Double -- ^ Angle in radians
type Dist = Double
type Energy = Double
type Mass = Double
type Charge = Double -- ^ Electric charge
type PTrans = Double -- ^ Momentum transfer
type Intensity = Double -- ^ Scattering Amplitude
type R3 = (Double, Double, Double)
type P3 = Point R3
-- | Chain parametrized by bend angles
newtype ChainConfig = ChainC (V.Vector Angle)
deriving (Show)
instance Newtype ChainConfig (V.Vector Angle) where
pack = ChainC
unpack (ChainC a) = a
-- | Embedding of chain parametrized by dihedral angles
newtype ChainEmbedding = ChainE (V.Vector (Angle,Angle))
deriving (Show)
instance Newtype ChainEmbedding (V.Vector (Angle,Angle)) where
pack = ChainE
unpack (ChainE a) = a
-- | Position of links parametrized in Cartesian 3-space
newtype ChainPos = ChainP (V.Vector P3)
deriving (Show)
instance Newtype ChainPos (V.Vector P3) where
pack = ChainP
unpack (ChainP a) = a
-- | The bend angles of a Euler-angle parametrized embedding
embeddingToConfig :: ChainEmbedding -> ChainConfig
embeddingToConfig (ChainE v) =
    ChainC $ V.map (\(α,β)->undefined) v
-- | Dihedral angles to Cartesian embedding given link length
embeddingToPositions :: Dist -> ChainEmbedding -> ChainPos
embeddingToPositions d (ChainE e) =
ChainP $ V.fromList $ reverse $ go [] (V.toList e)
where go :: [P3] -> [(Angle,Angle)] -> [P3]
go ps [] = ps
go [] ((_,_):rest) = go [origin] rest
go ps@(p1:[]) ((_,_):rest) = let p0 = p1 .+^ (1,0,0)
in go (p0:ps) rest
        go ps@(p1:p2:[]) ((α,_):rest) = let p0 = p1 .+^ d *^ (cos α, sin α, 0)
                                        in go (p0:ps) rest
        go ps@(p1:p2:p3:_) ((α,β):rest) =
          let p0 = p1 .+^ d *^ dihedralDir p3 p2 p1 (α,β)
          in go (p0:ps) rest
-- | Normalized vector in direction specified by dihedral angles
-- relative to the three points given
--
-- ...--p3--p2 dir
-- \ /
-- p1
dihedralDir :: P3 -> P3 -> P3 -> (Angle,Angle) -> R3
dihedralDir p3 p2 p1 (α,β) =
let x = normalized $ p1 .-. p2
y = x `cross3` z
z = case (p2 .-. p1) `cross3` (p3 .-. p1) of
a | magnitude a < 1e-4 -> (0,0,1)
a -> a
    in x ^* cos α ^* cos β ^+^ y ^* sin α ^* cos β ^+^ sin β *^ z
-- | A straight chain
straightChain :: Int -> ChainEmbedding
straightChain n = ChainE $ V.replicate n (0,0)
-- | Bending energy of given configuration under worm-like chain model
bendEnergy :: WormLikeChain -> ChainConfig -> Energy
bendEnergy (WLC lp links) (ChainC config) =
V.sum $ V.map energy config
  where energy θ = undefined
-- | Electrostatic self-energy
selfEnergy :: Charge -> Dist -> ChainPos -> Energy
selfEnergy chainQ debyeL (ChainP v) =
sum $ map pairEnergy $ pairsWith distance v
where pairEnergy :: Dist -> Energy
pairEnergy r = 2*chainQ / r * exp (-r / debyeL)
-- | Zip together all combinations (not permutations) of distinct
-- elements with function f
pairsWith :: V.Unbox a => (a -> a -> b) -> V.Vector a -> [b]
pairsWith f v =
case V.toList v of
x:xs -> map (f x) xs ++ pairsWith f (V.tail v)
[] -> []
-- | Generate a random chain
randomChain :: Int -> RVar ChainEmbedding
randomChain n =
(ChainE . V.fromList) <$> replicateM n randomLink
where randomLink = do
            α <- uniform 0 (2*pi)
            β <- uniform 0 pi
            return (α, β)
--- Importance sampling
-- | Propose a new embedding
proposal :: ChainEmbedding -> RVar ChainEmbedding
proposal (ChainE e) = do
n <- uniform 0 (V.length e - 1)
  α <- uniform 0 (2*pi)
  β <- uniform 0 pi
  return $ ChainE $ e V.// [(n,(α,β))]
-- | Metropolis acceptance
accept :: (a -> LogFloat) -> a -> a -> RVar a
accept prob x x'
| p' > p = return x'
| otherwise = do a <- bernoulli $ (realToFrac $ p' / p :: Double)
return $ if a then x' else x
where (p, p') = (prob x, prob x')
-- | Monte Carlo sampling of embedding space
-- 'evolve n energy beta e0' produces 'n' configurations evolved from
-- initial chain configuration 'e0' 'under energy function 'energy' at
-- temperature 'T = 1 / beta / k_B'
evolve :: Int -> (ChainEmbedding -> Energy) -> Energy -> ChainEmbedding -> RVar [ChainEmbedding]
evolve n energy beta = iterateM n go
where go e = proposal e >>= accept prob e
prob x = logToLogFloat $ -energy x * beta
-- | Scattering amplitude for given chain configuration
scattering :: V.Vector P3 -> R3 -> Intensity
scattering v q =
n + 1 + 2*sum (map (\d->cos $ 2*pi * (d <.> q)) $ pairsWith (.-.) v)
where n = realToFrac $ V.length v
--- Observables
-- | End to end distance
endToEndDist :: ChainPos -> Double
endToEndDist (ChainP p) = V.last p `distance` V.head p
-- | Squared radius of gyration
gyrationRad :: V.Vector Mass -> ChainPos -> Double
gyrationRad masses (ChainP e) =
weight * V.sum (V.zipWith (\m p->m * p `distanceSq` origin) masses e) - magnitudeSq cm
where weight = V.sum masses
cm = V.foldl1 (^+^) $ V.zipWith (\m p->m *^ (p .-. origin)) masses e
iterateM :: Monad m => Int -> (a -> m a) -> a -> m [a]
iterateM 0 _ _ = return []
iterateM n f x = do
x' <- f x
xs <- iterateM (n-1) f x'
return $ x':xs
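-- A possible driver, sketched only: it assumes 'sample' from Data.Random
-- runs an 'RVar' in IO, and the physical parameters below are made up.
--
-- > c0 <- sample (randomChain 100)
-- > cs <- sample (evolve 1000 (selfEnergy 1.0 10.0 . embeddingToPositions 1.0) 1.0 c0)
-- > print (map (endToEndDist . embeddingToPositions 1.0) cs)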
-- Source: bgamari/polymer-models, WormLikeChain.hs (bsd-3-clause)
{-# LANGUAGE TypeFamilies, MultiParamTypeClasses, DeriveFunctor, DeriveFoldable, DeriveGeneric, TypeOperators #-}
module Scaling.S1 where
import Space.Class
import Exponential.Class
import Data.Foldable
import Multiplicative.Class
import Data.FMonoid.Class
import Data.Distributive
import Data.Monoid
import Linear.V2
import Linear.V1
import Linear.Vector
import Local
import Data.Functor.Product
import qualified Prelude as Prelude
import Prelude hiding((*))
import SemiProduct
newtype Scale a = Scale {unScale :: a} deriving(Functor,Foldable,Read,Show)
instance Distributive Scale where
distribute x = Scale (fmap unScale x)
instance Exponential Scale where
logM (Scale x) = V1 $ log x
expM (V1 x) = Scale $ exp x
instance Group Scale where
mult (Scale x) (Scale y)= Scale (x Prelude.* y)
invert (Scale x) = Scale (-x )
type instance Local Scale = V1
instance Action Scale V1 where
Scale x |> v = fmap (Prelude.*x) v
instance Floating a => Multiplicative (Scale a) where
one = Scale 1
Scale x * Scale y = Scale $ x Prelude.* y
inversion (Scale x) = Scale $ 1/x
instance Space Scale where
x |+| y = x * expM y
x |-| y = logM $ inversion y * x
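-- Quick sanity checks, read off the instances above: Scale 2 * Scale 3
-- gives Scale 6, logM (Scale (exp 1)) gives V1 1.0, and x |+| v is just
-- x * expM v, i.e. translation by the exponential of a tangent vector.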
-- Source: massudaw/mtk, Scaling/S1.hs (bsd-3-clause)
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Text.HeX.Standard.LaTeX (defaults) where
import Text.HeX
import Text.HeX.Standard.TeX (ctl, ch, grp)
import Text.HeX.Standard.Generic (getSectionNum)
defaults :: HeX ()
defaults = do
addParser [Inline] $ basicInline ch
addParser [Block] $ basicBlock toPara
newCommand [Inline] "emph" emph
newCommand [Inline] "strong" strong
newCommand [Block] "section" (section 1)
newCommand [Block] "subsection" (section 2)
newCommand [Block] "subsubsection" (section 3)
newCommand [Block] "paragraph" (section 4)
newCommand [Block] "subparagraph" (section 5)
toPara :: [Doc] -> Doc
toPara xs = mconcat xs +++ "\n\n"
emph :: InlineDoc -> Doc
emph (InlineDoc arg) = ctl "emph" +++ grp [arg]
strong :: InlineDoc -> Doc
strong (InlineDoc arg) = ctl "textbf" +++ grp [arg]
section :: Int -> InlineDoc -> HeX Doc
section lev (InlineDoc d) = do
_ <- getSectionNum lev -- we need to increment the number
let secheading = case lev of
1 -> "section"
2 -> "subsection"
3 -> "subsubsection"
4 -> "paragraph"
_ -> "subparagraph"
return $ ctl secheading +++ grp [d] +++ "\n"
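-- For reference, reading the definitions above off directly: 'emph'
-- renders as \emph{...}, 'strong' as \textbf{...}, and 'section 2' emits
-- \subsection{...} followed by a newline while bumping the section
-- counter via 'getSectionNum'.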
-- Source: jgm/HeX, Text/HeX/Standard/LaTeX.hs (bsd-3-clause)
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Network.Linode.Internal where
import Control.Error
import Control.Exception (IOException, handle)
import Control.Lens ((&), (.~), (^?))
import Control.Monad.IO.Class (liftIO)
import Data.Aeson (FromJSON)
import qualified Data.ByteString.Lazy as B
--import Data.Foldable (traverse_)
import Data.Monoid ((<>))
import qualified Data.Text as T
--import Data.Text.Encoding (decodeUtf8)
--import qualified Data.Text.IO as TIO
import qualified Network.Wreq as W
--import Network.Wreq.Lens
import Network.Linode.Parsing
import Network.Linode.Types
diskTypeToString :: DiskType -> String
diskTypeToString Ext3 = "ext3"
diskTypeToString Ext4 = "ext4"
diskTypeToString Swap = "swap"
diskTypeToString RawDisk = "raw"
paymentTermToInt :: PaymentTerm -> Int
paymentTermToInt OneMonth = 1
paymentTermToInt OneYear = 12
paymentTermToInt TwoYears = 24
getWith :: FromJSON a => W.Options -> ExceptT LinodeError IO a
getWith opts = ExceptT g
where g = handle (\(e :: IOException) -> return (Left $ NetworkError e)) $ do
--liftIO $ print (view params opts :: [(T.Text, T.Text)])
response <- W.getWith opts "https://api.linode.com"
--liftIO $ traverse_ (TIO.putStrLn . decodeUtf8. B.toStrict) $ response ^? W.responseBody
return $ parseResponse (fromMaybe B.empty (response ^? W.responseBody))
simpleGetter :: FromJSON a => String -> ApiKey -> ExceptT LinodeError IO a
simpleGetter action apiKey = getWith opts
where opts = W.defaults & W.param "api_key" .~ [T.pack apiKey]
& W.param "api_action" .~ [T.pack action]
maybeOr :: Monad m => Maybe a -> ExceptT e m a -> ExceptT e m a
maybeOr v p = maybe p return v
fetchAndSelect :: IO (Either LinodeError [a]) -> ([a] -> Maybe a) -> String -> ExceptT LinodeError IO a
fetchAndSelect fetch select name = do
r <- liftIO fetch
case r of
Left e -> throwE $ SelectionError ("Error which fetching a " <> name <> " . " ++ show e)
Right xs -> case select xs of
Nothing -> throwE $ SelectionError ("Error: Selection of " <> name <> " returned no value")
Just x -> return x
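-- Illustrative call (the API action name is only an example):
--
-- > runExceptT (simpleGetter "test.echo" apiKey)
--
-- sends a GET with ?api_key=...&api_action=test.echo and either decodes
-- the JSON body or returns a 'LinodeError'.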
-- Source: Helkafen/haskell-linode, src/Network/Linode/Internal.hs (bsd-3-clause)
import Tutorial.Chapter8.Bug (Sex(..), BugColour(..), buildBug)
import ALife.Creatur.Universe (store, mkSimpleUniverse)
import ALife.Creatur.Genetics.BRGCBool (put, runWriter,
runDiploidReader)
import Control.Monad.State.Lazy (evalStateT)
main :: IO ()
main = do
let u = mkSimpleUniverse "Chapter8" "chapter8"
-- Create some Bugs and save them in the population directory.
let g1 = runWriter (put Male >> put Green)
let (Right b1) = runDiploidReader (buildBug "Bugsy") (g1,g1)
evalStateT (store b1) u
let g2 = runWriter (put Male >> put Purple)
let (Right b2) = runDiploidReader (buildBug "Mel") (g2,g2)
evalStateT (store b2) u
let g3 = runWriter (put Female >> put Green)
let (Right b3) = runDiploidReader (buildBug "Flo") (g3, g3)
evalStateT (store b3) u
let g4 = runWriter (put Male >> put Purple)
let (Right b4) = runDiploidReader (buildBug "Buzz") (g4, g4)
evalStateT (store b4) u
-- Source: mhwombat/creatur-examples, src/Tutorial/Chapter8/GeneratePopulation.hs (bsd-3-clause)
module Main where
import Control.Monad
import System.Exit (exitFailure)
import System.Environment
import L2.AbsL
import L2.ParL
import L2.ErrM
import Liveness.Liveness
main :: IO ()
main = do
args <- getArgs
when (length args /= 1) $ do
putStrLn "usage: filename"
exitFailure
ts <- liftM myLexer $ readFile (head args)
case pParenListInstruction ts of
Bad s -> do
putStrLn "\nParse Failed...\n"
putStrLn "Tokens:"
print ts
putStrLn s
Ok (PLI is) -> putStrLn . displayLiveArray . liveness $ is
-- Source: mhuesch/scheme_compiler, src/Liveness/Main.hs (bsd-3-clause)
module Matterhorn.Events.ChannelListOverlay
( onEventChannelListOverlay
, channelListOverlayKeybindings
, channelListOverlayKeyHandlers
)
where
import Prelude ()
import Matterhorn.Prelude
import qualified Graphics.Vty as Vty
import Matterhorn.Events.Keybindings
import Matterhorn.State.ChannelListOverlay
import Matterhorn.State.ListOverlay
import Matterhorn.Types
onEventChannelListOverlay :: Vty.Event -> MH ()
onEventChannelListOverlay =
void . onEventListOverlay (csCurrentTeam.tsChannelListOverlay) channelListOverlayKeybindings
-- | The keybindings we want to use while viewing a channel list overlay
channelListOverlayKeybindings :: KeyConfig -> KeyHandlerMap
channelListOverlayKeybindings = mkKeybindings channelListOverlayKeyHandlers
channelListOverlayKeyHandlers :: [KeyEventHandler]
channelListOverlayKeyHandlers =
[ mkKb CancelEvent "Close the channel search list" (exitListOverlay (csCurrentTeam.tsChannelListOverlay))
, mkKb SearchSelectUpEvent "Select the previous channel" channelListSelectUp
, mkKb SearchSelectDownEvent "Select the next channel" channelListSelectDown
, mkKb PageDownEvent "Page down in the channel list" channelListPageDown
, mkKb PageUpEvent "Page up in the channel list" channelListPageUp
, mkKb ActivateListItemEvent "Join the selected channel" (listOverlayActivateCurrent (csCurrentTeam.tsChannelListOverlay))
]
-- Source: matterhorn-chat/matterhorn, src/Matterhorn/Events/ChannelListOverlay.hs (bsd-3-clause)
{-# LANGUAGE NoMonomorphismRestriction #-}
import Diagrams.Prelude
import Diagrams.Backend.Cairo.CmdLine
import System.Environment
main = withArgs [ "-o", "test4.png", "-w", "400", "-h", "400" ] $ defaultMain $ ((text "ABCDEFGHabcdefgh" # fontSize 2 # translateX 8 <> rect 10 1 # lw 0.1) # translateX (-5)) <> rect 12 12 # lw 0.2
-- Source: diagrams/diagrams-test, misc/av-font.hs (bsd-3-clause)
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE RecordWildCards #-}
module LDrive.Platforms where
-- ( testPlatformParser
-- , ColoredLEDs(..)
-- , TestUART(..)
-- , TestSPI(..)
-- , TestCAN(..)
-- , TestDMA(..)
-- , TestPlatform(..)
-- , testplatform_clockconfig
-- , odrive
-- , drv8301
-- , drv8301_en_gate
-- , m1_ncs
-- , pinOut
-- ) where
import Ivory.Language
import Ivory.Tower.Config
import Data.Char (toUpper)
import qualified Ivory.BSP.STM32F405.ADC as F405
import qualified Ivory.BSP.STM32F405.ATIM18 as F405
import qualified Ivory.BSP.STM32F405.CAN as F405
import qualified Ivory.BSP.STM32F405.UART as F405
import qualified Ivory.BSP.STM32F405.GPIO as F405
import qualified Ivory.BSP.STM32F405.GPIO.AF as F405
import qualified Ivory.BSP.STM32F405.SPI as F405
import qualified Ivory.BSP.STM32F405.RNG as F405
import qualified Ivory.BSP.STM32F405.GTIM2345 as F405
import qualified Ivory.BSP.STM32F405.Interrupt as F405
import Ivory.BSP.STM32.Peripheral.ADC
import Ivory.BSP.STM32.Peripheral.CAN
import Ivory.BSP.STM32.Peripheral.GPIOF4
import Ivory.BSP.STM32.Peripheral.UART
import Ivory.BSP.STM32.Peripheral.SPI as SPI -- hiding (ActiveHigh, ActiveLow)
import Ivory.BSP.STM32.Peripheral.RNG
import Ivory.BSP.STM32.Peripheral.UART.DMA
import Ivory.BSP.STM32.ClockConfig
import Ivory.BSP.STM32.Config
import Ivory.BSP.STM32.Interrupt
import LDrive.LED as LED
import Ivory.Tower.Drivers.PWM.ATIM
testPlatformParser :: ConfigParser TestPlatform
testPlatformParser = do
p <- subsection "args" $ subsection "platform" string
case map toUpper p of
"ODRIVE" -> result odrive
"CAN4DISCO" -> result c4d
_ -> fail ("no such platform " ++ p)
where
result platform = do
conf <- stm32ConfigParser (testplatform_stm32 platform)
return platform { testplatform_stm32 = conf }
data ColoredLEDs =
ColoredLEDs
{ redLED :: LED
, greenLED :: LED
}
data TestUART =
TestUART
{ testUARTPeriph :: UART
, testUARTPins :: UARTPins
}
data TestSPI =
TestSPI
{ testSPIPeriph :: SPIPeriph
, testSPIPins :: SPIPins
-- TODO FIXME: move CS pins for test devices into TestSPI
}
data TestCAN =
TestCAN
{ testCAN :: CANPeriph
, testCANRX :: GPIOPin
, testCANTX :: GPIOPin
, testCANFilters :: CANPeriphFilters
}
data TestDMA =
TestDMA
{ testDMAUARTPeriph :: DMAUART
, testDMAUARTPins :: UARTPins
}
data ADC = ADC {
adcId :: Uint8
, adcPeriph :: ADCPeriph
, adcChan :: (Uint8, GPIOPin)
, adcInjChan :: (Uint8, GPIOPin)
, adcInt :: HasSTM32Interrupt
}
data Enc = EncTimer {
encTim :: F405.GTIM16
, encChan1 :: GPIOPin
, encChan2 :: GPIOPin
, encAf :: GPIO_AF
}
data ExtInt =
ExtInt
{ extInt :: HasSTM32Interrupt,
extPin :: GPIOPin
}
type ADCs = (ADC, ADC, ADC)
data TestPlatform =
TestPlatform
{ testplatform_leds :: ColoredLEDs
, testplatform_uart :: TestUART
, testplatform_spi :: TestSPI
, testplatform_can :: TestCAN
, testplatform_rng :: RNG
, testplatform_stm32 :: STM32Config
, testplatform_enc :: Enc
, testplatform_pwm :: PWMTimer
, testplatform_adc1 :: ADC
, testplatform_adc2 :: ADC
, testplatform_adc3 :: ADC
, testplatform_adcs :: ADCs
}
testplatform_clockconfig :: TestPlatform -> ClockConfig
testplatform_clockconfig = stm32config_clock . testplatform_stm32
--testExti :: ExtInt
--testExti = ExtInt (HasSTM32Interrupt F405.EXTI0) F405.pinD1
testExti :: ExtInt
testExti = ExtInt (HasSTM32Interrupt F405.EXTI4) gpio3
adcint :: HasSTM32Interrupt
adcint = HasSTM32Interrupt F405.ADC
adc1, adc2, adc3 :: ADC
adc1 = ADC 1 F405.adc1 (5, F405.pinA5) (0, F405.pinA0) adcint
adc2 = ADC 2 F405.adc2 (13, F405.pinC3) (10, F405.pinC0) adcint
adc3 = ADC 3 F405.adc3 (12, F405.pinC2) (11, F405.pinC1) adcint
spi3_pins :: SPIPins
spi3_pins = SPIPins
{ spiPinMiso = F405.pinC12
, spiPinMosi = F405.pinC11
, spiPinSck = F405.pinC10
, spiPinAF = F405.gpio_af_spi3
}
gpio1, gpio2, gpio3, gpio4 :: GPIOPin
gpio1 = F405.pinB2
gpio2 = F405.pinA5
gpio3 = F405.pinA4
gpio4 = F405.pinA3
drv8301_en_gate :: GPIOPin
drv8301_en_gate = F405.pinB12
m0_dc_cal, m1_dc_cal :: GPIOPin
m0_dc_cal = F405.pinC9
m1_dc_cal = F405.pinC15
m0_nCS :: GPIOPin
m0_nCS = F405.pinC13
m1_nCS :: GPIOPin
m1_nCS = F405.pinC14
enc0 :: Enc
enc0 = EncTimer F405.tim3 F405.pinB4 F405.pinB5 F405.gpio_af_tim3
enc0Z0 :: GPIOPin
enc0Z0 = F405.pinA15
enc1 :: Enc
enc1 = EncTimer F405.tim4 F405.pinB6 F405.pinB7 F405.gpio_af_tim4
enc1Z0 :: GPIOPin
enc1Z0 = F405.pinB3
pwm0 :: PWMTimer
pwm0 = PWMTimer F405.tim1
F405.pinA8 F405.pinA9 F405.pinA10
F405.pinB13 F405.pinB14 F405.pinB15
F405.gpio_af_tim1 0 tim_period_clocks
pwm1 :: PWMTimer
pwm1 = PWMTimer F405.tim8
F405.pinC6 F405.pinC7 F405.pinC8
F405.pinA7 F405.pinB0 F405.pinB1
F405.gpio_af_tim8 0 tim_period_clocks
drv8301M0 :: SPIDevice
drv8301M0 = SPIDevice
{ spiDevPeripheral = F405.spi3
, spiDevCSPin = m0_nCS
, spiDevClockHz = 500000
, spiDevCSActive = SPI.ActiveLow
, spiDevClockPolarity = ClockPolarityLow
, spiDevClockPhase = ClockPhase2
, spiDevBitOrder = MSBFirst
, spiDevName = "drv8301m0"
}
drv8301M1 :: SPIDevice
drv8301M1 = SPIDevice
{ spiDevPeripheral = F405.spi3
, spiDevCSPin = m1_nCS
, spiDevClockHz = 500000
, spiDevCSActive = SPI.ActiveLow
, spiDevClockPolarity = ClockPolarityLow
, spiDevClockPhase = ClockPhase2
, spiDevBitOrder = MSBFirst
, spiDevName = "drv8301m1"
}
tim_period_clocks :: Uint16
tim_period_clocks = 8192
currentMeasPeriod :: ClockConfig -> IFloat
currentMeasPeriod cc = (2 * (safeCast tim_period_clocks) / (fromIntegral pclkhz))
where
pclkbus = PClk2
pclkhz = clockPClkHz pclkbus cc
currentMeasHz :: ClockConfig -> IFloat
currentMeasHz cc = (fromIntegral pclkhz) / (safeCast $ 2 * tim_period_clocks)
where
pclkbus = PClk2
pclkhz = clockPClkHz pclkbus cc
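-- Worked example (added, hedged): assuming clockPClkHz reports 168 MHz for
-- PClk2 under odriveSTMConfig 8 below (dividers 1/2/1), tim_period_clocks =
-- 8192 gives currentMeasPeriod = 2 * 8192 / 168e6 ~= 97.5 us, i.e.
-- currentMeasHz ~= 10.25 kHz.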
odrive :: TestPlatform
odrive = TestPlatform
{ testplatform_leds = ColoredLEDs
{ redLED = LED gpio1 LED.ActiveHigh
, greenLED = LED gpio2 LED.ActiveHigh
}
, testplatform_uart = TestUART
{ testUARTPeriph = F405.uart1
, testUARTPins = UARTPins
{ uartPinTx = F405.pinB6
, uartPinRx = F405.pinB7
, uartPinAF = F405.gpio_af_uart1
}
}
, testplatform_spi = TestSPI
{ testSPIPeriph = F405.spi3
, testSPIPins = spi3_pins
}
, testplatform_can = TestCAN
{ testCAN = F405.can1
, testCANRX = F405.pinB8
, testCANTX = F405.pinB9
, testCANFilters = F405.canFilters
}
, testplatform_rng = F405.rng
, testplatform_enc = enc0
, testplatform_pwm = pwm0
, testplatform_adc1 = adc1
, testplatform_adc2 = adc2
, testplatform_adc3 = adc3
, testplatform_adcs = (adc1, adc2, adc3)
, testplatform_stm32 = odriveSTMConfig 8
}
c4d :: TestPlatform
c4d = TestPlatform
{ testplatform_leds = ColoredLEDs
{ redLED = LED F405.pinD14 LED.ActiveHigh
, greenLED = LED F405.pinD15 LED.ActiveHigh
}
, testplatform_uart = TestUART
{ testUARTPeriph = F405.uart2
, testUARTPins = UARTPins
{ uartPinTx = F405.pinA2
, uartPinRx = F405.pinA3
, uartPinAF = F405.gpio_af_uart2
}
}
, testplatform_spi = TestSPI
{ testSPIPeriph = F405.spi3
, testSPIPins = spi3_pins
}
, testplatform_can = TestCAN
{ testCAN = F405.can1
, testCANRX = F405.pinB8
, testCANTX = F405.pinB9
, testCANFilters = F405.canFilters
}
, testplatform_rng = F405.rng
, testplatform_enc = enc0
, testplatform_pwm = pwm0
, testplatform_adc1 = adc1
, testplatform_adc2 = adc2
, testplatform_adc3 = adc3
, testplatform_adcs = (adc1, adc2, adc3)
, testplatform_stm32 = odriveSTMConfig 8
}
--- XXX: clock hackery, suggest upstream
data Divs = Divs {
div_hclk :: Integer
, div_pclk1 :: Integer
, div_pclk2 :: Integer
}
externalXtalDivs :: Integer -> Integer -> Divs -> ClockConfig
externalXtalDivs xtal_mhz sysclk_mhz Divs{..} = ClockConfig
{ clockconfig_source = External (xtal_mhz * 1000 * 1000)
, clockconfig_pll = PLLFactor
{ pll_m = xtal_mhz
, pll_n = sysclk_mhz * 2
, pll_p = 2
, pll_q = 7
}
, clockconfig_hclk_divider = div_hclk
, clockconfig_pclk1_divider = div_pclk1
, clockconfig_pclk2_divider = div_pclk2
}
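-- Worked example (added): odriveSTMConfig 8 below calls this with
-- xtal_mhz = 8 and sysclk_mhz = 168, giving pll_m = 8, pll_n = 336,
-- pll_p = 2, pll_q = 7.  On an STM32F4 that corresponds to a 1 MHz PLL input
-- (8 MHz / 8), a 336 MHz VCO, SYSCLK = 336 / 2 = 168 MHz and a
-- 336 / 7 = 48 MHz USB/SDIO clock.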
-- STM32F405RGT6
odriveSTMConfig :: Integer -> STM32Config
odriveSTMConfig xtal_mhz = STM32Config
{ stm32config_processor = STM32F405
, stm32config_px4version = Nothing
, stm32config_clock = externalXtalDivs xtal_mhz 168 divs
-- XXX: this is 192 in total (112+16+64)
-- 64 is CCM (core coupled memory)
-- + 4kb additional backup sram
-- , stm32config_sram = 128 * 1024
, stm32config_sram = 164 * 1024
}
where
divs = Divs
{ div_hclk = 1
, div_pclk1 = 2
, div_pclk2 = 1
}
| sorki/odrive | src/LDrive/Platforms.hs | bsd-3-clause | 9,381 | 0 | 12 | 2,191 | 2,080 | 1,278 | 802 | 252 | 3 |
module Main
(
main
) where
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.Attoparsec.Char8 as P
import Data.Attoparsec.Lazy hiding (skipWhile,take)
import Data.List (intercalate,transpose)
import NanoUtils.Container (normalizeByMax)
import System.IO
main = do
xss <- parseFile
let xss' = transpose.map normalizeByMax.transpose $ xss
contents = intercalate "\n".map (intercalate "\t".map show) $ xss'
writeFile "temp" contents
parseFile = do
contents <- L.readFile "data/allnodes_notnormalized.tab"
let (Done _ lst) = parse docParser contents
return lst
docParser = (P.double `P.sepBy` skipTab) `P.sepBy` P.endOfLine
skipTab = P.skipWhile (=='\t')
| nanonaren/Reducer | Normalize.hs | bsd-3-clause | 706 | 0 | 14 | 116 | 225 | 124 | 101 | 20 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances,
PatternGuards #-}
module Idris.Core.Evaluate(normalise, normaliseTrace, normaliseC, normaliseAll,
rt_simplify, simplify, specialise, hnf, convEq, convEq',
Def(..), CaseInfo(..), CaseDefs(..),
Accessibility(..), Totality(..), PReason(..), MetaInformation(..),
Context, initContext, ctxtAlist, uconstraints, next_tvar,
addToCtxt, setAccess, setTotal, setMetaInformation, addCtxtDef, addTyDecl,
addDatatype, addCasedef, simplifyCasedef, addOperator,
lookupNames, lookupTy, lookupP, lookupDef, lookupDefAcc, lookupVal,
mapDefCtxt,
lookupTotal, lookupNameTotal, lookupMetaInformation, lookupTyEnv, isDConName, isTConName, isConName, isFnName,
Value(..), Quote(..), initEval, uniqueNameCtxt) where
import Debug.Trace
import Control.Monad.State -- not Strict!
import qualified Data.Binary as B
import Data.Binary hiding (get, put)
import Idris.Core.TT
import Idris.Core.CaseTree
data EvalState = ES { limited :: [(Name, Int)],
nexthole :: Int }
deriving Show
type Eval a = State EvalState a
data EvalOpt = Spec
| HNF
| Simplify
| AtREPL
| RunTT
deriving (Show, Eq)
initEval = ES [] 0
-- VALUES (as HOAS) ---------------------------------------------------------
-- | A HOAS representation of values
data Value = VP NameType Name Value
| VV Int
-- True for Bool indicates safe to reduce
| VBind Bool Name (Binder Value) (Value -> Eval Value)
-- For frozen let bindings when simplifying
| VBLet Int Name Value Value Value
| VApp Value Value
| VType UExp
| VErased
| VImpossible
| VConstant Const
| VProj Value Int
-- | VLazy Env [Value] Term
| VTmp Int
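-- Illustrative sketch (added, not authoritative): with this HOAS encoding a
-- binder's body is an ordinary Haskell function, so an identity lambda
-- \x . x is represented roughly as
--   VBind True x (Lam xty) (\v -> return v)
-- which is what lets the evaluator substitute simply by applying that
-- Haskell function to an argument Value.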
instance Show Value where
show x = show $ evalState (quote 100 x) initEval
instance Show (a -> b) where
show x = "<<fn>>"
-- THE EVALUATOR ------------------------------------------------------------
-- The environment is assumed to be "locally named" - i.e., not de Bruijn
-- indexed.
-- i.e. it's an intermediate environment that we have while type checking or
-- while building a proof.
-- | Normalise fully type checked terms (so, assume all names/let bindings resolved)
normaliseC :: Context -> Env -> TT Name -> TT Name
normaliseC ctxt env t
= evalState (do val <- eval False ctxt [] env t []
quote 0 val) initEval
normaliseAll :: Context -> Env -> TT Name -> TT Name
normaliseAll ctxt env t
= evalState (do val <- eval False ctxt [] env t [AtREPL]
quote 0 val) initEval
normalise :: Context -> Env -> TT Name -> TT Name
normalise = normaliseTrace False
normaliseTrace :: Bool -> Context -> Env -> TT Name -> TT Name
normaliseTrace tr ctxt env t
= evalState (do val <- eval tr ctxt [] (map finalEntry env) (finalise t) []
quote 0 val) initEval
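-- Added note (derived from 'eval' below): normaliseAll passes the AtREPL
-- option, which makes lookupDefAcc treat definitions as public and therefore
-- unfolds more than plain 'normalise'; normaliseTrace additionally threads a
-- tracing flag through to 'eval'.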
specialise :: Context -> Env -> [(Name, Int)] -> TT Name -> TT Name
specialise ctxt env limits t
= evalState (do val <- eval False ctxt []
(map finalEntry env) (finalise t)
[Spec]
quote 0 val) (initEval { limited = limits })
-- | Like normalise, but we only reduce functions that are marked as okay to
-- inline (and probably shouldn't reduce lets?)
-- 20130908: now only used to reduce for totality checking. Inlining should
-- be done elsewhere.
simplify :: Context -> Env -> TT Name -> TT Name
simplify ctxt env t
= evalState (do val <- eval False ctxt [(sUN "lazy", 0),
(sUN "assert_smaller", 0),
(sUN "par", 0),
(sUN "prim__syntactic_eq", 0),
(sUN "fork", 0)]
(map finalEntry env) (finalise t)
[Simplify]
quote 0 val) initEval
-- | Simplify for run-time (i.e. basic inlining)
rt_simplify :: Context -> Env -> TT Name -> TT Name
rt_simplify ctxt env t
= evalState (do val <- eval False ctxt [(sUN "lazy", 0),
(sUN "assert_smaller", 0),
(sUN "par", 0),
(sUN "prim__syntactic_eq", 0),
(sUN "prim_fork", 0)]
(map finalEntry env) (finalise t)
[RunTT]
quote 0 val) initEval
-- | Reduce a term to head normal form
hnf :: Context -> Env -> TT Name -> TT Name
hnf ctxt env t
= evalState (do val <- eval False ctxt []
(map finalEntry env)
(finalise t) [HNF]
quote 0 val) initEval
-- unbindEnv env (quote 0 (eval ctxt (bindEnv env t)))
finalEntry :: (Name, Binder (TT Name)) -> (Name, Binder (TT Name))
finalEntry (n, b) = (n, fmap finalise b)
bindEnv :: EnvTT n -> TT n -> TT n
bindEnv [] tm = tm
bindEnv ((n, Let t v):bs) tm = Bind n (NLet t v) (bindEnv bs tm)
bindEnv ((n, b):bs) tm = Bind n b (bindEnv bs tm)
unbindEnv :: EnvTT n -> TT n -> TT n
unbindEnv [] tm = tm
unbindEnv (_:bs) (Bind n b sc) = unbindEnv bs sc
usable :: Bool -- specialising
-> Name -> [(Name, Int)] -> Eval (Bool, [(Name, Int)])
-- usable _ _ ns@((MN 0 "STOP", _) : _) = return (False, ns)
usable False n [] = return (True, [])
usable True n ns
= do ES ls num <- get
case lookup n ls of
Just 0 -> return (False, ns)
Just i -> return (True, ns)
_ -> return (False, ns)
usable False n ns
= case lookup n ns of
Just 0 -> return (False, ns)
Just i -> return $ (True, (n, abs (i-1)) : filter (\ (n', _) -> n/=n') ns)
_ -> return $ (True, (n, 100) : filter (\ (n', _) -> n/=n') ns)
deduct :: Name -> Eval ()
deduct n = do ES ls num <- get
case lookup n ls of
Just i -> do put $ ES ((n, (i-1)) :
filter (\ (n', _) -> n/=n') ls) num
_ -> return ()
-- | Evaluate in a context of locally named things (i.e. not de Bruijn indexed,
-- such as we might have during construction of a proof)
-- The (Name, Int) pair in the arguments is the maximum depth of unfolding of
-- a name. The corresponding pair in the state is the maximum number of
-- unfoldings overall.
eval :: Bool -> Context -> [(Name, Int)] -> Env -> TT Name ->
[EvalOpt] -> Eval Value
eval traceon ctxt ntimes genv tm opts = ev ntimes [] True [] tm where
spec = Spec `elem` opts
simpl = Simplify `elem` opts
runtime = RunTT `elem` opts
atRepl = AtREPL `elem` opts
hnf = HNF `elem` opts
-- returns 'True' if the function should block
-- normal evaluation should return false
blockSimplify (CaseInfo inl dict) n stk
| RunTT `elem` opts
= not (inl || dict) || elem n stk
| Simplify `elem` opts
= (not (inl || dict) || elem n stk)
|| (n == sUN "prim__syntactic_eq")
| otherwise = False
getCases cd | simpl = cases_totcheck cd
| runtime = cases_runtime cd
| otherwise = cases_compiletime cd
ev ntimes stk top env (P _ n ty)
| Just (Let t v) <- lookup n genv = ev ntimes stk top env v
ev ntimes_in stk top env (P Ref n ty)
| not top && hnf = liftM (VP Ref n) (ev ntimes stk top env ty)
| otherwise
= do (u, ntimes) <- usable spec n ntimes_in
if u then
do let val = lookupDefAcc n (spec || atRepl) ctxt
case val of
[(Function _ tm, Public)] ->
ev ntimes (n:stk) True env tm
[(Function _ tm, Hidden)] ->
ev ntimes (n:stk) True env tm
[(TyDecl nt ty, _)] -> do vty <- ev ntimes stk True env ty
return $ VP nt n vty
[(CaseOp ci _ _ _ _ cd, acc)]
| (acc /= Frozen) &&
null (fst (cases_totcheck cd)) -> -- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then liftM (VP Ref n) (ev ntimes stk top env ty)
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns [] tree
case c of
(Nothing, _) -> liftM (VP Ref n) (ev ntimes stk top env ty)
(Just v, _) -> return v
_ -> liftM (VP Ref n) (ev ntimes stk top env ty)
else liftM (VP Ref n) (ev ntimes stk top env ty)
ev ntimes stk top env (P nt n ty)
= liftM (VP nt n) (ev ntimes stk top env ty)
ev ntimes stk top env (V i)
| i < length env && i >= 0 = return $ snd (env !! i)
| otherwise = return $ VV i
ev ntimes stk top env (Bind n (Let t v) sc)
= do v' <- ev ntimes stk top env v --(finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
wknV (-1) sc'
-- | otherwise
-- = do t' <- ev ntimes stk top env t
-- v' <- ev ntimes stk top env v --(finalise v)
-- -- use Tmp as a placeholder, then make it a variable reference
-- -- again when evaluation finished
-- hs <- get
-- let vd = nexthole hs
-- put (hs { nexthole = vd + 1 })
-- sc' <- ev ntimes stk top (VP Bound (MN vd "vlet") VErased : env) sc
-- return $ VBLet vd n t' v' sc'
ev ntimes stk top env (Bind n (NLet t v) sc)
= do t' <- ev ntimes stk top env (finalise t)
v' <- ev ntimes stk top env (finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
return $ VBind True n (Let t' v') (\x -> return sc')
ev ntimes stk top env (Bind n b sc)
= do b' <- vbind env b
let n' = uniqueName n (map fst env)
return $ VBind True -- (vinstances 0 sc < 2)
n' b' (\x -> ev ntimes stk False ((n, x):env) sc)
where vbind env t
-- | simpl
-- = fmapMB (\tm -> ev ((MN 0 "STOP", 0) : ntimes)
-- stk top env (finalise tm)) t
-- | otherwise
= fmapMB (\tm -> ev ntimes stk top env (finalise tm)) t
ev ntimes stk top env (App f a)
= do f' <- ev ntimes stk False env f
a' <- ev ntimes stk False env a
evApply ntimes stk top env [a'] f'
ev ntimes stk top env (Proj t i)
= do -- evaluate dictionaries if it means the projection works
t' <- ev ntimes stk top env t
-- tfull' <- reapply ntimes stk top env t' []
return (doProj t' (getValArgs t'))
where doProj t' (VP (DCon _ _) _ _, args)
| i >= 0 && i < length args = args!!i
doProj t' _ = VProj t' i
ev ntimes stk top env (Constant c) = return $ VConstant c
ev ntimes stk top env Erased = return VErased
ev ntimes stk top env Impossible = return VImpossible
ev ntimes stk top env (TType i) = return $ VType i
evApply ntimes stk top env args (VApp f a)
= evApply ntimes stk top env (a:args) f
evApply ntimes stk top env args f
= apply ntimes stk top env f args
reapply ntimes stk top env f@(VP Ref n ty) args
= let val = lookupDefAcc n (spec || atRepl) ctxt in
case val of
[(CaseOp ci _ _ _ _ cd, acc)] ->
let (ns, tree) = getCases cd in
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
_ -> case args of
(a : as) -> return $ unload env f (a : as)
[] -> return f
reapply ntimes stk top env (VApp f a) args
= reapply ntimes stk top env f (a : args)
reapply ntimes stk top env v args = return v
apply ntimes stk top env (VBind True n (Lam t) sc) (a:as)
= do a' <- sc a
app <- apply ntimes stk top env a' as
wknV (-1) app
apply ntimes_in stk top env f@(VP Ref n ty) args
| not top && hnf = case args of
[] -> return f
_ -> return $ unload env f args
| otherwise
= do (u, ntimes) <- usable spec n ntimes_in
if u then
do let val = lookupDefAcc n (spec || atRepl) ctxt
case val of
[(CaseOp ci _ _ _ _ cd, acc)]
| acc /= Frozen -> -- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then return $ unload env (VP Ref n ty) args
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
[(Operator _ i op, _)] ->
if (i <= length args)
then case op (take i args) of
Nothing -> return $ unload env (VP Ref n ty) args
Just v -> evApply ntimes stk top env (drop i args) v
else return $ unload env (VP Ref n ty) args
_ -> case args of
[] -> return f
_ -> return $ unload env f args
else case args of
(a : as) -> return $ unload env f (a:as)
[] -> return f
apply ntimes stk top env f (a:as) = return $ unload env f (a:as)
apply ntimes stk top env f [] = return f
-- specApply stk env f@(VP Ref n ty) args
-- = case lookupCtxt n statics of
-- [as] -> if or as
-- then trace (show (n, map fst (filter (\ (_, s) -> s) (zip args as)))) $
-- return $ unload env f args
-- else return $ unload env f args
-- _ -> return $ unload env f args
-- specApply stk env f args = return $ unload env f args
unload :: [(Name, Value)] -> Value -> [Value] -> Value
unload env f [] = f
unload env f (a:as) = unload env (VApp f a) as
evCase ntimes n stk top env ns args tree
| length ns <= length args
= do let args' = take (length ns) args
let rest = drop (length ns) args
when spec $ deduct n -- successful, so deduct usages
t <- evTree ntimes stk top env (zip ns args') tree
-- (zipWith (\n , t) -> (n, t)) ns args') tree
return (t, rest)
| otherwise = return (Nothing, args)
evTree :: [(Name, Int)] -> [Name] -> Bool ->
[(Name, Value)] -> [(Name, Value)] -> SC -> Eval (Maybe Value)
evTree ntimes stk top env amap (UnmatchedCase str) = return Nothing
evTree ntimes stk top env amap (STerm tm)
= do let etm = pToVs (map fst amap) tm
etm' <- ev ntimes stk (not (conHeaded tm))
(amap ++ env) etm
return $ Just etm'
evTree ntimes stk top env amap (ProjCase t alts)
= do t' <- ev ntimes stk top env t
doCase ntimes stk top env amap t' alts
evTree ntimes stk top env amap (Case n alts)
= case lookup n amap of
Just v -> doCase ntimes stk top env amap v alts
_ -> return Nothing
evTree ntimes stk top env amap ImpossibleCase = return Nothing
doCase ntimes stk top env amap v alts =
do c <- chooseAlt env v (getValArgs v) alts amap
case c of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> do c' <- chooseAlt' ntimes stk env v (getValArgs v) alts amap
case c' of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> return Nothing
conHeaded tm@(App _ _)
| (P (DCon _ _) _ _, args) <- unApply tm = True
conHeaded t = False
chooseAlt' ntimes stk env _ (f, args) alts amap
= do f' <- apply ntimes stk True env f args
chooseAlt env f' (getValArgs f')
alts amap
chooseAlt :: [(Name, Value)] -> Value -> (Value, [Value]) -> [CaseAlt] ->
[(Name, Value)] ->
Eval (Maybe ([(Name, Value)], SC))
chooseAlt env _ (VP (DCon i a) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts = return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP (TCon i a) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts
= return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VConstant c, []) alts amap
| Just v <- findConst c alts = return $ Just (amap, v)
| Just (n', sub, sc) <- findSuc c alts
= return $ Just (updateAmap [(n',sub)] amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP _ n _, args) alts amap
| Just (ns, sc) <- findFn n alts = return $ Just (updateAmap (zip ns args) amap, sc)
chooseAlt env _ (VBind _ _ (Pi s) t, []) alts amap
| Just (ns, sc) <- findFn (sUN "->") alts
= do t' <- t (VV 0) -- we know it's not in scope or it's not a pattern
return $ Just (updateAmap (zip ns [s, t']) amap, sc)
chooseAlt _ _ _ alts amap
| Just v <- findDefault alts
= if (any fnCase alts)
then return $ Just (amap, v)
else return Nothing
| otherwise = return Nothing
fnCase (FnCase _ _ _) = True
fnCase _ = False
-- Replace old variable names in the map with new matches
-- (This is possibly unnecessary since we make unique names and don't
-- allow repeated variables...?)
updateAmap newm amap
= newm ++ filter (\ (x, _) -> not (elem x (map fst newm))) amap
findTag i [] = Nothing
findTag i (ConCase n j ns sc : xs) | i == j = Just (ns, sc)
findTag i (_ : xs) = findTag i xs
findFn fn [] = Nothing
findFn fn (FnCase n ns sc : xs) | fn == n = Just (ns, sc)
findFn fn (_ : xs) = findFn fn xs
findDefault [] = Nothing
findDefault (DefaultCase sc : xs) = Just sc
findDefault (_ : xs) = findDefault xs
findSuc c [] = Nothing
findSuc (BI val) (SucCase n sc : _)
| val /= 0 = Just (n, VConstant (BI (val - 1)), sc)
findSuc c (_ : xs) = findSuc c xs
findConst c [] = Nothing
findConst c (ConstCase c' v : xs) | c == c' = Just v
findConst (AType (ATInt ITNative)) (ConCase n 1 [] v : xs) = Just v
findConst (AType ATFloat) (ConCase n 2 [] v : xs) = Just v
findConst (AType (ATInt ITChar)) (ConCase n 3 [] v : xs) = Just v
findConst StrType (ConCase n 4 [] v : xs) = Just v
findConst PtrType (ConCase n 5 [] v : xs) = Just v
findConst (AType (ATInt ITBig)) (ConCase n 6 [] v : xs) = Just v
findConst (AType (ATInt (ITFixed ity))) (ConCase n tag [] v : xs)
| tag == 7 + fromEnum ity = Just v
findConst (AType (ATInt (ITVec ity count))) (ConCase n tag [] v : xs)
| tag == (fromEnum ity + 1) * 1000 + count = Just v
findConst c (_ : xs) = findConst c xs
getValArgs tm = getValArgs' tm []
getValArgs' (VApp f a) as = getValArgs' f (a:as)
getValArgs' f as = (f, as)
-- tmpToV i vd (VLetHole j) | vd == j = return $ VV i
-- tmpToV i vd (VP nt n v) = liftM (VP nt n) (tmpToV i vd v)
-- tmpToV i vd (VBind n b sc) = do b' <- fmapMB (tmpToV i vd) b
-- let sc' = \x -> do x' <- sc x
-- tmpToV (i + 1) vd x'
-- return (VBind n b' sc')
-- tmpToV i vd (VApp f a) = liftM2 VApp (tmpToV i vd f) (tmpToV i vd a)
-- tmpToV i vd x = return x
instance Eq Value where
(==) x y = getTT x == getTT y
where getTT v = evalState (quote 0 v) initEval
class Quote a where
quote :: Int -> a -> Eval (TT Name)
instance Quote Value where
quote i (VP nt n v) = liftM (P nt n) (quote i v)
quote i (VV x) = return $ V x
quote i (VBind _ n b sc) = do sc' <- sc (VTmp i)
b' <- quoteB b
liftM (Bind n b') (quote (i+1) sc')
where quoteB t = fmapMB (quote i) t
quote i (VBLet vd n t v sc)
= do sc' <- quote i sc
t' <- quote i t
v' <- quote i v
let sc'' = pToV (sMN vd "vlet") (addBinder sc')
return (Bind n (Let t' v') sc'')
quote i (VApp f a) = liftM2 App (quote i f) (quote i a)
quote i (VType u) = return $ TType u
quote i VErased = return $ Erased
quote i VImpossible = return $ Impossible
quote i (VProj v j) = do v' <- quote i v
return (Proj v' j)
quote i (VConstant c) = return $ Constant c
quote i (VTmp x) = return $ V (i - x - 1)
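-- Hedged round-trip sketch (added): quoting an evaluated constant yields the
-- corresponding term again, e.g.
--   evalState (quote 0 (VConstant (BI 42))) initEval == Constant (BI 42)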
wknV :: Int -> Value -> Eval Value
wknV i (VV x) = return $ VV (x + i)
wknV i (VBind red n b sc) = do b' <- fmapMB (wknV i) b
return $ VBind red n b' (\x -> do x' <- sc x
wknV i x')
wknV i (VApp f a) = liftM2 VApp (wknV i f) (wknV i a)
wknV i t = return t
convEq' ctxt x y = evalStateT (convEq ctxt x y) (0, [])
convEq :: Context -> TT Name -> TT Name -> StateT UCs (TC' Err) Bool
convEq ctxt = ceq [] where
ceq :: [(Name, Name)] -> TT Name -> TT Name -> StateT UCs (TC' Err) Bool
ceq ps (P xt x _) (P yt y _)
| x == y || (x, y) `elem` ps || (y,x) `elem` ps = return True
| otherwise = sameDefs ps x y
ceq ps x (Bind n (Lam t) (App y (V 0))) = ceq ps x y
ceq ps (Bind n (Lam t) (App x (V 0))) y = ceq ps x y
ceq ps x (Bind n (Lam t) (App y (P Bound n' _)))
| n == n' = ceq ps x y
ceq ps (Bind n (Lam t) (App x (P Bound n' _))) y
| n == n' = ceq ps x y
ceq ps (V x) (V y) = return (x == y)
ceq ps (Bind _ xb xs) (Bind _ yb ys)
= liftM2 (&&) (ceqB ps xb yb) (ceq ps xs ys)
where
ceqB ps (Let v t) (Let v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps (Guess v t) (Guess v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps b b' = ceq ps (binderTy b) (binderTy b')
ceq ps (App fx ax) (App fy ay) = liftM2 (&&) (ceq ps fx fy) (ceq ps ax ay)
ceq ps (Constant x) (Constant y) = return (x == y)
ceq ps (TType x) (TType y) = do (v, cs) <- get
put (v, ULE x y : cs)
return True
ceq ps Erased _ = return True
ceq ps _ Erased = return True
ceq ps _ _ = return False
caseeq ps (Case n cs) (Case n' cs') = caseeqA ((n,n'):ps) cs cs'
where
caseeqA ps (ConCase x i as sc : rest) (ConCase x' i' as' sc' : rest')
= do q1 <- caseeq (zip as as' ++ ps) sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && i == i' && q1 && q2
caseeqA ps (ConstCase x sc : rest) (ConstCase x' sc' : rest')
= do q1 <- caseeq ps sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && q1 && q2
caseeqA ps (DefaultCase sc : rest) (DefaultCase sc' : rest')
= liftM2 (&&) (caseeq ps sc sc') (caseeqA ps rest rest')
caseeqA ps [] [] = return True
caseeqA ps _ _ = return False
caseeq ps (STerm x) (STerm y) = ceq ps x y
caseeq ps (UnmatchedCase _) (UnmatchedCase _) = return True
caseeq ps _ _ = return False
sameDefs ps x y = case (lookupDef x ctxt, lookupDef y ctxt) of
([Function _ xdef], [Function _ ydef])
-> ceq ((x,y):ps) xdef ydef
([CaseOp _ _ _ _ _ xd],
[CaseOp _ _ _ _ _ yd])
-> let (_, xdef) = cases_compiletime xd
(_, ydef) = cases_compiletime yd in
caseeq ((x,y):ps) xdef ydef
_ -> return False
-- SPECIALISATION -----------------------------------------------------------
-- We need too much control to be able to do this by tweaking the main
-- evaluator
spec :: Context -> Ctxt [Bool] -> Env -> TT Name -> Eval (TT Name)
spec ctxt statics genv tm = error "spec undefined"
-- CONTEXTS -----------------------------------------------------------------
{-| A definition is either a simple function (just an expression with a type),
    a constant, which could be a data or type constructor, an axiom or an as
    yet undefined function, or an Operator.
An Operator is a function which explains how to reduce.
A CaseOp is a function defined by a simple case tree -}
data Def = Function !Type !Term
| TyDecl NameType !Type
| Operator Type Int ([Value] -> Maybe Value)
| CaseOp CaseInfo
!Type
![Type] -- argument types
![Either Term (Term, Term)] -- original definition
![([Name], Term, Term)] -- simplified for totality check definition
!CaseDefs
-- [Name] SC -- Compile time case definition
             -- [Name] SC -- Run time case definitions
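-- Orientation (added commentary): a primitive registered with 'addOperator'
-- below is stored as an Operator, a data or type constructor added by
-- 'addDatatype' as a TyDecl (DCon ...) or TyDecl (TCon ...), and an ordinary
-- pattern-matching definition added by 'addCasedef' as a CaseOp carrying its
-- totality-check, compile-time, inlined and run-time case trees.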
data CaseDefs = CaseDefs {
cases_totcheck :: !([Name], SC),
cases_compiletime :: !([Name], SC),
cases_inlined :: !([Name], SC),
cases_runtime :: !([Name], SC)
}
data CaseInfo = CaseInfo {
case_inlinable :: Bool,
tc_dictionary :: Bool
}
{-!
deriving instance Binary Def
!-}
{-!
deriving instance Binary CaseInfo
!-}
{-!
deriving instance Binary CaseDefs
!-}
instance Show Def where
show (Function ty tm) = "Function: " ++ show (ty, tm)
show (TyDecl nt ty) = "TyDecl: " ++ show nt ++ " " ++ show ty
show (Operator ty _ _) = "Operator: " ++ show ty
show (CaseOp (CaseInfo inlc inlr) ty atys ps_in ps cd)
= let (ns, sc) = cases_compiletime cd
(ns_t, sc_t) = cases_totcheck cd
(ns', sc') = cases_runtime cd in
"Case: " ++ show ty ++ " " ++ show ps ++ "\n" ++
"TOTALITY CHECK TIME:\n\n" ++
show ns_t ++ " " ++ show sc_t ++ "\n\n" ++
"COMPILE TIME:\n\n" ++
show ns ++ " " ++ show sc ++ "\n\n" ++
"RUN TIME:\n\n" ++
show ns' ++ " " ++ show sc' ++ "\n\n" ++
if inlc then "Inlinable\n" else "Not inlinable\n"
-------
-- Frozen => doesn't reduce
-- Hidden => doesn't reduce and invisible to type checker
data Accessibility = Public | Frozen | Hidden
deriving (Show, Eq)
-- | The result of totality checking
data Totality = Total [Int] -- ^ well-founded arguments
| Productive -- ^ productive
| Partial PReason
| Unchecked
deriving Eq
-- | Reasons why a function may not be total
data PReason = Other [Name] | Itself | NotCovering | NotPositive | UseUndef Name
| BelieveMe | Mutual [Name] | NotProductive
deriving (Show, Eq)
instance Show Totality where
show (Total args)= "Total" -- ++ show args ++ " decreasing arguments"
show Productive = "Productive" -- ++ show args ++ " decreasing arguments"
show Unchecked = "not yet checked for totality"
show (Partial Itself) = "possibly not total as it is not well founded"
show (Partial NotCovering) = "not total as there are missing cases"
show (Partial NotPositive) = "not strictly positive"
show (Partial NotProductive) = "not productive"
show (Partial BelieveMe) = "not total due to use of believe_me in proof"
show (Partial (Other ns)) = "possibly not total due to: " ++ showSep ", " (map show ns)
show (Partial (Mutual ns)) = "possibly not total due to recursive path " ++
showSep " --> " (map show ns)
{-!
deriving instance Binary Accessibility
!-}
{-!
deriving instance Binary Totality
!-}
{-!
deriving instance Binary PReason
!-}
-- Possible attached meta-information for a definition in context
data MetaInformation =
EmptyMI -- ^ No meta-information
| DataMI [Int] -- ^ Meta information for a data declaration with position of parameters
deriving (Eq, Show)
-- | Contexts used for global definitions and for proof state. They contain
-- universe constraints and existing definitions.
data Context = MkContext {
uconstraints :: [UConstraint],
next_tvar :: Int,
definitions :: Ctxt (Def, Accessibility, Totality, MetaInformation)
} deriving Show
-- | The initial empty context
initContext = MkContext [] 0 emptyContext
mapDefCtxt :: (Def -> Def) -> Context -> Context
mapDefCtxt f (MkContext c t defs) = MkContext c t (mapCtxt f' defs)
  where f' (d, a, t, m) = (f d, a, t, m)
-- | Get the definitions from a context
ctxtAlist :: Context -> [(Name, Def)]
ctxtAlist ctxt = map (\(n, (d, a, t, m)) -> (n, d)) $ toAlist (definitions ctxt)
veval ctxt env t = evalState (eval False ctxt [] env t []) initEval
addToCtxt :: Name -> Term -> Type -> Context -> Context
addToCtxt n tm ty uctxt
= let ctxt = definitions uctxt
ctxt' = addDef n (Function ty tm, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
setAccess :: Name -> Accessibility -> Context -> Context
setAccess n a uctxt
= let ctxt = definitions uctxt
ctxt' = updateDef n (\ (d, _, t, m) -> (d, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setTotal :: Name -> Totality -> Context -> Context
setTotal n t uctxt
= let ctxt = definitions uctxt
ctxt' = updateDef n (\ (d, a, _, m) -> (d, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setMetaInformation :: Name -> MetaInformation -> Context -> Context
setMetaInformation n m uctxt
= let ctxt = definitions uctxt
ctxt' = updateDef n (\ (d, a, t, _) -> (d, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
addCtxtDef :: Name -> Def -> Context -> Context
addCtxtDef n d c = let ctxt = definitions c
ctxt' = addDef n (d, Public, Unchecked, EmptyMI) $! ctxt in
c { definitions = ctxt' }
addTyDecl :: Name -> NameType -> Type -> Context -> Context
addTyDecl n nt ty uctxt
= let ctxt = definitions uctxt
ctxt' = addDef n (TyDecl nt ty, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
addDatatype :: Datatype Name -> Context -> Context
addDatatype (Data n tag ty cons) uctxt
= let ctxt = definitions uctxt
ty' = normalise uctxt [] ty
ctxt' = addCons 0 cons (addDef n
(TyDecl (TCon tag (arity ty')) ty, Public, Unchecked, EmptyMI) ctxt) in
uctxt { definitions = ctxt' }
where
addCons tag [] ctxt = ctxt
addCons tag ((n, ty) : cons) ctxt
= let ty' = normalise uctxt [] ty in
addCons (tag+1) cons (addDef n
(TyDecl (DCon tag (arity ty')) ty, Public, Unchecked, EmptyMI) ctxt)
-- FIXME: Too many arguments! Refactor all these Bools.
addCasedef :: Name -> CaseInfo -> Bool -> Bool -> Bool -> Bool ->
[Type] -> -- argument types
[Either Term (Term, Term)] ->
[([Name], Term, Term)] -> -- totality
[([Name], Term, Term)] -> -- compile time
[([Name], Term, Term)] -> -- inlined
[([Name], Term, Term)] -> -- run time
Type -> Context -> Context
addCasedef n ci@(CaseInfo alwaysInline tcdict)
tcase covering reflect asserted argtys
ps_in ps_tot ps_inl ps_ct ps_rt ty uctxt
= let ctxt = definitions uctxt
access = case lookupDefAcc n False uctxt of
[(_, acc)] -> acc
_ -> Public
ctxt' = case (simpleCase tcase covering reflect CompileTime emptyFC argtys ps_tot,
simpleCase tcase covering reflect CompileTime emptyFC argtys ps_ct,
simpleCase tcase covering reflect CompileTime emptyFC argtys ps_inl,
simpleCase tcase covering reflect RunTime emptyFC argtys ps_rt) of
(OK (CaseDef args_tot sc_tot _),
OK (CaseDef args_ct sc_ct _),
OK (CaseDef args_inl sc_inl _),
OK (CaseDef args_rt sc_rt _)) ->
let inl = alwaysInline -- tcdict
inlc = (inl || small n args_ct sc_ct) && (not asserted)
inlr = inl || small n args_rt sc_rt
cdef = CaseDefs (args_tot, sc_tot)
(args_ct, sc_ct)
(args_inl, sc_inl)
(args_rt, sc_rt) in
addDef n (CaseOp (ci { case_inlinable = inlc })
ty argtys ps_in ps_tot cdef,
access, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
-- simplify a definition for totality checking
simplifyCasedef :: Name -> Context -> Context
simplifyCasedef n uctxt
= let ctxt = definitions uctxt
ctxt' = case lookupCtxt n ctxt of
[(CaseOp ci ty atys [] ps _, acc, tot, metainf)] ->
ctxt -- nothing to simplify (or already done...)
[(CaseOp ci ty atys ps_in ps cd, acc, tot, metainf)] ->
let ps_in' = map simpl ps_in
pdef = map debind ps_in' in
case simpleCase False True False CompileTime emptyFC atys pdef of
OK (CaseDef args sc _) ->
addDef n (CaseOp ci
ty atys ps_in' ps (cd { cases_totcheck = (args, sc) }),
acc, tot, metainf) ctxt
Error err -> error (show err)
_ -> ctxt in
uctxt { definitions = ctxt' }
where
depat acc (Bind n (PVar t) sc)
= depat (n : acc) (instantiate (P Bound n t) sc)
depat acc x = (acc, x)
debind (Right (x, y)) = let (vs, x') = depat [] x
(_, y') = depat [] y in
(vs, x', y')
debind (Left x) = let (vs, x') = depat [] x in
(vs, x', Impossible)
simpl (Right (x, y)) = Right (x, simplify uctxt [] y)
simpl t = t
addOperator :: Name -> Type -> Int -> ([Value] -> Maybe Value) ->
Context -> Context
addOperator n ty a op uctxt
= let ctxt = definitions uctxt
ctxt' = addDef n (Operator ty a op, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
tfst (a, _, _, _) = a
lookupNames :: Name -> Context -> [Name]
lookupNames n ctxt
= let ns = lookupCtxtName n (definitions ctxt) in
map fst ns
lookupTy :: Name -> Context -> [Type]
lookupTy n ctxt
= do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(Function ty _) -> return ty
(TyDecl _ ty) -> return ty
(Operator ty _ _) -> return ty
(CaseOp _ ty _ _ _ _) -> return ty
isConName :: Name -> Context -> Bool
isConName n ctxt = isTConName n ctxt || isDConName n ctxt
isTConName :: Name -> Context -> Bool
isTConName n ctxt
= or $ do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(TyDecl (TCon _ _) _) -> return True
_ -> return False
isDConName :: Name -> Context -> Bool
isDConName n ctxt
= or $ do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(TyDecl (DCon _ _) _) -> return True
_ -> return False
isFnName :: Name -> Context -> Bool
isFnName n ctxt
= or $ do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(Function _ _) -> return True
(Operator _ _ _) -> return True
(CaseOp _ _ _ _ _ _) -> return True
_ -> return False
lookupP :: Name -> Context -> [Term]
lookupP n ctxt
= do def <- lookupCtxt n (definitions ctxt)
p <- case def of
(Function ty tm, a, _, _) -> return (P Ref n ty, a)
(TyDecl nt ty, a, _, _) -> return (P nt n ty, a)
(CaseOp _ ty _ _ _ _, a, _, _) -> return (P Ref n ty, a)
(Operator ty _ _, a, _, _) -> return (P Ref n ty, a)
case snd p of
Hidden -> []
_ -> return (fst p)
lookupDef :: Name -> Context -> [Def]
lookupDef n ctxt = map tfst $ lookupCtxt n (definitions ctxt)
lookupDefAcc :: Name -> Bool -> Context ->
[(Def, Accessibility)]
lookupDefAcc n mkpublic ctxt
= map mkp $ lookupCtxt n (definitions ctxt)
-- io_bind a special case for REPL prettiness
where mkp (d, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_return"))
then (d, Public) else (d, a)
lookupTotal :: Name -> Context -> [Totality]
lookupTotal n ctxt = map mkt $ lookupCtxt n (definitions ctxt)
where mkt (d, a, t, m) = t
lookupMetaInformation :: Name -> Context -> [MetaInformation]
lookupMetaInformation n ctxt = map mkm $ lookupCtxt n (definitions ctxt)
where mkm (d, a, t, m) = m
lookupNameTotal :: Name -> Context -> [(Name, Totality)]
lookupNameTotal n = map (\(n, (_, _, t, _)) -> (n, t)) . lookupCtxtName n . definitions
lookupVal :: Name -> Context -> [Value]
lookupVal n ctxt
= do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(Function _ htm) -> return (veval ctxt [] htm)
(TyDecl nt ty) -> return (VP nt n (veval ctxt [] ty))
lookupTyEnv :: Name -> Env -> Maybe (Int, Type)
lookupTyEnv n env = li n 0 env where
li n i [] = Nothing
li n i ((x, b): xs)
| n == x = Just (i, binderTy b)
| otherwise = li n (i+1) xs
-- | Create a unique name given context and other existing names
uniqueNameCtxt :: Context -> Name -> [Name] -> Name
uniqueNameCtxt ctxt n hs
| n `elem` hs = uniqueNameCtxt ctxt (nextName n) hs
| [_] <- lookupTy n ctxt = uniqueNameCtxt ctxt (nextName n) hs
| otherwise = n
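-- Added note: a name counts as clashing if it appears in the supplied list or
-- if lookupTy finds exactly one definition for it in the context; nextName is
-- applied repeatedly until neither holds.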
| ctford/Idris-Elba-dev | src/Idris/Core/Evaluate.hs | bsd-3-clause | 40,466 | 5 | 29 | 15,550 | 14,165 | 7,242 | 6,923 | 727 | 75 |
{-# LANGUAGE FlexibleContexts #-}
-- | @futhark py@
module Futhark.CLI.Python (main) where
import Futhark.Actions (compilePythonAction)
import Futhark.Compiler.CLI
import Futhark.Passes
-- | Run @futhark py@
main :: String -> [String] -> IO ()
main = compilerMain
()
[]
"Compile sequential Python"
"Generate sequential Python code from optimised Futhark program."
sequentialCpuPipeline
$ \fcfg () mode outpath prog ->
actionProcedure (compilePythonAction fcfg mode outpath) prog
| diku-dk/futhark | src/Futhark/CLI/Python.hs | isc | 498 | 0 | 9 | 81 | 111 | 62 | 49 | 14 | 1 |
{-# LANGUAGE DeriveAnyClass #-}
module Commands.Plugins.Spiros.Finite.Types where
import Commands.Plugins.Spiros.Extra.Types
import Commands.Plugins.Spiros.Edit.Types
import Numeric.Natural
data Finite
= Finite Natural Finite0
deriving (Show,Read,Eq,Ord,Generic,Data,NFData)
data Finite0
= Edit0 Edit
| Move0 Move
-- | KeyRiff_ KeySequence
--TODO | Click_ Click
deriving (Show,Read,Eq,Ord,Generic,Data,NFData)
| sboosali/commands-spiros | config/Commands/Plugins/Spiros/Finite/Types.hs | gpl-2.0 | 442 | 0 | 6 | 71 | 120 | 71 | 49 | 12 | 0 |
{-# LANGUAGE TemplateHaskell, LambdaCase, GeneralizedNewtypeDeriving, ViewPatterns #-}
module Commands.Plugins.Spiros.Types where
import Commands.Plugins.Spiros.Extra.Types
-- import qualified Commands.Servers.Servant as Server
-- import Commands.Backends.Workflow (WorkflowT,MonadWorkflow_,MonadThrow)
import Commands.Parsers.Earley (EarleyParser)
import Workflow.Core (WorkflowT,MonadWorkflow_,MonadThrow)
-- import qualified System.FilePath.Posix as FilePath
import Control.Lens (makePrisms)
import Data.Text.Lazy (Text)
import Control.Monad.IO.Class (MonadIO)
import Prelude.Spiros (Default(..))
-- type SpirosConfig = Server.VConfig SpirosBackend SpirosContext
-- TODO
-- type SpirosGlobals = Server.VGlobals SpirosContext
type SpirosBackend = SpirosMonad -- TODO
type SpirosMonad_ = SpirosMonad ()
type SpirosParser s r = EarleyParser s r String Text
newtype SpirosMonad a = SpirosMonad
{ getSpirosMonad :: WorkflowT IO a
} deriving
( Functor
, Applicative
, Monad
, MonadIO
, MonadThrow
-- , Workflow.MonadWorkflow -- can't derive ConstraintKinds
, MonadWorkflow_
-- , MonadNatlink
-- , MonadVServer
-- , MonadState Server.VState
)
-- ================================================================ --
data SpirosContext
= GlobalContext
| EmacsContext
| ChromeContext
| IntelliJContext
deriving (Show,Read,Eq,Ord,Enum,Bounded,Data,Generic)
instance NFData SpirosContext
instance Default SpirosContext where def = GlobalContext
-- instance IsString SpirosContext where fromString = -- no, depends on a mapping, unlawful like fromaeson
makePrisms ''SpirosContext
-- ================================================================ --
| sboosali/commands-spiros | config/Commands/Plugins/Spiros/Types.hs | gpl-2.0 | 1,707 | 0 | 7 | 238 | 264 | 161 | 103 | 30 | 0 |
{-| Implementation of the primitives of instance allocation
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Cluster.AllocatePrimitives
( allocateOnSingle
, allocateOnPair
) where
import Ganeti.HTools.AlgorithmParams (AlgorithmOptions(..))
import Ganeti.HTools.Cluster.AllocationSolution (AllocElement)
import Ganeti.HTools.Cluster.Metrics ( compCV, compCVfromStats
, updateClusterStatisticsTwice)
import Ganeti.HTools.Cluster.Moves (setInstanceLocationScore)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Node as Node
import Ganeti.HTools.Types
import Ganeti.Utils.Statistics
-- | Tries to allocate an instance on one given node.
allocateOnSingle :: AlgorithmOptions
-> Node.List -> Instance.Instance -> Ndx
-> OpResult AllocElement
allocateOnSingle opts nl inst new_pdx =
let p = Container.find new_pdx nl
new_inst = Instance.setBoth inst new_pdx Node.noSecondary
force = algIgnoreSoftErrors opts
in do
Instance.instMatchesPolicy inst (Node.iPolicy p) (Node.exclStorage p)
new_p <- Node.addPriEx force p inst
let new_nl = Container.add new_pdx new_p nl
new_score = compCV new_nl
return (new_nl, new_inst, [new_p], new_score)
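-- Usage sketch (added, hedged): callers typically try allocateOnSingle for
-- every candidate node index and keep the AllocElement whose returned cluster
-- score (compCV of the updated node list) is lowest.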
-- | Tries to allocate an instance on a given pair of nodes.
allocateOnPair :: AlgorithmOptions
-> [Statistics]
-> Node.List -> Instance.Instance -> Ndx -> Ndx
-> OpResult AllocElement
allocateOnPair opts stats nl inst new_pdx new_sdx =
let tgt_p = Container.find new_pdx nl
tgt_s = Container.find new_sdx nl
force = algIgnoreSoftErrors opts
in do
Instance.instMatchesPolicy inst (Node.iPolicy tgt_p)
(Node.exclStorage tgt_p)
let new_inst = Instance.setBoth (setInstanceLocationScore inst tgt_p
(Just tgt_s))
new_pdx new_sdx
new_p <- Node.addPriEx force tgt_p new_inst
new_s <- Node.addSec tgt_s new_inst new_pdx
let new_nl = Container.addTwo new_pdx new_p new_sdx new_s nl
new_stats = updateClusterStatisticsTwice stats
(tgt_p, new_p) (tgt_s, new_s)
return (new_nl, new_inst, [new_p, new_s], compCVfromStats new_stats)
| mbakke/ganeti | src/Ganeti/HTools/Cluster/AllocatePrimitives.hs | bsd-2-clause | 3,665 | 0 | 16 | 765 | 553 | 296 | 257 | 46 | 1 |
-- |
-- Module : Foundation.Tuple
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : portable
--
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Foundation.Tuple
( Tuple2(..)
, Tuple3(..)
, Tuple4(..)
, Fstable(..)
, Sndable(..)
, Thdable(..)
) where
import Basement.Compat.Base
import Basement.Compat.Bifunctor
import Foundation.Primitive
-- | Strict tuple (a,b)
data Tuple2 a b = Tuple2 !a !b
deriving (Show,Eq,Ord,Typeable,Data,Generic)
instance (NormalForm a, NormalForm b) => NormalForm (Tuple2 a b) where
toNormalForm (Tuple2 a b) = toNormalForm a `seq` toNormalForm b
instance Bifunctor Tuple2 where
bimap f g (Tuple2 a b) = Tuple2 (f a) (g b)
-- | Strict tuple (a,b,c)
data Tuple3 a b c = Tuple3 !a !b !c
deriving (Show,Eq,Ord,Typeable,Data,Generic)
instance (NormalForm a, NormalForm b, NormalForm c) => NormalForm (Tuple3 a b c) where
toNormalForm (Tuple3 a b c) = toNormalForm a `seq` toNormalForm b `seq` toNormalForm c
-- | Strict tuple (a,b,c,d)
data Tuple4 a b c d = Tuple4 !a !b !c !d
deriving (Show,Eq,Ord,Typeable,Data,Generic)
instance (NormalForm a, NormalForm b, NormalForm c, NormalForm d)
=> NormalForm (Tuple4 a b c d) where
toNormalForm (Tuple4 a b c d) = toNormalForm a `seq` toNormalForm b `seq` toNormalForm c `seq` toNormalForm d
-- | Class of product types that have a first element
class Fstable a where
type ProductFirst a
fst :: a -> ProductFirst a
-- | Class of product types that have a second element
class Sndable a where
type ProductSecond a
snd :: a -> ProductSecond a
-- | Class of product types that have a third element
class Thdable a where
type ProductThird a
thd :: a -> ProductThird a
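-- Hypothetical usage sketch (added, doctest style):
--
-- >>> fst (Tuple2 (1 :: Int) 'a')
-- 1
-- >>> thd (Tuple3 'x' 'y' True)
-- True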
instance Fstable (a,b) where
type ProductFirst (a,b) = a
fst (a,_) = a
instance Fstable (a,b,c) where
type ProductFirst (a,b,c) = a
fst (a,_,_) = a
instance Fstable (a,b,c,d) where
type ProductFirst (a,b,c,d) = a
fst (a,_,_,_) = a
instance Fstable (Tuple2 a b) where
type ProductFirst (Tuple2 a b) = a
fst (Tuple2 a _) = a
instance Fstable (Tuple3 a b c) where
type ProductFirst (Tuple3 a b c) = a
fst (Tuple3 a _ _) = a
instance Fstable (Tuple4 a b c d) where
type ProductFirst (Tuple4 a b c d) = a
fst (Tuple4 a _ _ _) = a
instance Sndable (a,b) where
type ProductSecond (a,b) = b
snd (_,b) = b
instance Sndable (a,b,c) where
type ProductSecond (a,b,c) = b
snd (_,b,_) = b
instance Sndable (a,b,c,d) where
type ProductSecond (a,b,c,d) = b
snd (_,b,_,_) = b
instance Sndable (Tuple2 a b) where
type ProductSecond (Tuple2 a b) = b
snd (Tuple2 _ b) = b
instance Sndable (Tuple3 a b c) where
type ProductSecond (Tuple3 a b c) = b
snd (Tuple3 _ b _) = b
instance Sndable (Tuple4 a b c d) where
type ProductSecond (Tuple4 a b c d) = b
snd (Tuple4 _ b _ _) = b
instance Thdable (a,b,c) where
type ProductThird (a,b,c) = c
thd (_,_,c) = c
instance Thdable (a,b,c,d) where
type ProductThird (a,b,c,d) = c
thd (_,_,c,_) = c
instance Thdable (Tuple3 a b c) where
type ProductThird (Tuple3 a b c) = c
thd (Tuple3 _ _ c) = c
instance Thdable (Tuple4 a b c d) where
type ProductThird (Tuple4 a b c d) = c
thd (Tuple4 _ _ c _) = c
| vincenthz/hs-foundation | foundation/Foundation/Tuple.hs | bsd-3-clause | 3,381 | 0 | 9 | 798 | 1,445 | 795 | 650 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Version where
import Version.TH
import Data.Version (showVersion)
import qualified Paths_birch as P
version :: String
version = showVersion P.version ++ "-" ++ $(getCommitHash)
| hithroc/hsvkbot | src/Version.hs | bsd-3-clause | 218 | 0 | 8 | 29 | 53 | 32 | 21 | 7 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE GADTs #-}
module Llvm.Pass.RewriteUse where
import Control.Monad
import Data.Maybe
import Prelude hiding (succ)
import qualified Compiler.Hoopl as H
import Llvm.Data.Ir
import Llvm.Util.Monadic (maybeM)
import Debug.Trace
type MaybeChange a = a -> Maybe a
f2 :: (a -> Maybe a) -> (a, a) -> Maybe (a, a)
f2 f (a1, a2) = case (f a1, f a2) of
(Nothing, Nothing) -> Nothing
(a1', a2') -> Just (fromMaybe a1 a1', fromMaybe a2 a2')
f3 :: (a -> Maybe a) -> (a, a, a) -> Maybe (a, a, a)
f3 f (a1, a2, a3) = case (f a1, f a2, f a3) of
(Nothing, Nothing, Nothing) -> Nothing
(a1', a2', a3') -> Just (fromMaybe a1 a1', fromMaybe a2 a2', fromMaybe a3 a3')
fs :: Eq a => (a -> Maybe a) -> [a] -> Maybe [a]
fs f ls = let ls' = map (\x -> (fromMaybe x (f x))) ls
in if ls == ls' then Nothing else Just ls'
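-- Added intuition (sketch): a MaybeChange reports Nothing when nothing was
-- rewritten, so f2/f3/fs only yield Just when at least one component changed;
-- e.g. with g x = if x == 0 then Just 1 else Nothing:
--   f2 g (0, 5) == Just (1, 5)
--   f2 g (2, 5) == Nothing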
rwIbinExpr :: MaybeChange a -> MaybeChange (IbinExpr a)
rwIbinExpr f e = let (v1, v2) = operandOfIbinExpr e
t = typeOfIbinExpr e
in do { (v1', v2') <- f2 f (v1, v2)
; return $ newBinExpr t v1' v2'
}
where newBinExpr t v1 v2 =
case e of
Add nw _ _ _ -> Add nw t v1 v2
Sub nw _ _ _ -> Sub nw t v1 v2
Mul nw _ _ _ -> Mul nw t v1 v2
Udiv nw _ _ _ -> Udiv nw t v1 v2
Sdiv nw _ _ _ -> Sdiv nw t v1 v2
Urem _ _ _ -> Urem t v1 v2
Srem _ _ _ -> Srem t v1 v2
Shl nw _ _ _ -> Shl nw t v1 v2
Lshr nw _ _ _ -> Lshr nw t v1 v2
Ashr nw _ _ _ -> Ashr nw t v1 v2
And _ _ _ -> And t v1 v2
Or _ _ _ -> Or t v1 v2
Xor _ _ _ -> Xor t v1 v2
rwFbinExpr :: MaybeChange a -> MaybeChange (FbinExpr a)
rwFbinExpr f e = let (v1, v2) = operandOfFbinExpr e
t = typeOfFbinExpr e
in do { (v1', v2') <- f2 f (v1, v2)
; return $ newBinExpr t v1' v2'
}
where newBinExpr t v1 v2 =
case e of
Fadd fg _ _ _ -> Fadd fg t v1 v2
Fsub fg _ _ _ -> Fsub fg t v1 v2
Fmul fg _ _ _ -> Fmul fg t v1 v2
Fdiv fg _ _ _ -> Fdiv fg t v1 v2
Frem fg _ _ _ -> Frem fg t v1 v2
rwBinExpr :: MaybeChange a -> MaybeChange (BinExpr a)
rwBinExpr f (Ie e) = liftM Ie (rwIbinExpr f e)
rwBinExpr f (Fe e) = liftM Fe (rwFbinExpr f e)
rwConversion :: MaybeChange a -> MaybeChange (Conversion a)
rwConversion f (Conversion co tv1 t) = do { tv1' <- f tv1
; return $ Conversion co tv1' t
}
rwGetElemPtr :: Eq a => MaybeChange a -> MaybeChange (GetElemPtr a)
rwGetElemPtr f (GetElemPtr b tv1 indices) = do { tv1' <- f tv1
; indices' <- fs f indices
; return $ GetElemPtr b tv1' indices'
}
rwSelect :: MaybeChange a -> MaybeChange (Select a)
rwSelect f (Select tv1 tv2 tv3) = do { (tv1', tv2', tv3') <- f3 f (tv1, tv2, tv3)
; return $ Select tv1' tv2' tv3'
}
rwIcmp :: MaybeChange a -> MaybeChange (Icmp a)
rwIcmp f (Icmp op t v1 v2) = do { (v1', v2') <- f2 f (v1, v2)
; return $ Icmp op t v1' v2'
}
rwFcmp :: MaybeChange a -> MaybeChange (Fcmp a)
rwFcmp f (Fcmp op t v1 v2) = do { (v1', v2') <- f2 f (v1, v2)
; return $ Fcmp op t v1' v2'
}
tv2v :: MaybeChange Value -> MaybeChange (Typed Value)
tv2v f (TypedData t x) = liftM (TypedData t) (f x)
tp2p :: MaybeChange Value -> MaybeChange (Typed Pointer)
tp2p f x | trace ("tp2p " ++ (show x)) False = undefined
tp2p f (TypedData t (Pointer x)) = liftM (\p -> TypedData t (Pointer p)) (f x)
rwExpr :: MaybeChange Value -> MaybeChange Expr
rwExpr f (EgEp gep) = rwGetElemPtr (tv2v f) gep >>= return . EgEp
rwExpr f (EiC a) = rwIcmp f a >>= return . EiC
rwExpr f (EfC a) = rwFcmp f a >>= return . EfC
rwExpr f (Eb a) = rwBinExpr f a >>= return . Eb
rwExpr f (Ec a) = rwConversion (tv2v f) a >>= return . Ec
rwExpr f (Es a) = rwSelect (tv2v f) a >>= return . Es
rwExpr f (Ev x) = (tv2v f x) >>= return . Ev
rwMemOp :: MaybeChange Value -> MaybeChange Rhs
rwMemOp f x | trace ("rwMemOp " ++ (show x)) False = undefined
rwMemOp f (RmO (Allocate m t ms ma)) = do { ms' <- maybeM (tv2v f) ms
; return $ RmO $ Allocate m t ms' ma
}
rwMemOp f (RmO (Load x ptr a1 a2 a3 a4)) =
do { tp <- (tp2p f) ptr
; traceM $ "tp:" ++ show tp
; return $ RmO (Load x tp a1 a2 a3 a4)
}
rwMemOp f (RmO (LoadAtomic _ _ (TypedData (Tpointer t _) ptr) _)) = do { tv <- (tv2v f) (TypedData t (Deref ptr))
; return $ Re $ Ev tv
}
-- rwMemOp f (RmO (Free tv)) = (tv2v f) tv >>= return . RmO . Free
rwMemOp f (RmO (Store a tv1 tv2 ma nt)) = do { tv1' <- (tv2v f) tv1
; return $ RmO $ Store a tv1' tv2 ma nt
}
rwMemOp f (RmO (StoreAtomic at a tv1 tv2 ma)) = do { tv1' <- (tv2v f) tv1
; return $ RmO $ StoreAtomic at a tv1' tv2 ma
}
rwMemOp f (RmO (CmpXchg wk b ptr v1 v2 b2 fe ff)) = do { (v1', v2') <- f2 (tv2v f) (v1, v2)
; return $ RmO $ CmpXchg wk b ptr v1' v2' b2 fe ff
}
rwMemOp f (RmO (AtomicRmw b ao ptr v1 b2 fe)) = do { v1' <- (tv2v f) v1
; return $ RmO $ AtomicRmw b ao ptr v1' b2 fe
}
rwMemOp _ _ = error "impossible case"
rwShuffleVector :: MaybeChange a -> MaybeChange (ShuffleVector a)
rwShuffleVector f (ShuffleVector tv1 tv2 tv3) = do { (tv1', tv2', tv3') <- f3 f (tv1, tv2, tv3)
; return $ ShuffleVector tv1' tv2' tv3'
}
rwExtractValue :: MaybeChange a -> MaybeChange (ExtractValue a)
rwExtractValue f (ExtractValue tv1 s) = f tv1 >>= \tv1' -> return $ ExtractValue tv1' s
rwInsertValue :: MaybeChange a -> MaybeChange (InsertValue a)
rwInsertValue f (InsertValue tv1 tv2 s) = do { (tv1', tv2') <- f2 f (tv1, tv2)
; return $ InsertValue tv1' tv2' s
}
rwExtractElem :: MaybeChange a -> MaybeChange (ExtractElem a)
rwExtractElem f (ExtractElem tv1 tv2) = do { (tv1', tv2') <- f2 f (tv1, tv2)
; return $ ExtractElem tv1' tv2'
}
rwInsertElem :: MaybeChange a -> MaybeChange (InsertElem a)
rwInsertElem f (InsertElem tv1 tv2 tv3) = do { (tv1', tv2', tv3') <- f3 f (tv1, tv2, tv3)
; return $ InsertElem tv1' tv2' tv3'
}
rwRhs :: MaybeChange Value -> MaybeChange Rhs
rwRhs f (RmO a) = rwMemOp f (RmO a)
rwRhs _ (Call _ _) = Nothing
rwRhs f (Re a) = rwExpr f a >>= return . Re
rwRhs f (ReE a) = rwExtractElem (tv2v f) a >>= return . ReE
rwRhs f (RiE a) = rwInsertElem (tv2v f) a >>= return . RiE
rwRhs f (RsV a) = rwShuffleVector (tv2v f) a >>= return . RsV
rwRhs f (ReV a) = rwExtractValue (tv2v f) a >>= return . ReV
rwRhs f (RiV a) = rwInsertValue (tv2v f) a >>= return . RiV
rwRhs f (VaArg tv t) = (tv2v f) tv >>= \tv' -> return $ VaArg tv' t
rwRhs _ (LandingPad _ _ _ _ _) = Nothing
rwComputingInst :: MaybeChange Value -> MaybeChange ComputingInst
rwComputingInst f (ComputingInst lhs rhs) = rwRhs f rhs >>= return . (ComputingInst lhs)
rwComputingInstWithDbg :: MaybeChange Value -> MaybeChange ComputingInstWithDbg
rwComputingInstWithDbg f (ComputingInstWithDbg cinst dbgs) =
rwComputingInst f cinst >>= \cinst' -> return $ ComputingInstWithDbg cinst' dbgs
rwCinst :: MaybeChange Value -> MaybeChange (Node e x)
rwCinst f (Cinst c) = rwComputingInstWithDbg f c >>= return . Cinst
rwCinst _ _ = Nothing
rwTerminatorInst :: MaybeChange Value -> MaybeChange TerminatorInst
rwTerminatorInst f (Return ls) = do { ls' <- fs (tv2v f) ls
; return $ Return ls'
}
rwTerminatorInst f (Cbr v tl fl) = do { v' <- f v
; return $ Cbr v' tl fl
}
rwTerminatorInst _ _ = Nothing
-- rwTerminatorInst f e = error ("unhandled case " ++ (show e))
rwTerminatorInstWithDbg :: MaybeChange Value -> MaybeChange TerminatorInstWithDbg
rwTerminatorInstWithDbg f (TerminatorInstWithDbg cinst dbgs) =
rwTerminatorInst f cinst >>= \cinst' -> return $ TerminatorInstWithDbg cinst' dbgs
rwTinst :: MaybeChange Value -> MaybeChange (Node e x)
rwTinst f (Tinst c) = rwTerminatorInstWithDbg f c >>= return . Tinst
rwTinst _ _ = Nothing
rwNode :: MaybeChange Value -> MaybeChange (Node e x)
rwNode f n@(Cinst _) = rwCinst f n
rwNode f n@(Tinst _) = rwTinst f n
rwNode _ _ = Nothing
nodeToGraph :: Node e x -> H.Graph Node e x
nodeToGraph n@(Nlabel _) = H.mkFirst n
nodeToGraph n@(Pinst _) = H.mkMiddle n
nodeToGraph n@(Cinst _) = H.mkMiddle n
nodeToGraph n@(Tinst _) = H.mkLast n
| mlite/hLLVM | src/Llvm/Pass/RewriteUse.hs | bsd-3-clause | 10,153 | 0 | 15 | 4,105 | 3,936 | 1,958 | 1,978 | 159 | 13 |
--------------------------------------------------------------------
-- |
-- Module : Text.XML.Light.Cursor
-- Copyright : (c) Galois, Inc. 2008
-- License : BSD3
--
-- Maintainer: Iavor S. Diatchki <[email protected]>
-- Stability : provisional
-- Portability: portable
--
-- XML cursors for working with XML content within the context of
-- an XML document. This implementation is based on the general
-- tree zipper written by Krasimir Angelov and Iavor S. Diatchki.
--
module Text.XML.Light.Cursor
( Tag(..), getTag, setTag, fromTag
, Cursor(..), Path
-- * Conversions
, fromContent
, fromElement
, fromForest
, toForest
, toTree
-- * Moving around
, parent
, root
, getChild
, firstChild
, lastChild
, left
, right
, nextDF
-- ** Searching
, findChild
, findLeft
, findRight
, findRec
-- * Node classification
, isRoot
, isFirst
, isLast
, isLeaf
, isChild
, hasChildren
, getNodeIndex
-- * Updates
, setContent
, modifyContent
, modifyContentM
-- ** Inserting content
, insertLeft
, insertRight
, insertGoLeft
, insertGoRight
-- ** Removing content
, removeLeft
, removeRight
, removeGoLeft
, removeGoRight
, removeGoUp
) where
import Text.XML.Light.Types
import Data.Maybe(isNothing)
import Control.Monad(mplus)
data Tag = Tag { tagName :: QName
, tagAttribs :: [Attr]
, tagLine :: Maybe Line
} deriving (Show)
getTag :: Element -> Tag
getTag e = Tag { tagName = elName e
, tagAttribs = elAttribs e
, tagLine = elLine e
}
setTag :: Tag -> Element -> Element
setTag t e = fromTag t (elContent e)
fromTag :: Tag -> [Content] -> Element
fromTag t cs = Element { elName = tagName t
, elAttribs = tagAttribs t
, elLine = tagLine t
, elContent = cs
}
type Path = [([Content],Tag,[Content])]
-- | The position of a piece of content in an XML document.
data Cursor = Cur
{ current :: Content -- ^ The currently selected content.
, lefts :: [Content] -- ^ Siblings on the left, closest first.
, rights :: [Content] -- ^ Siblings on the right, closest first.
, parents :: Path -- ^ The contexts of the parent elements of this location.
} deriving (Show)
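-- Added intuition (informal): a Cursor is a zipper, so for a document
-- <r><a/><b/><c/></r> a cursor focused on <b/> has current = <b/>,
-- lefts = [<a/>], rights = [<c/>], and a single parents entry recording
-- <r>'s tag together with <r>'s own (empty) sibling lists.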
-- Moving around ---------------------------------------------------------------
-- | The parent of the given location.
parent :: Cursor -> Maybe Cursor
parent loc =
case parents loc of
(pls,v,prs) : ps -> Just
Cur { current = Elem
(fromTag v
(combChildren (lefts loc) (current loc) (rights loc)))
, lefts = pls, rights = prs, parents = ps
}
[] -> Nothing
-- | The top-most parent of the given location.
root :: Cursor -> Cursor
root loc = maybe loc root (parent loc)
-- | The left sibling of the given location.
left :: Cursor -> Maybe Cursor
left loc =
case lefts loc of
t : ts -> Just loc { current = t, lefts = ts
, rights = current loc : rights loc }
[] -> Nothing
-- | The right sibling of the given location.
right :: Cursor -> Maybe Cursor
right loc =
case rights loc of
t : ts -> Just loc { current = t, lefts = current loc : lefts loc
, rights = ts }
[] -> Nothing
-- | The first child of the given location.
firstChild :: Cursor -> Maybe Cursor
firstChild loc =
do (t : ts, ps) <- downParents loc
return Cur { current = t, lefts = [], rights = ts , parents = ps }
-- | The last child of the given location.
lastChild :: Cursor -> Maybe Cursor
lastChild loc =
do (ts, ps) <- downParents loc
case reverse ts of
l : ls -> return Cur { current = l, lefts = ls, rights = []
, parents = ps }
[] -> Nothing
-- | Find the next left sibling that satisfies a predicate.
findLeft :: (Cursor -> Bool) -> Cursor -> Maybe Cursor
findLeft p loc = do loc1 <- left loc
if p loc1 then return loc1 else findLeft p loc1
-- | Find the next right sibling that satisfies a predicate.
findRight :: (Cursor -> Bool) -> Cursor -> Maybe Cursor
findRight p loc = do loc1 <- right loc
if p loc1 then return loc1 else findRight p loc1
-- | The first child that satisfies a predicate.
findChild :: (Cursor -> Bool) -> Cursor -> Maybe Cursor
findChild p loc =
do loc1 <- firstChild loc
if p loc1 then return loc1 else findRight p loc1
-- | The next position in a left-to-right depth-first traversal of a document:
-- either the first child, right sibling, or the right sibling of a parent that
-- has one.
nextDF :: Cursor -> Maybe Cursor
nextDF c = firstChild c `mplus` up c
where up x = right x `mplus` (up =<< parent x)
-- | Perform a depth first search for a descendant that satisfies the
-- given predicate.
findRec :: (Cursor -> Bool) -> Cursor -> Maybe Cursor
findRec p c = if p c then Just c else findRec p =<< nextDF c
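-- A small usage sketch added for illustration (not part of the original
-- API): a depth-first search for the first element with a given
-- unqualified name, built on 'findRec' and the standard
-- 'Text.XML.Light.Types' accessors 'elName' and 'qName'.
findElemByName :: String -> Cursor -> Maybe Cursor
findElemByName n = findRec byName
  where byName cur = case current cur of
                       Elem e -> qName (elName e) == n
                       _      -> False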
-- | The child with the given index (starting from 0).
getChild :: Int -> Cursor -> Maybe Cursor
getChild n loc =
do (ts,ps) <- downParents loc
(ls,t,rs) <- splitChildren ts n
return Cur { current = t, lefts = ls, rights = rs, parents = ps }
-- | private: computes the parent for "down" operations.
downParents :: Cursor -> Maybe ([Content], Path)
downParents loc =
case current loc of
Elem e -> Just ( elContent e
, (lefts loc, getTag e, rights loc) : parents loc
)
_ -> Nothing
-- Conversions -----------------------------------------------------------------
-- | A cursor for the given content.
fromContent :: Content -> Cursor
fromContent t = Cur { current = t, lefts = [], rights = [], parents = [] }
-- | A cursor for the given element.
fromElement :: Element -> Cursor
fromElement e = fromContent (Elem e)
-- | The location of the first tree in a forest.
fromForest :: [Content] -> Maybe Cursor
fromForest (t:ts) = Just Cur { current = t, lefts = [], rights = ts
, parents = [] }
fromForest [] = Nothing
-- | Computes the tree containing this location.
toTree :: Cursor -> Content
toTree loc = current (root loc)
-- | Computes the forest containing this location.
toForest :: Cursor -> [Content]
toForest loc = let r = root loc in combChildren (lefts r) (current r) (rights r)
-- Queries ---------------------------------------------------------------------
-- | Are we at the top of the document?
isRoot :: Cursor -> Bool
isRoot loc = null (parents loc)
-- | Are we at the left end of the document?
isFirst :: Cursor -> Bool
isFirst loc = null (lefts loc)
-- | Are we at the right end of the document?
isLast :: Cursor -> Bool
isLast loc = null (rights loc)
-- | Are we at the bottom of the document?
isLeaf :: Cursor -> Bool
isLeaf loc = isNothing (downParents loc)
-- | Do we have a parent?
isChild :: Cursor -> Bool
isChild loc = not (isRoot loc)
-- | Get the node index inside the sequence of children
getNodeIndex :: Cursor -> Int
getNodeIndex loc = length (lefts loc)
-- | Do we have children?
hasChildren :: Cursor -> Bool
hasChildren loc = not (isLeaf loc)
-- Updates ---------------------------------------------------------------------
-- | Change the current content.
setContent :: Content -> Cursor -> Cursor
setContent t loc = loc { current = t }
-- | Modify the current content.
modifyContent :: (Content -> Content) -> Cursor -> Cursor
modifyContent f loc = setContent (f (current loc)) loc
-- | Modify the current content, allowing for an effect.
modifyContentM :: Monad m => (Content -> m Content) -> Cursor -> m Cursor
modifyContentM f loc = do x <- f (current loc)
return (setContent x loc)
-- | Insert content to the left of the current position.
insertLeft :: Content -> Cursor -> Cursor
insertLeft t loc = loc { lefts = t : lefts loc }
-- | Insert content to the right of the current position.
insertRight :: Content -> Cursor -> Cursor
insertRight t loc = loc { rights = t : rights loc }
-- | Remove the content on the left of the current position, if any.
removeLeft :: Cursor -> Maybe (Content,Cursor)
removeLeft loc = case lefts loc of
l : ls -> return (l,loc { lefts = ls })
[] -> Nothing
-- | Remove the content on the right of the current position, if any.
removeRight :: Cursor -> Maybe (Content,Cursor)
removeRight loc = case rights loc of
l : ls -> return (l,loc { rights = ls })
[] -> Nothing
-- | Insert content to the left of the current position.
-- The new content becomes the current position.
insertGoLeft :: Content -> Cursor -> Cursor
insertGoLeft t loc = loc { current = t, rights = current loc : rights loc }
-- | Insert content to the right of the current position.
-- The new content becomes the current position.
insertGoRight :: Content -> Cursor -> Cursor
insertGoRight t loc = loc { current = t, lefts = current loc : lefts loc }
-- | Remove the current element.
-- The new position is the one on the left.
removeGoLeft :: Cursor -> Maybe Cursor
removeGoLeft loc = case lefts loc of
l : ls -> Just loc { current = l, lefts = ls }
[] -> Nothing
-- | Remove the current element.
-- The new position is the one on the right.
removeGoRight :: Cursor -> Maybe Cursor
removeGoRight loc = case rights loc of
l : ls -> Just loc { current = l, rights = ls }
[] -> Nothing
-- | Remove the current element.
-- The new position is the parent of the old position.
removeGoUp :: Cursor -> Maybe Cursor
removeGoUp loc =
case parents loc of
(pls,v,prs) : ps -> Just
Cur { current = Elem (fromTag v (reverse (lefts loc) ++ rights loc))
, lefts = pls, rights = prs, parents = ps
}
[] -> Nothing
-- | private: Gets the given element of a list.
-- Also returns the preceding elements (reversed) and the following elements.
splitChildren :: [a] -> Int -> Maybe ([a],a,[a])
splitChildren _ n | n < 0 = Nothing
splitChildren cs pos = loop [] cs pos
where loop acc (x:xs) 0 = Just (acc,x,xs)
loop acc (x:xs) n = loop (x:acc) xs $! n-1
loop _ _ _ = Nothing
-- | private: combChildren ls x ys = reverse ls ++ [x] ++ ys
combChildren :: [a] -> a -> [a] -> [a]
combChildren ls t rs = foldl (flip (:)) (t:rs) ls
| amremam2004/vxmlizer | Text/XML/Light/Cursor.hs | bsd-3-clause | 10,568 | 0 | 18 | 2,858 | 2,799 | 1,526 | 1,273 | 196 | 3 |
module Usage.Usage where
import qualified Definition.Definition as D.D
test :: Int
test = D.D.s<caret>even + 1
| charleso/intellij-haskforce | tests/gold/codeInsight/QualifiedImportMultipleLevels_AsPartConsistsOfMultipleCons/Usage/Usage.hs | apache-2.0 | 115 | 0 | 8 | 20 | 41 | 25 | 16 | -1 | -1 |
{-
Copyright (C) 2012-2016 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Slides
Copyright : Copyright (C) 2012-2016 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Utility functions for splitting documents into slides for slide
show formats (dzslides, revealjs, s5, slidy, slideous, beamer).
-}
module Text.Pandoc.Slides ( getSlideLevel, prepSlides ) where
import Text.Pandoc.Definition
-- | Find level of header that starts slides (defined as the least header
-- level that occurs before a non-header/non-hrule in the blocks).
getSlideLevel :: [Block] -> Int
getSlideLevel = go 6
where go least (Header n _ _ : x : xs)
| n < least && nonHOrHR x = go n xs
| otherwise = go least (x:xs)
go least (_ : xs) = go least xs
go least [] = least
nonHOrHR (Header{}) = False
nonHOrHR (HorizontalRule) = False
nonHOrHR _ = True
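-- A rough worked example (added for illustration, not in the original
-- source):
--
-- >>> getSlideLevel [Header 1 nullAttr [], Header 2 nullAttr [], Para [], Header 3 nullAttr []]
-- 2
--
-- The level-2 header is the least header level that occurs before the
-- first non-header, non-rule block, so slides start at level 2.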
-- | Prepare a block list to be passed to hierarchicalize.
prepSlides :: Int -> [Block] -> [Block]
prepSlides slideLevel = ensureStartWithH . splitHrule . extractRefsHeader
where splitHrule (HorizontalRule : Header n attr xs : ys)
| n == slideLevel = Header slideLevel attr xs : splitHrule ys
splitHrule (HorizontalRule : xs) = Header slideLevel nullAttr [Str "\0"] :
splitHrule xs
splitHrule (x : xs) = x : splitHrule xs
splitHrule [] = []
extractRefsHeader bs =
case reverse bs of
(Div ("",["references"],[]) (Header n attrs xs : ys) : zs)
-> reverse zs ++ (Header n attrs xs : [Div ("",["references"],[]) ys])
_ -> bs
ensureStartWithH bs@(Header n _ _:_)
| n <= slideLevel = bs
ensureStartWithH bs = Header slideLevel nullAttr [Str "\0"] : bs
| janschulz/pandoc | src/Text/Pandoc/Slides.hs | gpl-2.0 | 2,734 | 0 | 16 | 779 | 513 | 263 | 250 | 28 | 6 |
{-# OPTIONS_GHC -fwarn-safe #-}
-- | Basic test to see if Safe warning flags compile
-- Warn if module is inferred safe
-- In this test the warning _shouldn't_ fire
module SafeFlags23 where
import System.IO.Unsafe
f :: Int
f = 1
| sdiehl/ghc | testsuite/tests/safeHaskell/flags/SafeFlags24.hs | bsd-3-clause | 232 | 0 | 4 | 44 | 24 | 17 | 7 | 5 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hr-HR">
<title>Directory List v2.3</title>
<maps>
<homeID>directorylistv2_3</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/directorylistv2_3/src/main/javahelp/help_hr_HR/helpset_hr_HR.hs | apache-2.0 | 978 | 78 | 66 | 157 | 412 | 209 | 203 | -1 | -1 |
{-
Parser.hs: Parser for the Flounder interface definition language
Part of Flounder: a strawman device definition DSL for Barrelfish
Copyright (c) 2009, ETH Zurich.
All rights reserved.
This file is distributed under the terms in the attached LICENSE file.
If you do not find this file, copies can be found by writing to:
ETH Zurich D-INFK, Haldeneggsteig 4, CH-8092 Zurich. Attn: Systems Group.
-}
module Parser where
import Syntax
import Prelude
import Text.ParserCombinators.Parsec as Parsec
import Text.ParserCombinators.Parsec.Expr
import Text.ParserCombinators.Parsec.Pos
import qualified Text.ParserCombinators.Parsec.Token as P
import Text.ParserCombinators.Parsec.Language( javaStyle )
import Data.Char
import Numeric
import Data.List
import Text.Printf
parse_intf predefDecls filename = parseFromFile (intffile predefDecls) filename
parse_include predefDecls filename = parseFromFile (includefile predefDecls) filename
lexer = P.makeTokenParser (javaStyle
{ P.reservedNames = [ "interface",
"message",
"rpc",
"in",
"out"
]
, P.reservedOpNames = ["*","/","+","-"]
, P.commentStart = "/*"
, P.commentEnd = "*/"
})
whiteSpace = P.whiteSpace lexer
reserved = P.reserved lexer
identifier = P.identifier lexer
stringLit = P.stringLiteral lexer
comma = P.comma lexer
commaSep = P.commaSep lexer
commaSep1 = P.commaSep1 lexer
parens = P.parens lexer
braces = P.braces lexer
squares = P.squares lexer
semiSep = P.semiSep lexer
symbol = P.symbol lexer
natural = P.natural lexer
builtinTypes = map show [UInt8 ..] ++ ["int"] -- int is legacy -AB
-- identifyBuiltin :: [(String, Declaration)] -> String -> TypeRef
identifyBuiltin typeDcls typeName =
do {
if typeName `elem` builtinTypes then
return $ Builtin $ (read typeName::TypeBuiltin)
else
case typeName `lookup` typeDcls of
Just (Typedef (TAliasT new orig)) -> return $ TypeAlias new orig
Just _ -> return $ TypeVar typeName
Nothing ->
do {
; pos <- getPosition
-- This is ugly, I agree:
; return $ error ("Use of undeclared type '" ++ typeName ++ "' in "
++ show (sourceName pos) ++ " at l. "
++ show (sourceLine pos) ++ " col. "
++ show (sourceColumn pos))
}
}
intffile predefDecls = do { whiteSpace
; i <- iface predefDecls
; return i
}
includefile predefDecls = do { whiteSpace
; typeDecls <- typeDeclaration predefDecls
; return typeDecls
}
iface predefDecls = do { reserved "interface"
; name <- identifier
; descr <- option name stringLit
; decls <- braces $ do {
; typeDecls <- typeDeclaration predefDecls
; msgDecls <- many1 $ mesg typeDecls
; return ((map snd typeDecls) ++ msgDecls)
}
; symbol ";" <?> " ';' missing from end of " ++ name ++ " interface specification"
; return (Interface name (Just descr) decls)
}
typeDeclaration typeDcls = do {
; decl <- try (do {
; x <- transparentAlias
; return $ Just x
})
<|> try (do {
; x <- typedefinition typeDcls
; return $ Just x
})
<|> return Nothing
; case decl of
Nothing -> return typeDcls
Just x -> typeDeclaration (x : typeDcls)
}
mesg typeDcls = do { bckArgs <- many backendParams
; def <- msg typeDcls bckArgs <|> rpc typeDcls bckArgs
; return $ Messagedef def
}
msg typeDcls bckArgs = do { t <- msgtype
; i <- identifier
; a <- parens $ commaSep (marg typeDcls)
; symbol ";"
; return $ Message t i a bckArgs
}
rpc typeDcls bckArgs= do { _ <- rpctype
; i <- identifier
; a <- parens $ commaSep (rpcArg typeDcls)
; symbol ";"
; return $ RPC i a bckArgs
}
rpctype = do { reserved "rpc"
; return () }
rpcArg typeDcls = do { reserved "in"
; Arg b n <- marg typeDcls
; return $ RPCArgIn b n
}
<|> do { reserved "out"
; Arg b n <- marg typeDcls
; return $ RPCArgOut b n
}
backendParams = do { char '@'
; i <- identifier
; p <- parens $ commaSep backendParam
; return (i, p)
}
backendParam = do { name <- identifier
; symbol "="
; do { num <- natural ; return $ (name, BackendInt num) }
<|> do { arg <- identifier ; return $ (name, BackendMsgArg arg) }
}
msgtype = do { reserved "message"; return MMessage }
<|> do { reserved "call"; return MCall }
<|> do { reserved "response"; return MResponse }
marg typeDcls = try (marg_array typeDcls)
<|> (marg_simple typeDcls)
marg_simple typeDcls = do { t <- identifier
; n <- identifier
; b <- identifyBuiltin typeDcls t
; return (Arg b (Name n))
}
marg_array typeDcls = do { t <- identifier
; n <- identifier
; symbol "["
; l <- identifier
; symbol "]"
; bType <- identifyBuiltin typeDcls t
; return (Arg bType (DynamicArray n l))
}
transparentAlias = do { whiteSpace
; reserved "alias"
; newType <- identifier
; originType <- identifier
; symbol ";"
; return (newType, Typedef $ TAliasT newType
(read originType::TypeBuiltin))
}
typedefinition typeDcls = do { whiteSpace
; reserved "typedef"
; (name, typeDef) <- typedef_body typeDcls
; symbol ";"
; return (name, Typedef typeDef)
}
typedef_body typeDcls = try (struct_typedef typeDcls)
<|> try (array_typedef typeDcls)
<|> try enum_typedef
<|> (alias_typedef typeDcls)
struct_typedef typeDcls = do { reserved "struct"
; f <- braces $ many1 (struct_field typeDcls)
; i <- identifier
; return (i, (TStruct i f))
}
struct_field typeDcls = do { t <- identifier
; i <- identifier
; symbol ";"
; b <- identifyBuiltin typeDcls t
; return (TStructField b i)
}
array_typedef typeDcls = do { t <- identifier
; i <- identifier
; symbol "["
; sz <- integer
; symbol "]"
; b <- identifyBuiltin typeDcls t
; return (i, (TArray b i sz))
}
enum_typedef = do { reserved "enum"
; v <- braces $ commaSep1 identifier
; i <- identifier
; return (i, (TEnum i v))
}
alias_typedef typeDcls = do { t <- identifier
; i <- identifier
; b <- identifyBuiltin typeDcls t
; return (i, (TAlias i b))
}
integer = P.integer lexer
| joe9/barrelfish | tools/flounder/Parser.hs | mit | 9,137 | 5 | 23 | 4,498 | 2,035 | 1,031 | 1,004 | 163 | 4 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
module T15361 where
import Data.Kind
import Data.Type.Equality
-- Don't report (* ~ *) here
foo :: forall (a :: Type) (b :: Type) (c :: Type).
a :~~: b -> a :~~: c
foo HRefl = HRefl
data Chumbawamba :: Type -> Type where
IGetKnockedDown :: (Eq a, Ord a) => a -> Chumbawamba a
-- Don't report (Eq a) here
goo :: Chumbawamba a -> String
goo (IGetKnockedDown x) = show x
| sdiehl/ghc | testsuite/tests/typecheck/should_fail/T15361.hs | bsd-3-clause | 495 | 0 | 7 | 102 | 138 | 81 | 57 | -1 | -1 |
import qualified Data.Vector as U
import Data.Bits
main = print . U.maximumBy (\x y -> GT) . U.map (*2) . U.map (`shiftL` 2) $ U.replicate (100000000 :: Int) (5::Int)
| hvr/vector | old-testsuite/microsuite/maximumBy.hs | bsd-3-clause | 168 | 0 | 11 | 30 | 89 | 51 | 38 | 3 | 1 |
import Data.Bits ((.&.))
flags :: Int -> Int
flags x
| x .&. 128 > 0 = 12
| otherwise = 13
{-# NOINLINE flags #-}
main :: IO ()
main = print (flags 255)
| ezyang/ghc | testsuite/tests/codeGen/should_run/T13425.hs | bsd-3-clause | 159 | 0 | 9 | 41 | 78 | 40 | 38 | 8 | 1 |
-- !!! Check the Read instance for Array
-- [Not strictly a 'deriving' issue]
module Main( main ) where
import Data.Array
bds :: ((Int,Int),(Int,Int))
bds = ((1,4),(2,5))
type MyArr = Array (Int,Int) Int
a :: MyArr
a = array bds [ ((i,j), i+j) | (i,j) <- range bds ]
main = do { putStrLn (show a) ;
let { b :: MyArr ;
b = read (show a) } ;
putStrLn (show b)
}
| olsner/ghc | testsuite/tests/deriving/should_run/drvrun009.hs | bsd-3-clause | 418 | 0 | 12 | 132 | 189 | 111 | 78 | 11 | 1 |
-- !!! Test seeking
import System.IO
main = do
h <- openFile "hSeek001.in" ReadMode
True <- hIsSeekable h
hSeek h SeekFromEnd (-1)
z <- hGetChar h
putStr (z:"\n")
hSeek h SeekFromEnd (-3)
x <- hGetChar h
putStr (x:"\n")
hSeek h RelativeSeek (-2)
w <- hGetChar h
putStr (w:"\n")
hSeek h RelativeSeek 2
z <- hGetChar h
putStr (z:"\n")
hSeek h AbsoluteSeek (0)
a <- hGetChar h
putStr (a:"\n")
hSeek h AbsoluteSeek (10)
k <- hGetChar h
putStr (k:"\n")
hSeek h AbsoluteSeek (25)
z <- hGetChar h
putStr (z:"\n")
hClose h
| urbanslug/ghc | libraries/base/tests/IO/hSeek001.hs | bsd-3-clause | 614 | 0 | 9 | 189 | 295 | 131 | 164 | 26 | 1 |
module Main where
areaTriangleTrig a b c = c * height / 2
where
cosa = (b ^ 2 + c ^ 2 - a ^ 2) / (2 * b * c)
sina = sqrt (1 - cosa ^ 2)
height = b * sina
areaTriangleHeron a b c = result
where
result = sqrt (s * (s - a) * (s - b) * (s - c))
s = (a + b + c) / 2
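
-- A small sanity check added for illustration (not part of the original
-- file): a 3-4-5 right triangle has area 6.  Heron's formula yields the
-- value exactly; the trigonometric variant agrees up to floating-point
-- rounding error.
--
-- >>> areaTriangleHeron 3 4 5
-- 6.0
checkAreas :: Bool
checkAreas = abs (areaTriangleTrig 3 4 5 - areaTriangleHeron 3 4 5) < 1e-9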
| fredmorcos/attic | snippets/haskell/triangle-area.hs | isc | 391 | 0 | 13 | 208 | 175 | 94 | 81 | 8 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
-- | This module provides facilities for obtaining the types of
-- various Futhark constructs. Typically, you will need to execute
-- these in a context where type information is available as a
-- 'Scope'; usually by using a monad that is an instance of
-- 'HasScope'. The information is returned as a list of 'ExtType'
-- values - one for each of the values the Futhark construct returns.
-- Some constructs (such as subexpressions) can produce only a single
-- value, and their typing functions hence do not return a list.
--
-- Some representations may have more specialised facilities enabling
-- even more information - for example,
-- "Futhark.IR.Mem" exposes functionality for
-- also obtaining information about the storage location of results.
module Futhark.IR.Prop.TypeOf
( expExtType,
expExtTypeSize,
subExpType,
subExpResType,
basicOpType,
mapType,
-- * Return type
module Futhark.IR.RetType,
-- * Type environment
module Futhark.IR.Prop.Scope,
-- * Extensibility
TypedOp (..),
)
where
import Futhark.IR.Prop.Constants
import Futhark.IR.Prop.Reshape
import Futhark.IR.Prop.Scope
import Futhark.IR.Prop.Types
import Futhark.IR.RetType
import Futhark.IR.Syntax
-- | The type of a subexpression.
subExpType :: HasScope t m => SubExp -> m Type
subExpType (Constant val) = pure $ Prim $ primValueType val
subExpType (Var name) = lookupType name
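-- For instance (an illustration added here, not in the original module):
--
-- > subExpType (intConst Int64 2)
--
-- yields @Prim (IntType Int64)@, while @subExpType (Var v)@ returns
-- whatever type the enclosing scope records for @v@.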
-- | The type of a 'SubExpRes' - note that this might refer to names
-- bound in the body containing the result.
subExpResType :: HasScope t m => SubExpRes -> m Type
subExpResType = subExpType . resSubExp
-- | @mapType outersize f@ wraps each element in the return type of @f@
-- in an array with outer dimension @outersize@.
mapType :: SubExp -> Lambda rep -> [Type]
mapType outersize f =
[ arrayOf t (Shape [outersize]) NoUniqueness
| t <- lambdaReturnType f
]
-- | The type of a primitive operation.
basicOpType :: HasScope rep m => BasicOp -> m [Type]
basicOpType (SubExp se) =
pure <$> subExpType se
basicOpType (Opaque _ se) =
pure <$> subExpType se
basicOpType (ArrayLit es rt) =
pure [arrayOf rt (Shape [n]) NoUniqueness]
where
n = intConst Int64 $ toInteger $ length es
basicOpType (BinOp bop _ _) =
pure [Prim $ binOpType bop]
basicOpType (UnOp _ x) =
pure <$> subExpType x
basicOpType CmpOp {} =
pure [Prim Bool]
basicOpType (ConvOp conv _) =
pure [Prim $ snd $ convOpType conv]
basicOpType (Index ident slice) =
result <$> lookupType ident
where
result t = [Prim (elemType t) `arrayOfShape` shape]
shape = Shape $ sliceDims slice
basicOpType (Update _ src _ _) =
pure <$> lookupType src
basicOpType (FlatIndex ident slice) =
result <$> lookupType ident
where
result t = [Prim (elemType t) `arrayOfShape` shape]
shape = Shape $ flatSliceDims slice
basicOpType (FlatUpdate src _ _) =
pure <$> lookupType src
basicOpType (Iota n _ _ et) =
pure [arrayOf (Prim (IntType et)) (Shape [n]) NoUniqueness]
basicOpType (Replicate (Shape []) e) =
pure <$> subExpType e
basicOpType (Replicate shape e) =
pure . flip arrayOfShape shape <$> subExpType e
basicOpType (Scratch t shape) =
pure [arrayOf (Prim t) (Shape shape) NoUniqueness]
basicOpType (Reshape [] e) =
result <$> lookupType e
where
result t = [Prim $ elemType t]
basicOpType (Reshape shape e) =
result <$> lookupType e
where
result t = [t `setArrayShape` newShape shape]
basicOpType (Rearrange perm e) =
result <$> lookupType e
where
result t = [rearrangeType perm t]
basicOpType (Rotate _ e) =
pure <$> lookupType e
basicOpType (Concat i x _ ressize) =
result <$> lookupType x
where
result xt = [setDimSize i xt ressize]
basicOpType (Copy v) =
pure <$> lookupType v
basicOpType (Manifest _ v) =
pure <$> lookupType v
basicOpType Assert {} =
pure [Prim Unit]
basicOpType (UpdateAcc v _ _) =
pure <$> lookupType v
-- | The type of an expression.
expExtType ::
(HasScope rep m, TypedOp (Op rep)) =>
Exp rep ->
m [ExtType]
expExtType (Apply _ _ rt _) = pure $ map (fromDecl . declExtTypeOf) rt
expExtType (If _ _ _ rt) = pure $ map extTypeOf $ ifReturns rt
expExtType (DoLoop merge _ _) =
pure $ loopExtType $ map fst merge
expExtType (BasicOp op) = staticShapes <$> basicOpType op
expExtType (WithAcc inputs lam) =
fmap staticShapes $
(<>)
<$> (concat <$> traverse inputType inputs)
<*> pure (drop num_accs (lambdaReturnType lam))
where
inputType (_, arrs, _) = traverse lookupType arrs
num_accs = length inputs
expExtType (Op op) = opType op
-- | The number of values returned by an expression.
expExtTypeSize ::
(RepTypes rep, TypedOp (Op rep)) =>
Exp rep ->
Int
expExtTypeSize = length . feelBad . expExtType
-- FIXME, this is a horrible quick hack.
newtype FeelBad rep a = FeelBad {feelBad :: a}
instance Functor (FeelBad rep) where
fmap f = FeelBad . f . feelBad
instance Applicative (FeelBad rep) where
pure = FeelBad
f <*> x = FeelBad $ feelBad f $ feelBad x
instance RepTypes rep => HasScope rep (FeelBad rep) where
lookupType = const $ pure $ Prim $ IntType Int64
askScope = pure mempty
-- | Given the parameters of a loop, produce the return type.
loopExtType :: Typed dec => [Param dec] -> [ExtType]
loopExtType params =
existentialiseExtTypes inaccessible $ staticShapes $ map typeOf params
where
inaccessible = map paramName params
-- | Any operation must define an instance of this class, which
-- describes the type of the operation (at the value level).
class TypedOp op where
opType :: HasScope t m => op -> m [ExtType]
instance TypedOp () where
opType () = pure []
| HIPERFIT/futhark | src/Futhark/IR/Prop/TypeOf.hs | isc | 5,812 | 0 | 11 | 1,166 | 1,717 | 884 | 833 | 126 | 1 |
{-# LANGUAGE JavaScriptFFI #-}
module Doppler.GHCJS.VirtualDOM.VDom (
VDom, requireVDom
) where
import GHCJS.Types (JSVal)
newtype VDom = VDom JSVal
foreign import javascript interruptible "require(['virtual-dom'], $c);"
requireVDom :: IO VDom
| oinuar/doppler | src/Doppler/GHCJS/VirtualDOM/VDom.hs | mit | 254 | 3 | 6 | 39 | 51 | 32 | 19 | -1 | -1 |
import Data.List
import Data.Text hiding (intercalate, map)
import System.Hclip
import Text.ParserCombinators.Parsec
-- | Strip, with Strings instead of Text for arguments
trim :: String -> String
trim = unpack . strip . pack
-- | A single cell of a matrix
body :: Parser String
body = many1 $ noneOf "&\\"
-- | A single row of the matrix
row :: Parser [String]
row = sepBy body (char '&')
-- | A matrix parser (excluding wrappers)
matrix :: Parser [[String]]
matrix = sepBy row (try (string "\\\\"))
-- | A wrapped matrix parser
wrappedMatrix :: Parser [[String]]
wrappedMatrix = do
optional (try $ string "\\begin{bmatrix}")
mat <- matrix
optional (try $ string "\\end{bmatrix}")
return mat
-- | Trim every element of the matrix
cleanUp :: [[String]] -> [[String]]
cleanUp (x : xs) = map trim x : cleanUp xs
cleanUp [] = []
-- | Generate a wolfram array from an array of arrays of strings
wolfram :: [[String]] -> String
wolfram x = "{" ++ wolfram' ++ "}"
where
wolfram' = intercalate ",\n " (map row x)
row y = "{" ++ row' y ++ "}"
row' y = intercalate ", " y
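
-- Example (added for illustration):
--
-- >>> putStrLn (wolfram [["1", "2"], ["3", "4"]])
-- {{1, 2},
--  {3, 4}}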
main :: IO ()
main = do
input <- getClipboard
putStrLn $ "Got input: \n" ++ input ++ "\n"
let result = parse wrappedMatrix "matrix" $ trim input
case result of
Left e -> putStrLn $ "Failed to parse input:\n" ++ show e
Right mat -> do
let s = wolfram $ cleanUp mat
setClipboard s
putStrLn $ "Success! Copied result to clipboard:\n" ++ s
| mystor/matrix-detex | MatrixDetex.hs | mit | 1,604 | 0 | 16 | 464 | 477 | 242 | 235 | 37 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE CPP #-}
module Test.Hspec.Wai.Internal (
WaiExpectation
, WaiSession(..)
, runWaiSession
, runWithState
, withApplication
, getApp
, getState
, formatHeader
) where
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader
import Network.Wai (Application)
import Network.Wai.Test hiding (request)
import Test.Hspec.Core.Spec
import Test.Hspec.Wai.Util (formatHeader)
#if MIN_VERSION_base(4,9,0)
import Control.Monad.Fail
#endif
-- | An expectation in the `WaiSession` monad. Failing expectations are
-- communicated through exceptions (similar to `Test.Hspec.Expectations.Expectation` and
-- `Test.HUnit.Base.Assertion`).
type WaiExpectation st = WaiSession st ()
-- | A <http://www.yesodweb.com/book/web-application-interface WAI> test
-- session that carries the `Application` under test and some client state.
newtype WaiSession st a = WaiSession {unWaiSession :: ReaderT st Session a}
deriving (Functor, Applicative, Monad, MonadIO
#if MIN_VERSION_base(4,9,0)
, MonadFail
#endif
)
runWaiSession :: WaiSession () a -> Application -> IO a
runWaiSession action app = runWithState action ((), app)
runWithState :: WaiSession st a -> (st, Application) -> IO a
runWithState action (st, app) = runSession (flip runReaderT st $ unWaiSession action) app
withApplication :: Application -> WaiSession () a -> IO a
withApplication = flip runWaiSession
instance Example (WaiExpectation st) where
type Arg (WaiExpectation st) = (st, Application)
evaluateExample e p action = evaluateExample (action $ runWithState e) p ($ ())
getApp :: WaiSession st Application
getApp = WaiSession (lift ask)
getState :: WaiSession st st
getState = WaiSession ask
| hspec/hspec-wai | src/Test/Hspec/Wai/Internal.hs | mit | 1,937 | 0 | 9 | 335 | 431 | 248 | 183 | 37 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SQLTransactionErrorCallback
(newSQLTransactionErrorCallback,
newSQLTransactionErrorCallbackSync,
newSQLTransactionErrorCallbackAsync, SQLTransactionErrorCallback)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SQLTransactionErrorCallback Mozilla SQLTransactionErrorCallback documentation>
newSQLTransactionErrorCallback ::
(MonadDOM m) =>
(SQLError -> JSM ()) -> m SQLTransactionErrorCallback
newSQLTransactionErrorCallback callback
= liftDOM
(SQLTransactionErrorCallback . Callback <$>
function
(\ _ _ [error] ->
fromJSValUnchecked error >>= \ error' -> callback error'))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SQLTransactionErrorCallback Mozilla SQLTransactionErrorCallback documentation>
newSQLTransactionErrorCallbackSync ::
(MonadDOM m) =>
(SQLError -> JSM ()) -> m SQLTransactionErrorCallback
newSQLTransactionErrorCallbackSync callback
= liftDOM
(SQLTransactionErrorCallback . Callback <$>
function
(\ _ _ [error] ->
fromJSValUnchecked error >>= \ error' -> callback error'))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SQLTransactionErrorCallback Mozilla SQLTransactionErrorCallback documentation>
newSQLTransactionErrorCallbackAsync ::
(MonadDOM m) =>
(SQLError -> JSM ()) -> m SQLTransactionErrorCallback
newSQLTransactionErrorCallbackAsync callback
= liftDOM
(SQLTransactionErrorCallback . Callback <$>
asyncFunction
(\ _ _ [error] ->
fromJSValUnchecked error >>= \ error' -> callback error'))
| ghcjs/jsaddle-dom | src/JSDOM/Generated/SQLTransactionErrorCallback.hs | mit | 2,755 | 0 | 13 | 595 | 561 | 335 | 226 | 47 | 1 |
module Instructions where
import Text.ParserCombinators.Parsec
import Control.Applicative hiding (many, (<|>))
type Coordinate = (Integer, Integer)
type Region = (Coordinate, Coordinate)
data Instruction = Instruction Task Region
deriving (Show)
data Task = TurnOn | Toggle | TurnOff
deriving (Show)
instructions = many instruction <* eof
instruction :: GenParser Char st Instruction
instruction =
Instruction <$> task
<* space
<*> region
<* eol
task = try (TurnOn <$ string "turn on") <|>
try (TurnOff <$ string "turn off") <|>
(Toggle <$ string "toggle")
region = (,) <$> coord
<* string " through "
<*> coord
coord = (,) <$> integer
<* char ','
<*> integer
integer = rd <$> many1 digit
where rd = read :: String -> Integer
eol = (char '\n' <|> (char '\r' >> option '\n' (char '\n'))) >> return ()
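
-- Example (added for illustration):
--
-- >>> parse instruction "" "turn on 0,0 through 999,999\n"
-- Right (Instruction TurnOn ((0,0),(999,999)))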
| corajr/adventofcode2015 | 6/Instructions.hs | mit | 947 | 0 | 12 | 279 | 299 | 163 | 136 | 28 | 1 |
module Main where
import Lib
import Text.Printf
import Data.Time.Clock.POSIX
n = 4::Int
main :: IO ()
main = do
startTime <- getPOSIXTime
printf "Maximum product of %d values taken in a straight line from array 'values':\n\t%d"
n $ maxStraightProduct n
stopTime <- getPOSIXTime
printf "\t(%s sec)\n" $ show (stopTime - startTime)
| JohnL4/ProjectEuler | Haskell/Problem011/app/Main.hs | mit | 349 | 0 | 10 | 70 | 91 | 47 | 44 | 12 | 1 |
-- | This module describes the interface (as a data type) that some variant
-- should implement. See `Variant`.
--
{-# LANGUAGE OverloadedStrings #-}
module NetHack.Data.Variant
( Variant()
, monster
, allMonsterNames
, commandPrefix
, variant
, loadVariant )
where
import Control.Applicative
import Control.Monad.IO.Class
import qualified Data.ByteString as B
import Data.List ( find )
import qualified Data.Text as T
import Data.Yaml
import qualified NetHack.Data.Monster as MD
-- | Export a function that returns one of these to add a variant to the bot.
-- See `variant`.
data Variant = Variant { monster :: !(T.Text -> Maybe MD.Monster)
, allMonsterNames :: ![T.Text]
, commandPrefix :: T.Text }
instance FromJSON Variant where
parseJSON (Object v) = do
prefix <- v .: "prefix"
monsters <- v .: "monsters"
return Variant
{
commandPrefix = prefix
, allMonsterNames = fmap MD.moName monsters
, monster = \name -> find ((==) name . MD.moName) monsters
}
parseJSON _ = empty
-- | Builds a `Variant` out of three properties.
variant :: (T.Text -> Maybe MD.Monster) -- ^ Return a monster with the given
-- name or `Nothing` if there is no
-- such monster.
-> [T.Text] -- ^ The list of all monster names.
-> T.Text -- ^ The command prefix for the IRC
-- bot. E.g. "u" for UnNetHack.
-> Variant
variant = Variant
-- | Loads a variant from a YAML file.
loadVariant :: MonadIO m => FilePath -> m Variant
loadVariant fpath = liftIO $ do
bs <- B.readFile fpath
case decodeEither bs of
Left err -> error err
Right var -> return var
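
-- A hedged usage sketch (added for illustration; the file path and monster
-- name below are made up):
--
-- > do var <- loadVariant "variants/unnethack.yaml"
-- >    print (commandPrefix var)
-- >    print (MD.moName <$> monster var "newt")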
| UnNetHack/pinobot | lib/NetHack/Data/Variant.hs | mit | 1,915 | 0 | 15 | 653 | 375 | 211 | 164 | 45 | 2 |
module Euler.E9 where
euler9 :: Int -> Int
euler9 n = x*y*z
where
(x,y,z) = findTriple n
genTriples :: Int -> [(Int, Int, Int)]
genTriples n = [(x,y,z) | x <- [1..n], y <- [x..n], z <- [y..n], x+y+z == n]
isPythTriple :: (Int,Int,Int) -> Bool
isPythTriple (x,y,z) = or
[ x*x + y*y == z*z
, x*x + z*z == y*y
, y*y + z*z == x*x
]
findTriple :: Int -> (Int,Int,Int)
findTriple n = head $ filter isPythTriple $ genTriples n
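
-- Examples (added for illustration):
--
-- >>> findTriple 12
-- (3,4,5)
-- >>> euler9 12
-- 60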
main :: IO ()
main = print $ euler9 1000
| D4r1/project-euler | Euler/E9.hs | mit | 472 | 4 | 11 | 105 | 320 | 174 | 146 | 15 | 1 |
module SyntheticWeb.Client.Executor
( executeTask
) where
import Control.Concurrent (threadDelay)
import Control.Concurrent.STM (atomically)
import Data.Time (NominalDiffTime)
import SyntheticWeb.Client.Http (get, post, put)
import SyntheticWeb.Client.TimedAction (timedAction)
import SyntheticWeb.Counter ( ByteCounter
, activatePattern
, updateByteCount
, updateLatencyTime
, updatePatternTime
, updateSleepTime )
import SyntheticWeb.Client.ExecM ( ExecM
, getCounters
, getActivities
, getGenerator
, liftIO )
import SyntheticWeb.Plan.Types ( Activity (..)
, Duration (..) )
import SyntheticWeb.Statistical (Statistical (Exactly), sample)
-- | Execute one task.
executeTask :: ExecM ()
executeTask = do
((), timeItTook) <-
timedAction $ do
doActivatePattern
mapM_ executeActivity =<< getActivities
doUpdatePatternTime timeItTook
-- | Execute one client activity. Counters related to the activity -
-- timings and byte counters - shall be updated.
executeActivity :: Activity -> ExecM ()
-- | Delay the task worker thread for the specified duration.
executeActivity (SLEEP duration) = do
delay <- sampleDelayTime duration
((), timeItTook) <- timedAction $ liftIO (threadDelay delay)
doUpdateSleepTime timeItTook
-- | Fetch a resource with the specified size.
executeActivity (GET headers download _) = do
((_, byteCount), timeItTook) <- timedAction (get download headers)
doUpdateByteCountAndLatencyTime byteCount timeItTook
-- | Upload to a resource with the specified size.
executeActivity (PUT headers upload) = do
((_, byteCount), timeItTook) <- timedAction (put upload headers)
doUpdateByteCountAndLatencyTime byteCount timeItTook
-- | Perform a post (upload and download) with the specified sizes.
executeActivity (POST headers upload download _) = do
((_, byteCount), timeItTook) <- timedAction (post upload download headers)
doUpdateByteCountAndLatencyTime byteCount timeItTook
sampleDelayTime :: Duration -> ExecM Int
sampleDelayTime (Usec stat) = do
Exactly t <- sample stat =<< getGenerator
return t
sampleDelayTime (Msec stat) = do
Exactly t <- sample stat =<< getGenerator
return (t * 1000)
sampleDelayTime (Sec stat) = do
Exactly t <- sample stat =<< getGenerator
return (t * 1000000)
doActivatePattern :: ExecM ()
doActivatePattern = do
c <- getCounters
liftIO $ atomically (activatePattern c)
doUpdateByteCountAndLatencyTime :: ByteCounter -> NominalDiffTime -> ExecM ()
doUpdateByteCountAndLatencyTime bc t = do
c <- getCounters
liftIO $ atomically $ do
updateByteCount bc c
updateLatencyTime t c
doUpdatePatternTime :: NominalDiffTime -> ExecM ()
doUpdatePatternTime t = do
c <- getCounters
liftIO $ atomically (updatePatternTime t c)
doUpdateSleepTime :: NominalDiffTime -> ExecM ()
doUpdateSleepTime t = do
c <- getCounters
liftIO $ atomically (updateSleepTime t c)
| kosmoskatten/synthetic-web | src/SyntheticWeb/Client/Executor.hs | mit | 3,191 | 0 | 12 | 769 | 791 | 405 | 386 | 70 | 1 |
module AI where
import Control.Monad
import Data.Array.MArray
import Data.Array.IO
import Data.Word
import System.Random
type Index = Int
type Value = Int
type Weight = Value
type Neurons = IOArray Index Value
type Synapses = [(Index, Index, Weight)] -- src, dst, weight
type Goals = [Index]
type Brain = (Neurons, Synapses, Goals)
type Score = Int
type Population = [(Score, Brain)]
newRandomBrain :: Int -> Int -> Goals -> IO Brain
newRandomBrain nbNeurons nbSynapses goals = do
neurons <- newArray (0, nbNeurons) 0
synapses <- replicateM nbSynapses $ do
source <- randomRIO (0, nbNeurons)
destination <- randomRIO (0, nbNeurons)
weight <- randomRIO (minBound, maxBound)
return (source, destination, weight)
return (neurons, synapses, goals)
think :: Brain -> [Value] -> IO [Value]
think (neurons, synapses, goals) inputs = do
-- Clear existing neurons
(l, h) <- getBounds neurons
forM_ [l..h] $ \i -> do
writeArray neurons i 0
-- Write inputs
forM_ (zip [1..] inputs) $ \(i, v) -> do
writeArray neurons i v
-- Fire the synapses again
forM_ synapses $ \(src, dst, weight) -> do
result <- readArray neurons src
writeArray neurons dst (result + weight)
-- Yield goals
forM goals $ \i -> do
readArray neurons i
createPopulation :: Int -> Int -> Int -> Goals -> IO Population
createPopulation amount nbNeurons nbSynapses goals = replicateM amount $ do
brain <- newRandomBrain nbNeurons nbSynapses goals
return (0, brain)
testPopulation :: Population -> ([Value] -> IO Score) -> [Value] -> IO Population
testPopulation population fitness inputs = forM population $ \(oldScore, brain) -> do
outputs <- think brain inputs
score <- fitness outputs
return (oldScore + score, brain) | nitrix/ai | src/AI.hs | mit | 1,853 | 0 | 13 | 445 | 650 | 344 | 306 | 45 | 1 |
import Data.List
isTriangular threeNumbers = x + y > z
where
[x, y, z] = sort threeNumbers
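
-- Examples (added for illustration):
--
-- >>> isTriangular [3, 4, 5]
-- True
-- >>> isTriangular [1, 2, 10]
-- False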
countTrue = length . filter id
parseInputLine = map (read :: String -> Integer) . words
main = do
input <- getContents
print . countTrue . map (isTriangular . parseInputLine) . lines $ input
| lzlarryli/advent_of_code_2016 | day3/part1.hs | mit | 297 | 0 | 12 | 66 | 117 | 60 | 57 | 8 | 1 |
module Y2017.M03.D16.Solution where
import Data.Maybe (fromMaybe)
import Data.Set (Set)
import qualified Data.Set as Set
-- below imports available from 1HaskellADay git respository
import Analytics.Theory.Number.Prime
import Y2017.M03.D15.Solution (uniqueValuesUpTo)
{--
So, yesterday, when we solved the Exercise imported above, we saw that we had
614 unique values with the max value being Just 126410606437752. That max value
is quite the spicy meatball, however. But a help here is that we are looking
for prime-square-free numbers, or, that is to say more precisely, numbers that
are not divisible by the squares of the primes. So, let's winnow down our list
a bit.
--}
squareFreed :: Prime -> Set Integer -> Set Integer
squareFreed prime = Set.filter ((/= 0) . (`mod` (prime ^ 2)))
{--
How many values in uniqueValuesUpTo 51 when that list is squareFreed of the
first Prime, 2? (remember to square 2 as the factor to check against)
>>> length (squareFreed (head primes) (uniqueValuesUpTo 51))
286
>>> fst <$> Set.maxView (squareFreed (head primes) (uniqueValuesUpTo 51))
Just 18053528883775
Boom! A marked improvement! Let's do the same for the next prime, 3. First,
assign smr0 to the squareFreed 2 value:
>>> let smr0 = squareFreed (head primes) (uniqueValuesUpTo 51)
and repeat the above for the next prime (head (tail primes)). What is the new
length and new max value you get for your newly filtered set from smr0?
assign smr1 to that newer smaller subset.
>>> let smr1 = squareFreed (head (tail primes)) smr0
>>> length smr1
185
>>> fst <$> Set.maxView smr1
Just 18053528883775
No change to the max, let's go a bit further. Now how about for the next prime?
>>> let smr2 = squareFreed (head (drop 2 primes)) smr1
>>> length smr2
162
>>> fst <$> Set.maxView smr2
Just 9762479679106
>>> sqrt . fromIntegral <$> it
Just 3124496.708128527
This shows after filtering out only 3 prime-squares we've significantly reduced
the number of values we need to test against AND the maximum value prime we need
to compute to test.
So.
Today's Haskell problem. Using the above algorithm, filter the unique values
of the Pascal's Triangle up to row 51 down to only the square-free numbers,
then sum the resulting set. What is the value you get?
--}
sqFreeSieve :: Set Integer -> Set Integer
sqFreeSieve = sfs primes
sfs :: [Prime] -> Set Integer -> Set Integer
sfs (p:rimes) uniq =
if fromMaybe 0 (fst <$> Set.maxView uniq) < p * p then uniq
else sfs rimes (squareFreed p uniq)
{--
>>> length (sqFreeSieve (uniqueValuesUpTo 51))
158
Eheh! We found only four more beasties for all that work?!? ;)
>>> sum (sqFreeSieve (uniqueValuesUpTo 51))
... some value
--}
| geophf/1HaskellADay | exercises/HAD/Y2017/M03/D16/Solution.hs | mit | 2,693 | 0 | 12 | 475 | 223 | 126 | 97 | 14 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Keymap.Vim.Ex.Commands.Edit
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Implements the :edit ex command.
module Yi.Keymap.Vim.Ex.Commands.Edit (parse) where
import Control.Applicative
import Control.Monad
import qualified Data.Text as T
import qualified Text.ParserCombinators.Parsec as P
import Yi.Editor
import Yi.File
import Yi.Keymap
import Yi.Keymap.Vim.Common
import qualified Yi.Keymap.Vim.Ex.Commands.Common as Common
import Yi.Keymap.Vim.Ex.Types
parse :: EventString -> Maybe ExCommand
parse = Common.parse $ do
tab <- P.many (P.string "tab")
void $ P.try ( P.string "edit") <|> P.string "e"
void $ P.many1 P.space
filename <- T.pack <$> P.many1 P.anyChar
return $! edit (not (null tab)) filename
edit :: Bool -> T.Text -> ExCommand
edit tab f = Common.impureExCommand {
cmdShow = showEdit tab f
, cmdAction = YiA $ do
when tab $ withEditor newTabE
void . editFile $ T.unpack f
, cmdComplete = (fmap . fmap)
(showEdit tab) (Common.filenameComplete f)
}
showEdit :: Bool -> T.Text -> T.Text
showEdit tab f = (if tab then "tab" else "") `T.append` "edit " `T.append` f
| atsukotakahashi/wi | src/library/Yi/Keymap/Vim/Ex/Commands/Edit.hs | gpl-2.0 | 1,401 | 0 | 13 | 337 | 389 | 217 | 172 | 30 | 2 |
module Handler.Servers where
import Import
import Control.Exception (IOException, try)
import Default (server)
import Service.Interface (get_task_types)
import Types.TaskTree (TaskTree)
getServersR :: Handler Html
getServersR = postServersR
postServersR :: Handler Html
postServersR = do
((result, formWidget), formEnctype) <- runFormPost $ serversForm Nothing
case result of
FormSuccess s -> redirect $ ServerR s
_ -> return ()
defaultLayout $
formToWidget ServersR Nothing formEnctype formWidget
serversForm :: Maybe ServerUrl -> Form ServerUrl
serversForm mserver = do
renderBootstrap3 BootstrapBasicForm $
areq serverField (withAutofocus $ bfs MsgServer) (Just $ maybe (pack server) id mserver)
        <* bootstrapSubmit (BootstrapSubmit MsgServerWählen "btn-success" [])
where
serverField = flip checkM textField $ \ server' -> do
check' <- lift $ try $ get_task_types $ unpack server' :: HandlerT Autotool IO (Either IOException [TaskTree])
case check' of
Left e -> return . Left . pack . show $ e
Right _ -> return $ Right server'
| marcellussiegburg/autotool | yesod/Handler/Servers.hs | gpl-2.0 | 1,099 | 0 | 17 | 211 | 355 | 178 | 177 | 26 | 2 |
module MarchUp.SimpleText (module Data.Monoid, element, textual) where
import qualified MarchUp.Text as T
import Data.Monoid
import Data.Traversable
import Data.Foldable
import Control.Applicative
type Text = T.Text String
element :: Show a => a -> Text
element = T.Elem . show
textual = T.Text
instance Show Text where
showsPrec p = T.linearize showString . fmap showString
| jyp/MarXup | MarXup/SimpleText.hs | gpl-2.0 | 386 | 0 | 8 | 64 | 121 | 68 | 53 | 12 | 1 |
module TAP (
planTests, planNoPlan, planSkipAll,
runTests, is, isnt, like, unlike, pass, fail, ok,
skip, skipUnless, toDo,
diag, bailOut
) where
import System.IO
import System.Exit
import Control.Monad.State
import Control.Exception
import Text.Regex.Posix
data TAPState = TAPState {
planSet :: Bool,
noPlan :: Bool,
skipAll :: Bool,
testDied :: Bool,
expectedTests :: Int,
executedTests :: Int,
failedTests :: Int,
exitCode :: Int
} deriving (Show)
initState = TAPState {
planSet = False,
noPlan = False,
skipAll = False,
testDied = False,
expectedTests = 0,
executedTests = 0,
failedTests = 0,
exitCode = 0
}
type TAP a = StateT TAPState IO a
planTests :: Int -> Maybe String -> TAP Int
planTests n s = do
_assertNotPlanned
when (n == 0) $ _die "You said to run 0 tests! You've got to run something."
lift $ _printPlan n s
modify (\x -> x {planSet = True, expectedTests = n})
return n
planNoPlan :: TAP Int
planNoPlan = do
_assertNotPlanned
modify (\x -> x {planSet = True, noPlan = True})
return 0
planSkipAll :: Maybe String -> TAP Int
planSkipAll s = do
_assertNotPlanned
lift . _printPlan 0 . Just $ "Skip " ++
case s of
Just s -> s
otherwise -> ""
modify (\x -> x {planSet = True, skipAll = True})
_exit $ Just 0
_assertNotPlanned :: TAP ()
_assertNotPlanned = do
ts <- get
when (planSet ts) $ _die "You tried to plan twice!"
_assertPlanned :: TAP ()
_assertPlanned = do
ts <- get
when (not $ planSet ts) $ _die "You tried to run a test without a plan! Gotta have a plan."
_printPlan :: Int -> Maybe String -> IO Int
_printPlan n s = do
putStrLn $ "1.." ++ show n ++
case s of
Just s -> " # " ++ s
otherwise -> ""
return n
is :: (Show a, Eq a) => a -> a -> Maybe String -> TAP Bool
is result expected msg = do
rc <- ok (result == expected) msg
when (not rc) $ do
diag $ " got: '" ++ (show result) ++ "'"
diag $ " expected: '" ++ (show expected) ++ "'"
return rc
isnt :: (Show a, Eq a) => a -> a -> Maybe String -> TAP Bool
isnt result expected msg = do
rc <- ok (result /= expected) msg
when (not rc) $ do
diag $ " got: '" ++ (show result) ++ "'"
diag $ " didn't expect: '" ++ (show expected) ++ "'"
return rc
like :: String -> String -> Maybe String -> TAP Bool
like target pattern msg = do
rc <- ok (_matches target pattern) msg
when (not rc) $ diag $ " '" ++ target ++ "' doesn't match '" ++ pattern ++ "'"
return rc
unlike :: String -> String -> Maybe String -> TAP Bool
unlike target pattern msg = do
rc <- ok (not $ _matches target pattern) msg
when (not rc) $ diag $ " '" ++ target ++ "' matches '" ++ pattern ++ "'"
return rc
pass :: Maybe String -> TAP Bool
pass s = ok True s
fail :: Maybe String -> TAP Bool
fail s = ok False s
ok :: Bool -> Maybe String -> TAP Bool
ok result msg = do
_assertPlanned
modify (\x -> x {executedTests = executedTests x + 1})
case msg of
Just s -> when (_matches s "^[0-9]+$") $ do
diag $ " You named your test '" ++ s
++ "'. You shouldn't use numbers for your test names."
diag $ " Very confusing."
otherwise -> return ()
when (not result) $ do
lift $ putStr "not "
modify (\x -> x {failedTests = failedTests x + 1})
ts <- get
lift . putStr $ "ok " ++ (show $ executedTests ts)
case msg of
Just s -> lift . putStr $ " - " ++ s
otherwise -> return ()
-- TODO
lift $ putStrLn ""
-- STACK TRACE
return result
_matches :: String -> String -> Bool
_matches "" _ = False
_matches _ "" = False
_matches target pattern = target =~ pattern :: Bool
_is_diag :: (Show a) => a -> a -> TAP ()
_is_diag result expected = do
diag $ " got: '" ++ (show result) ++ "'"
diag $ " expected: '" ++ (show expected) ++ "'"
skip :: Int -> Maybe String -> TAP Int
skip n reason = do
let msg = case reason of Just s -> s
otherwise -> ""
forM_ [1 .. n] (\n' -> do
modify (\x -> x {executedTests = executedTests x + 1})
ts <- get
lift . putStrLn $ "ok " ++ (show $ executedTests ts) ++ " # skip: " ++ msg)
return n
skipUnless :: Bool -> Int -> Maybe String -> TAP a -> TAP Int
skipUnless cond n reason tap = do
if cond
then do
tap
return 0
else do
skip n reason
diag :: String -> TAP ()
diag s = lift . putStrLn $ "# " ++ s
_die :: String -> TAP a
_die s = do
lift $ hPutStrLn stderr s
modify (\x -> x {testDied = True})
_exit $ Just 255
bailOut :: String -> TAP a
bailOut s = do
lift $ hPutStrLn stderr s
_exit $ Just 255
_wrapup :: TAP ()
_wrapup = do
ts <- get
let s n = if (n > 1) then "s" else ""
let err | not $ planSet ts = diag "Looks like your test died before it could output anything." >> return True
| testDied ts = diag ("Looks like your test died just after " ++ (show $ executedTests ts)) >> return True
| otherwise = return False
stop <- err
if stop
then return ()
else do
when ((not $ noPlan ts)&&((expectedTests ts) < (executedTests ts))) $ do
let extra = (executedTests ts) - (expectedTests ts)
diag $ "Looks like you planned " ++ (show $ expectedTests ts) ++ " test" ++ (s $ expectedTests ts)
++ " but ran " ++ (show extra) ++ " extra."
modify (\x -> x {exitCode = -1})
when ((not $ noPlan ts)&&((expectedTests ts) > (executedTests ts))) $ do
diag $ "Looks like you planned " ++ (show $ expectedTests ts) ++ " test" ++ (s $ expectedTests ts)
++ " but only ran " ++ (show $ executedTests ts)
when (failedTests ts > 0) $ do
diag $ "Looks like you failed " ++ (show $ failedTests ts) ++ " test" ++ (s $ failedTests ts)
++ " of " ++ (show $ executedTests ts)
_exit :: Maybe Int -> TAP a
_exit mrc = do
case mrc of
Just rc -> modify (\x -> x {exitCode = rc})
otherwise -> return ()
ts <- get
when (exitCode ts == 0) $ do
rc <- if ((noPlan ts)||(not $ planSet ts))
then return $ failedTests ts
else if ((expectedTests ts) < (executedTests ts))
then return $ (executedTests ts) - (expectedTests ts)
else return $ ((failedTests ts) + ((expectedTests ts) - (executedTests ts)))
modify (\x -> x {exitCode = rc})
_wrapup
ts <- get
let rc = exitCode ts
lift . exitWith $ if (rc == 0) then ExitSuccess else ExitFailure rc
runTests :: TAP a -> IO (a, TAPState)
-- Add exception handling here?
runTests s = runStateT (s >> _exit Nothing) initState
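
-- A minimal usage sketch (added for illustration, not part of the original
-- module; it assumes this module is imported unqualified):
--
-- > import Control.Monad (void)
-- >
-- > main :: IO ()
-- > main = void . runTests $ do
-- >   planTests 2 Nothing
-- >   is (1 + 1) 2 (Just "addition works")
-- >   like "haskell" "hask" (Just "pattern matches")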
| goozbach/bash-tap-functions | tap.hs | gpl-2.0 | 7,041 | 0 | 21 | 2,297 | 2,742 | 1,355 | 1,387 | 191 | 5 |
-- Copyright (C) 2008 JP Bernardy
-- | Utilities shared by various UIs
module Yi.UI.Utils where
import Yi.Buffer
import Yi.Prelude
import Prelude (Ordering(..))
import Yi.Window
import Control.Arrow (second)
import Data.Monoid
import Yi.Style
import Data.List (zip, repeat, span, dropWhile, length, zipWith, transpose, scanl, take, intercalate, takeWhile, reverse)
import Yi.Syntax (Span(..))
import Data.List.Split (splitEvery)
import Yi.String (padLeft)
import Control.Monad.State (runState,modify)
indexedAnnotatedStreamB :: Point -> BufferM [(Point, Char)]
indexedAnnotatedStreamB p = do
text <- indexedStreamB Forward p
annots <- withSyntaxB modeGetAnnotations
return $ spliceAnnots text (dropWhile (\s -> spanEnd s < p) (annots p))
applyHeights :: Traversable t => [Int] -> t Window -> t Window
applyHeights heights ws = fst $ runState (mapM distribute ws) heights
where distribute win = case isMini win of
True -> return win {height = 1}
False -> do h <- gets head
modify tail
return win {height = h}
spliceAnnots :: [(Point,Char)] -> [Span String] -> [(Point,Char)]
spliceAnnots text [] = text
spliceAnnots text (Span start x stop:anns) = l ++ zip (repeat start) x ++ spliceAnnots r anns
where (l,rest) = span ((start >) . fst) text
(_,r) = span ((stop >) . fst) rest
-- | Turn a sequence of (from,style,to) strokes into a sequence
-- of picture points (from,style), taking special care to
-- ensure that the points are strictly increasing and introducing
-- padding segments where necessary.
-- Precondition: Strokes are ordered and not overlapping.
strokePicture :: [Span (Endo a)] -> [(Point,(a -> a))]
strokePicture [] = []
strokePicture wholeList@((Span leftMost _ _):_) = helper leftMost wholeList
where helper :: Point -> [Span (Endo a)] -> [(Point,(a -> a))]
helper prev [] = [(prev,id)]
helper prev ((Span l f r):xs)
| prev < l = (prev, id) : (l,appEndo f) : helper r xs
| otherwise = (l,appEndo f) : helper r xs
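-- A worked example (added for illustration):
--
-- > strokePicture [Span 5 f 10] == [(5, appEndo f), (10, id)]
--
-- that is, the modifier @f@ applies from point 5 up to point 10, after
-- which the picture falls back to the identity.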
-- | Paint the given stroke-picture on top of an existing picture
paintStrokes :: (a -> a) -> a -> [(Point,(a -> a))] -> [(Point,a)] -> [(Point,a)]
paintStrokes f0 _ [] lx = fmap (second f0) lx
paintStrokes _ x0 lf [] = fmap (second ($ x0)) lf
paintStrokes f0 x0 lf@((pf,f):tf) lx@((px,x):tx) =
case pf `compare` px of
LT -> (pf, f x0):paintStrokes f x0 tf lx
EQ -> (pf, f x ):paintStrokes f x tf tx
GT -> (px, f0 x ):paintStrokes f0 x lf tx
paintPicture :: a -> [[Span (Endo a)]] -> [(Point,a)]
paintPicture a = foldr (paintStrokes id a . strokePicture) []
attributesPictureB :: UIStyle -> Maybe SearchExp -> Region -> [[Span StyleName]] -> BufferM [(Point,Attributes)]
attributesPictureB sty mexp region extraLayers =
paintPicture (baseAttributes sty) <$>
fmap (fmap (fmap ($ sty))) <$>
(extraLayers ++) <$>
strokesRangesB mexp region
attributesPictureAndSelB :: UIStyle -> Maybe SearchExp -> Region -> BufferM [(Point,Attributes)]
attributesPictureAndSelB sty mexp region = do
selReg <- getSelectRegionB
showSel <- getA highlightSelectionA
rectSel <- getA rectangleSelectionA
let styliseReg reg = Span (regionStart reg) selectedStyle (regionEnd reg)
extraLayers | rectSel && showSel = (:[]) . fmap styliseReg <$> blockifyRegion selReg
| showSel = return [[styliseReg selReg]]
| otherwise = return []
attributesPictureB sty mexp region =<< extraLayers
-- | Arrange a list of items in columns over at most @maxNumberOfLines@ lines
arrangeItems :: [String] -> Int -> Int -> [String]
arrangeItems items maxWidth maxNumberOfLines = take maxNumberOfLines $ snd choice
where choice = maximumBy (compare `on` fst) arrangements
arrangements = fmap (arrangeItems' items maxWidth) (reverse [1..maxNumberOfLines])
-- | Arrange a list of items in columns over @numberOfLines@ lines.
arrangeItems' :: [String] -> Int -> Int -> (Int, [String])
arrangeItems' items maxWidth numberOfLines = (fittedItems,theLines)
where columns = splitEvery numberOfLines items
columnsWidth = fmap (maximum . fmap length) columns
totalWidths = scanl (\x y -> 1 + x + y) 0 columnsWidth
shownItems = scanl (+) 0 (fmap length columns)
fittedItems = snd $ last $ takeWhile ((<= maxWidth) . fst) $ zip totalWidths shownItems
theLines = fmap (intercalate " " . zipWith padLeft columnsWidth) $ transpose columns
| codemac/yi-editor | src/Yi/UI/Utils.hs | gpl-2.0 | 4,612 | 0 | 14 | 1,058 | 1,677 | 890 | 787 | 76 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE DeriveDataTypeable #-}
----------------------------------------------------------------------------------
-- |
-- Module : Tct.Method.Bounds.Automata
-- Copyright : (c) Martin Avanzini <[email protected]>,
-- Georg Moser <[email protected]>,
-- Andreas Schnabl <[email protected]>
-- License : LGPL (see COPYING)
-- Maintainer : Martin Avanzini <[email protected]>,
-- Andreas Schnabl <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- This module implements automata functionality as employed by
-- the bounds processor.
-----------------------------------------------------------------------------------
module Tct.Method.Bounds.Automata where
import Data.Typeable
import qualified Data.Set as Set
import qualified Data.IntMap as IMap
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.IntMap (IntMap)
import Data.Map (Map)
import Data.Set (Set)
import Control.Monad.State.Class (MonadState(..))
import qualified Control.Monad.State.Lazy as State
import Termlib.Utils (Enumerateable(..), PrettyPrintable(..))
import Termlib.FunctionSymbol (Symbol, Signature)
import Termlib.Term (Term(..))
import Termlib.Trs.PrettyPrint (pprintTrs)
import Text.PrettyPrint.HughesPJ hiding (empty)
-- | This datatype represents the /enrichment/ employed.
data Enrichment =
Match -- ^ Matchbounds.
| Roof -- ^ Roofbounds.
| Top -- ^ Topbounds.
deriving (Typeable, Enum, Bounded, Eq)
instance Show Enrichment where
show Match = "match"
show Roof = "roof"
show Top = "top"
data WeakBoundedness = WeakMayExceedBound | WeakMayNotExceedBound
-- TODO:MA: which types should be strict?
type Label = Int
type LSym = (Symbol,Label)
type State = Int
data LTerm = F LSym [LTerm]
| S State
deriving (Eq, Ord)
instance PrettyPrintable LTerm where
pprint (S s) = pprint s
pprint (F (f,l) ts) = text (show $ enum f) <> text "_" <> text (show l) <> parens ppts
where ppts = hcat $ punctuate (text ",") [pprint ti | ti <- ts]
instance Show LTerm where
show = show . pprint
data Rule = Collapse LSym [State] State
| Epsilon State State deriving (Eq, Ord, Show)
ppRule :: (Symbol -> Doc) -> Rule -> Doc
ppRule _ (Epsilon p q) = text (show p) <+> text "->" <+> text (show q)
ppRule ppSym (Collapse (f,l) args q) = pplhs <+> text "->" <+> text (show q)
where pplhs = ppSym f <> text "_" <> text (show l) <> parens ppargs
ppargs = hsep $ punctuate (text ",") [text (show ai) | ai <- args]
instance PrettyPrintable (Rule, Signature) where
pprint (r,sig) = ppRule (\ f -> pprint (f,sig)) r
instance PrettyPrintable ([Rule], Signature) where
pprint (rules, sig) = pprintTrs (\ r -> pprint (r,sig)) rules
instance PrettyPrintable (Automaton, Signature) where
pprint (a, sig) = pprint ((toRules a), sig)
instance PrettyPrintable [Rule] where
pprint rules = pprintTrs (\ r -> ppRule (text . show) r) rules
instance PrettyPrintable (Automaton) where
pprint = pprint . toRules
-- TODO:MA: sym -> ... in both automata
type FwdAutomaton = IntMap (IntMap (Map [State] (Set State)))
-- sym -> l -> args -> qs <=> forall q \in qs. sym_l(args) -> q \in A
type BwdAutomaton = IntMap (IntMap (IntMap (Set [State])))
-- sym -> q -> l -> argss <=> forall args \in argss. sym_l(args) -> q \in A
data Automaton = Automaton { fwd :: FwdAutomaton
, bwd :: BwdAutomaton
, fresh :: State
, maxlabel :: Label}
deriving (Eq, Show)
size :: LTerm -> Int
size (F _ ts) = 1 + sum (map size ts)
size (S _) = 0
isEpsilonRule :: Rule -> Bool
isEpsilonRule (Epsilon _ _) = True
isEpsilonRule (Collapse _ _ _) = False
lift :: Symbol -> Label -> LSym
lift = (,)
base :: LSym -> Symbol
base = fst
height :: LSym -> Label
height = snd
baseTerm :: LTerm -> Term
baseTerm (F f ts) = Fun (base f) $ map baseTerm ts
baseTerm (S _) = error "Cannot convert a labeled term with Tree automaton states back to a normal term"
toRules :: Automaton -> [Rule]
toRules a = [Collapse (invEnum f,l) args q | (f,m1) <- IMap.toList $ fwd a
, (l,m2) <- IMap.toList m1
, (args, qs) <- Map.toList m2
, q <- Set.toList qs]
fromRules :: [Rule] -> Automaton
fromRules rs = foldl (\ a r -> insert r a) empty rs
empty :: Automaton
empty = Automaton IMap.empty IMap.empty 0 0
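-- A quick sketch of how rules are stored and queried (assuming some symbol
-- @f@ is available; labels and states are plain 'Int's):
--
-- > let a = fromRules [Collapse (f,0) [1,2] 3]
-- > step a (f,0) [1,2]   -- Set.fromList [3]
-- > bstep a (f,0) 3      -- Set.fromList [[1,2]]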
freshState :: Automaton -> (State, Automaton)
freshState a = (fr, Automaton (fwd a) (bwd a) (fr + 1) (maxlabel a))
where fr = fresh a
freshStates :: Int -> Automaton -> ([State], Automaton)
freshStates 0 a = ([], a)
freshStates i a = case freshStates (i - 1) a' of (qs, a'') -> ((q:qs),a'')
where (q, a') = freshState a
fwdInsert :: LSym -> [State] -> State -> FwdAutomaton -> FwdAutomaton
fwdInsert (f,l) qs q a = IMap.alter alter1 (enum f) a
where default3 = Set.singleton q
default2 = Map.singleton qs default3
default1 = IMap.singleton l default2
alter1 = Just . maybe default1 (\ m1 -> IMap.alter alter2 l m1)
alter2 = Just . maybe default2 (\ m2 -> Map.alter alter3 qs m2)
alter3 = Just . maybe default3 (\ ps -> Set.insert q ps)
bwdInsert :: LSym -> [State] -> State -> BwdAutomaton -> BwdAutomaton
bwdInsert (f,l) qs q a = IMap.alter alter1 (enum f) a
where default3 = Set.singleton qs
default2 = IMap.singleton l default3
default1 = IMap.singleton q default2
alter1 = Just . maybe default1 (\ m1 -> IMap.alter alter2 q m1)
alter2 = Just . maybe default2 (\ m2 -> IMap.alter alter3 l m2)
alter3 = Just . maybe default3 (\ ps -> Set.insert qs ps)
-- MA:TODO verify that fresh is always "fresh"
insert :: Rule -> Automaton -> Automaton
insert (Collapse sym args q) (Automaton f b fr l) = Automaton (fwdInsert sym args q f) (bwdInsert sym args q b) (maximum $ [fr, q + 1] ++ [a + 1 | a <- args]) (max l $ height sym)
insert (Epsilon p q) (Automaton f b fr l) = Automaton f' b' (maximum [fr, p + 1, q + 1]) l
where f' = IMap.map (IMap.map $ Map.map addForwardRight) f
addForwardRight ps = if p `Set.member` ps then Set.insert q ps else ps
b' = IMap.map addBackwardRight b
addBackwardRight mp = case IMap.lookup p mp of
Just mp' -> addBackwardRight2 mp' mp
Nothing -> mp
addBackwardRight2 mp' mp = IMap.insertWith addBackwardRight3 q mp' mp
addBackwardRight3 = IMap.unionWith Set.union
-- f'' = IMap.map (IMap.map addForwardLeft) f'
-- addForwardLeft mp = foldr addForwardLeft2 mp (Map.keys mp)
-- addForwardLeft2 k mp = Set.fold (addForwardLeft3 k) mp (modifiedArgs k)
-- addForwardLeft3 k k' mp = Map.insertWith Set.union k' (fromJust $ Map.lookup k mp) mp
-- b'' = IMap.map (IMap.map $ IMap.map $ Set.unions . Set.toList . Set.map modifiedArgs) b'
-- modifiedArgs [] = Set.singleton []
-- modifiedArgs (q':qs) | q == q' = let subresult = modifiedArgs qs in Set.map ((:) p) subresult `Set.union` Set.map ((:) q) subresult
-- | otherwise = Set.map ((:) q') $ modifiedArgs qs
mkFreshState :: MonadState Automaton m => m State
mkFreshState = do a <- State.get
let (qi,a') = freshState a
State.put a'
return qi
mkInsertRule :: MonadState Automaton m => Rule -> m ()
mkInsertRule r = State.modify (insert r)
step :: Automaton -> LSym -> [State] -> Set State
-- q \in (step A f_l qs) <=> f_l(qs) -> q
step a (f,l) qs = fromMaybe Set.empty look
where look = do m1 <- IMap.lookup (enum f) (fwd a)
m2 <- IMap.lookup l m1
Map.lookup qs m2
bstep :: Automaton -> LSym -> State -> Set [State]
-- qs \in bstep f_l q <=> f_l(qs) -> q
bstep a (f,l) q = fromMaybe Set.empty look
where look = do m1 <- IMap.lookup (enum f) (bwd a)
m2 <- IMap.lookup q m1
IMap.lookup l m2
bstepUL :: Automaton -> Symbol -> State -> [(Label,Set [State])]
-- (l,[...,qs,...]) \in bstep f q <=> f_l(qs) -> q
bstepUL a f q = fromMaybe [] look
where look = do m1 <- IMap.lookup (enum f) (bwd a)
m2 <- IMap.lookup q m1
return $ IMap.toList m2
rulesDefiningUL :: Automaton -> Symbol -> [(Label,[State], Set State)]
-- (l,qs,[...,q,...]) \in rulesDefining f <=> f_l(qs) -> q
rulesDefiningUL a f = fromMaybe [] look
where look = do m1 <- IMap.lookup (enum f) (fwd a)
return [(l,qs,rs) | (l, m2) <- IMap.toList m1
, (qs,rs) <- Map.toList m2]
rulesDefining :: Automaton -> LSym -> [([State], Set State)]
-- (qs,[...,q,...]) \in rulesDefining f_l <=> f_l(qs) -> q
rulesDefining a (f,l) = fromMaybe [] look
where look = do m1 <- IMap.lookup (enum f) (fwd a)
m2 <- IMap.lookup l m1
return $ Map.toList m2
symbols :: Automaton -> Set LSym
symbols a = IMap.foldWithKey f Set.empty (fwd a)
where f fn m s = (Set.fromList [(invEnum fn,l) | l <- IMap.keys m]) `Set.union` s
| mzini/TcT | source/Tct/Method/Bounds/Automata.hs | gpl-3.0 | 9,573 | 0 | 14 | 2,547 | 3,062 | 1,627 | 1,435 | 160 | 3 |
module Main where
import Control.Applicative
import Data.Char
import Data.List hiding (or)
import Prelude hiding (or)
import System.Environment
import System.Exit
import System.Random
import Test.QuickCheck hiding (sample)
import Test.QuickCheck.Gen hiding (sample)
data Regex = Charset String
| Or Regex Regex
| Cat Regex Regex
| Star Regex
| Plus Regex
| Quest Regex
deriving (Eq)
simplify' r@(Charset _) = r
simplify' (Or r1 r2) = Or (simplify' r1) (simplify' r2)
simplify' (Cat r1 r2) = Cat (simplify' r1) (simplify' r2)
simplify' (Star (Star r)) = Star (simplify' r)
simplify' (Star (Plus r)) = Star (simplify' r)
simplify' (Star (Quest r)) = Star (simplify' r)
simplify' (Star r) = Star (simplify' r)
simplify' (Plus (Star r)) = Star (simplify' r)
simplify' (Plus (Plus r)) = Plus (simplify' r)
simplify' (Plus (Quest r)) = Star (simplify' r)
simplify' (Plus r) = Plus (simplify' r)
simplify' (Quest (Star r)) = Star (simplify' r)
simplify' (Quest (Plus r)) = Star (simplify' r)
simplify' (Quest (Quest r)) = Quest (simplify' r)
simplify' (Quest r) = Quest (simplify' r)
simplify r = fst .
head .
dropWhile (\p -> fst p /= snd p) .
zip rs $ tail rs
where
rs = iterate simplify' r
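-- For example, nested quantifiers collapse to a fixpoint (sketch):
--
-- > simplify (Star (Star (Charset "a")))   -- Star (Charset "a"), i.e. "a*"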
ppRegex :: Regex -> String
ppRegex (Charset [c]) = [c]
ppRegex (Charset str) = "[" ++ str ++ "]"
ppRegex (Or r1 r2) = bracketPair r1 r2 "|"
ppRegex (Cat r1 r2) = bracketPair r1 r2 ""
ppRegex (Star r) = bracket r ++ "*"
ppRegex (Plus r) = bracket r ++ "+"
ppRegex (Quest r) = bracket r ++ "?"
bracket r@(Charset _) = ppRegex r
bracket r = "(" ++ ppRegex r ++ ")"
bracketPair r1 r2 sep = bracket r1 ++ sep ++ bracket r2
instance Show Regex where
show = ppRegex
instance Arbitrary Regex where
arbitrary = sized regex
regex 0 = resize 1 (sized charset)
regex n = oneof [ resize (min n 10) (sized charset)
, binOp Or (n `div` 2)
, binOp Cat (n `div` 2)
, unOp Star (n - 1)
, unOp Plus (n - 1)
, unOp Quest (n - 1)
]
unOp op n = op <$> regex n
binOp op n = op <$> regex n <*> regex n
charset n = do
txt <- resize (n * 2) $ listOf1 chars
return . Charset . nub . sort $ txt
chars = oneof [ choose ('a', 'z')
, choose ('A','Z')
]
sample n s (MkGen m) = do
rnd <- newStdGen
let rnds rnd = rnd1 : rnds rnd2 where (rnd1, rnd2) = split rnd
return [(m r s) | r <- take n $ rnds rnd]
rxgen :: Int -> Int -> IO [Regex]
rxgen n s = sample n s $ (arbitrary :: Gen Regex)
usage = do
putStrLn "usage: rxgen <number of regexps to generate> <complexity (number)>"
exitWith $ ExitFailure 1
main :: IO ()
main = do
args <- getArgs
case args of
[count, size] -> rxgen (read count) (read size) >>=
putStr . unlines . map (show . simplify)
_ -> usage
| jthornber/rxgen | Main.hs | gpl-3.0 | 3,030 | 0 | 15 | 927 | 1,352 | 678 | 674 | 82 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Cloudbuild.Projects.Locations.Triggers.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a new \`BuildTrigger\`. This API is experimental.
--
-- /See:/ <https://cloud.google.com/cloud-build/docs/ Cloud Build API Reference> for @cloudbuild.projects.locations.triggers.create@.
module Network.Google.Resource.Cloudbuild.Projects.Locations.Triggers.Create
(
-- * REST Resource
ProjectsLocationsTriggersCreateResource
-- * Creating a Request
, projectsLocationsTriggersCreate
, ProjectsLocationsTriggersCreate
-- * Request Lenses
, pltcParent
, pltcXgafv
, pltcUploadProtocol
, pltcAccessToken
, pltcUploadType
, pltcPayload
, pltcProjectId
, pltcCallback
) where
import Network.Google.ContainerBuilder.Types
import Network.Google.Prelude
-- | A resource alias for @cloudbuild.projects.locations.triggers.create@ method which the
-- 'ProjectsLocationsTriggersCreate' request conforms to.
type ProjectsLocationsTriggersCreateResource =
"v1" :>
Capture "parent" Text :>
"triggers" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "projectId" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] BuildTrigger :>
Post '[JSON] BuildTrigger
-- | Creates a new \`BuildTrigger\`. This API is experimental.
--
-- /See:/ 'projectsLocationsTriggersCreate' smart constructor.
data ProjectsLocationsTriggersCreate =
ProjectsLocationsTriggersCreate'
{ _pltcParent :: !Text
, _pltcXgafv :: !(Maybe Xgafv)
, _pltcUploadProtocol :: !(Maybe Text)
, _pltcAccessToken :: !(Maybe Text)
, _pltcUploadType :: !(Maybe Text)
, _pltcPayload :: !BuildTrigger
, _pltcProjectId :: !(Maybe Text)
, _pltcCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsTriggersCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pltcParent'
--
-- * 'pltcXgafv'
--
-- * 'pltcUploadProtocol'
--
-- * 'pltcAccessToken'
--
-- * 'pltcUploadType'
--
-- * 'pltcPayload'
--
-- * 'pltcProjectId'
--
-- * 'pltcCallback'
projectsLocationsTriggersCreate
:: Text -- ^ 'pltcParent'
-> BuildTrigger -- ^ 'pltcPayload'
-> ProjectsLocationsTriggersCreate
projectsLocationsTriggersCreate pPltcParent_ pPltcPayload_ =
ProjectsLocationsTriggersCreate'
{ _pltcParent = pPltcParent_
, _pltcXgafv = Nothing
, _pltcUploadProtocol = Nothing
, _pltcAccessToken = Nothing
, _pltcUploadType = Nothing
, _pltcPayload = pPltcPayload_
, _pltcProjectId = Nothing
, _pltcCallback = Nothing
}
-- | The parent resource where this trigger will be created. Format:
-- \`projects\/{project}\/locations\/{location}\`
pltcParent :: Lens' ProjectsLocationsTriggersCreate Text
pltcParent
= lens _pltcParent (\ s a -> s{_pltcParent = a})
-- | V1 error format.
pltcXgafv :: Lens' ProjectsLocationsTriggersCreate (Maybe Xgafv)
pltcXgafv
= lens _pltcXgafv (\ s a -> s{_pltcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pltcUploadProtocol :: Lens' ProjectsLocationsTriggersCreate (Maybe Text)
pltcUploadProtocol
= lens _pltcUploadProtocol
(\ s a -> s{_pltcUploadProtocol = a})
-- | OAuth access token.
pltcAccessToken :: Lens' ProjectsLocationsTriggersCreate (Maybe Text)
pltcAccessToken
= lens _pltcAccessToken
(\ s a -> s{_pltcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pltcUploadType :: Lens' ProjectsLocationsTriggersCreate (Maybe Text)
pltcUploadType
= lens _pltcUploadType
(\ s a -> s{_pltcUploadType = a})
-- | Multipart request metadata.
pltcPayload :: Lens' ProjectsLocationsTriggersCreate BuildTrigger
pltcPayload
= lens _pltcPayload (\ s a -> s{_pltcPayload = a})
-- | Required. ID of the project for which to configure automatic builds.
pltcProjectId :: Lens' ProjectsLocationsTriggersCreate (Maybe Text)
pltcProjectId
= lens _pltcProjectId
(\ s a -> s{_pltcProjectId = a})
-- | JSONP
pltcCallback :: Lens' ProjectsLocationsTriggersCreate (Maybe Text)
pltcCallback
= lens _pltcCallback (\ s a -> s{_pltcCallback = a})
instance GoogleRequest
ProjectsLocationsTriggersCreate
where
type Rs ProjectsLocationsTriggersCreate =
BuildTrigger
type Scopes ProjectsLocationsTriggersCreate =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsLocationsTriggersCreate'{..}
= go _pltcParent _pltcXgafv _pltcUploadProtocol
_pltcAccessToken
_pltcUploadType
_pltcProjectId
_pltcCallback
(Just AltJSON)
_pltcPayload
containerBuilderService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsTriggersCreateResource)
mempty
| brendanhay/gogol | gogol-containerbuilder/gen/Network/Google/Resource/Cloudbuild/Projects/Locations/Triggers/Create.hs | mpl-2.0 | 5,996 | 0 | 18 | 1,358 | 861 | 501 | 360 | 129 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CloudTrail.DescribeTrails
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Retrieves settings for the trail associated with the current region for your
-- account.
--
-- <http://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_DescribeTrails.html>
module Network.AWS.CloudTrail.DescribeTrails
(
-- * Request
DescribeTrails
-- ** Request constructor
, describeTrails
-- ** Request lenses
, dtTrailNameList
-- * Response
, DescribeTrailsResponse
-- ** Response constructor
, describeTrailsResponse
-- ** Response lenses
, dtrTrailList
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CloudTrail.Types
import qualified GHC.Exts
newtype DescribeTrails = DescribeTrails
{ _dtTrailNameList :: List "trailNameList" Text
} deriving (Eq, Ord, Read, Show, Monoid, Semigroup)
instance GHC.Exts.IsList DescribeTrails where
type Item DescribeTrails = Text
fromList = DescribeTrails . GHC.Exts.fromList
toList = GHC.Exts.toList . _dtTrailNameList
-- | 'DescribeTrails' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dtTrailNameList' @::@ ['Text']
--
describeTrails :: DescribeTrails
describeTrails = DescribeTrails
{ _dtTrailNameList = mempty
}
-- | The trail returned.
dtTrailNameList :: Lens' DescribeTrails [Text]
dtTrailNameList = lens _dtTrailNameList (\s a -> s { _dtTrailNameList = a }) . _List
newtype DescribeTrailsResponse = DescribeTrailsResponse
{ _dtrTrailList :: List "trailList" Trail
} deriving (Eq, Read, Show, Monoid, Semigroup)
instance GHC.Exts.IsList DescribeTrailsResponse where
type Item DescribeTrailsResponse = Trail
fromList = DescribeTrailsResponse . GHC.Exts.fromList
toList = GHC.Exts.toList . _dtrTrailList
-- | 'DescribeTrailsResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dtrTrailList' @::@ ['Trail']
--
describeTrailsResponse :: DescribeTrailsResponse
describeTrailsResponse = DescribeTrailsResponse
{ _dtrTrailList = mempty
}
-- | The list of trails.
dtrTrailList :: Lens' DescribeTrailsResponse [Trail]
dtrTrailList = lens _dtrTrailList (\s a -> s { _dtrTrailList = a }) . _List
instance ToPath DescribeTrails where
toPath = const "/"
instance ToQuery DescribeTrails where
toQuery = const mempty
instance ToHeaders DescribeTrails
instance ToJSON DescribeTrails where
toJSON DescribeTrails{..} = object
[ "trailNameList" .= _dtTrailNameList
]
instance AWSRequest DescribeTrails where
type Sv DescribeTrails = CloudTrail
type Rs DescribeTrails = DescribeTrailsResponse
request = post "DescribeTrails"
response = jsonResponse
instance FromJSON DescribeTrailsResponse where
parseJSON = withObject "DescribeTrailsResponse" $ \o -> DescribeTrailsResponse
<$> o .:? "trailList" .!= mempty
| dysinger/amazonka | amazonka-cloudtrail/gen/Network/AWS/CloudTrail/DescribeTrails.hs | mpl-2.0 | 3,895 | 0 | 10 | 803 | 546 | 327 | 219 | 62 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.AdvertiserGroups.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing advertiser group. This method supports patch
-- semantics.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.advertiserGroups.patch@.
module Network.Google.Resource.DFAReporting.AdvertiserGroups.Patch
(
-- * REST Resource
AdvertiserGroupsPatchResource
-- * Creating a Request
, advertiserGroupsPatch
, AdvertiserGroupsPatch
-- * Request Lenses
, agpXgafv
, agpUploadProtocol
, agpAccessToken
, agpUploadType
, agpProFileId
, agpPayload
, agpId
, agpCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.advertiserGroups.patch@ method which the
-- 'AdvertiserGroupsPatch' request conforms to.
type AdvertiserGroupsPatchResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"advertiserGroups" :>
QueryParam "id" (Textual Int64) :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] AdvertiserGroup :>
Patch '[JSON] AdvertiserGroup
-- | Updates an existing advertiser group. This method supports patch
-- semantics.
--
-- /See:/ 'advertiserGroupsPatch' smart constructor.
data AdvertiserGroupsPatch =
AdvertiserGroupsPatch'
{ _agpXgafv :: !(Maybe Xgafv)
, _agpUploadProtocol :: !(Maybe Text)
, _agpAccessToken :: !(Maybe Text)
, _agpUploadType :: !(Maybe Text)
, _agpProFileId :: !(Textual Int64)
, _agpPayload :: !AdvertiserGroup
, _agpId :: !(Textual Int64)
, _agpCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AdvertiserGroupsPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'agpXgafv'
--
-- * 'agpUploadProtocol'
--
-- * 'agpAccessToken'
--
-- * 'agpUploadType'
--
-- * 'agpProFileId'
--
-- * 'agpPayload'
--
-- * 'agpId'
--
-- * 'agpCallback'
advertiserGroupsPatch
:: Int64 -- ^ 'agpProFileId'
-> AdvertiserGroup -- ^ 'agpPayload'
-> Int64 -- ^ 'agpId'
-> AdvertiserGroupsPatch
advertiserGroupsPatch pAgpProFileId_ pAgpPayload_ pAgpId_ =
AdvertiserGroupsPatch'
{ _agpXgafv = Nothing
, _agpUploadProtocol = Nothing
, _agpAccessToken = Nothing
, _agpUploadType = Nothing
, _agpProFileId = _Coerce # pAgpProFileId_
, _agpPayload = pAgpPayload_
, _agpId = _Coerce # pAgpId_
, _agpCallback = Nothing
}
-- | V1 error format.
agpXgafv :: Lens' AdvertiserGroupsPatch (Maybe Xgafv)
agpXgafv = lens _agpXgafv (\ s a -> s{_agpXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
agpUploadProtocol :: Lens' AdvertiserGroupsPatch (Maybe Text)
agpUploadProtocol
= lens _agpUploadProtocol
(\ s a -> s{_agpUploadProtocol = a})
-- | OAuth access token.
agpAccessToken :: Lens' AdvertiserGroupsPatch (Maybe Text)
agpAccessToken
= lens _agpAccessToken
(\ s a -> s{_agpAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
agpUploadType :: Lens' AdvertiserGroupsPatch (Maybe Text)
agpUploadType
= lens _agpUploadType
(\ s a -> s{_agpUploadType = a})
-- | User profile ID associated with this request.
agpProFileId :: Lens' AdvertiserGroupsPatch Int64
agpProFileId
= lens _agpProFileId (\ s a -> s{_agpProFileId = a})
. _Coerce
-- | Multipart request metadata.
agpPayload :: Lens' AdvertiserGroupsPatch AdvertiserGroup
agpPayload
= lens _agpPayload (\ s a -> s{_agpPayload = a})
-- | AdvertiserGroup ID.
agpId :: Lens' AdvertiserGroupsPatch Int64
agpId
= lens _agpId (\ s a -> s{_agpId = a}) . _Coerce
-- | JSONP
agpCallback :: Lens' AdvertiserGroupsPatch (Maybe Text)
agpCallback
= lens _agpCallback (\ s a -> s{_agpCallback = a})
instance GoogleRequest AdvertiserGroupsPatch where
type Rs AdvertiserGroupsPatch = AdvertiserGroup
type Scopes AdvertiserGroupsPatch =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient AdvertiserGroupsPatch'{..}
= go _agpProFileId (Just _agpId) _agpXgafv
_agpUploadProtocol
_agpAccessToken
_agpUploadType
_agpCallback
(Just AltJSON)
_agpPayload
dFAReportingService
where go
= buildClient
(Proxy :: Proxy AdvertiserGroupsPatchResource)
mempty
| brendanhay/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/AdvertiserGroups/Patch.hs | mpl-2.0 | 5,702 | 0 | 20 | 1,376 | 909 | 525 | 384 | 128 | 1 |
import qualified Data.List ( )
| lspitzner/brittany | data/Test151.hs | agpl-3.0 | 62 | 0 | 4 | 36 | 10 | 6 | 4 | 1 | 0 |
module Model.AssetRevision.Types
( AssetRevision(..)
) where
import Model.Asset.Types
data AssetRevision = AssetRevision
{ revisionAsset :: !Asset
, revisionOrig :: !Asset
}
-- makeAssetRevision :: Asset -> Asset -> AssetRevision
-- makeAssetRevision o a = AssetRevision a o
| databrary/databrary | src/Model/AssetRevision/Types.hs | agpl-3.0 | 288 | 0 | 9 | 50 | 47 | 30 | 17 | 10 | 0 |
{-# LANGUAGE LambdaCase #-}
-- | Parsers for 'MidiMessage' and its components, implemented as Attoparsec
-- parsers. See "Data.Attoparsec.ByteString" for how to run them. In most common
-- use cases, the 'decodeMidi' function in "Sound.MIDI" should suffice.
module Sound.MIDI.Parser where
import Control.Applicative
import Sound.MIDI.Types
import Data.Bits
import Data.Word
import Data.Attoparsec.ByteString
import qualified Data.ByteString as B
import Prelude hiding (take)
midiMessage :: Parser MidiMessage
midiMessage = go =<< peekWord8'
where go x = case x .&. 0xF0 of
0xB0 -> ChannelMode <$> channelMode
<|> ChannelVoice <$> channelVoice
0xF0 -> system x
_ -> ChannelVoice <$> channelVoice
system x
| x == 0xF0 = SystemExclusive <$> systemExclusive
| x <= 0xF7 = SystemCommon <$> systemCommon
| otherwise = SystemRealTime <$> systemRealTime
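-- A quick sketch of running this parser on a raw note-on message via
-- attoparsec's 'parseOnly' (the byte values are illustrative):
--
-- > parseOnly midiMessage (B.pack [0x90, 0x3C, 0x40])
-- > -- Right (ChannelVoice (NoteOn (Channel 0) (Pitch 60) (Velocity 64)))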
skipToStatus :: Parser ()
skipToStatus = skipWhile (not . flip testBit 7)
channelVoice :: Parser ChannelVoice
channelVoice = go =<< peekWord8'
where go x = case x .&. 0xF0 of
0x80 -> noteOff
0x90 -> noteOn
0xA0 -> aftertouch
0xB0 -> controlChange
0xC0 -> patchChange
0xD0 -> channelPressure
0xE0 -> pitchBend
_ -> empty
channelMessage :: Word8 -> (Word8 -> Parser a) -> Parser a
channelMessage header p = do
status <- anyWord8
let upper = unsafeShiftR status 4
lower = status .&. 0x0F
if upper == header then p lower else empty
{-# INLINE channelMessage #-}
noteOff :: Parser ChannelVoice
noteOff = channelMessage 0x08 $ \c ->
NoteOff (Channel c) <$> pitch <*> velocity
noteOn :: Parser ChannelVoice
noteOn = channelMessage 0x09 $ \c ->
NoteOn (Channel c) <$> pitch <*> velocity
aftertouch :: Parser ChannelVoice
aftertouch = channelMessage 0x0A $ \c ->
Aftertouch (Channel c) <$> pitch <*> touch
controlChange :: Parser ChannelVoice
controlChange = channelMessage 0x0B $ \c ->
ControlChange (Channel c) <$> controller <*> anyWord8
patchChange :: Parser ChannelVoice
patchChange = channelMessage 0x0C $ \c ->
PatchChange (Channel c) <$> patch
channelPressure :: Parser ChannelVoice
channelPressure = channelMessage 0x0D $ \c ->
ChannelPressure (Channel c) <$> touch
pitchBend :: Parser ChannelVoice
pitchBend = channelMessage 0x0E $ \c ->
PitchBend (Channel c) <$> anyWord14
anyWord14 :: Parser Word16
anyWord14 = go <$> take 2
where go x = let l = x `B.index` 0
m = x `B.index` 1
in unsafeShiftL (fromIntegral m) 7 + fromIntegral l
channelMode :: Parser ChannelMode
channelMode = channelMessage 0x0B $ \c -> anyWord8 >>= \case
0x78 -> AllSoundOff (Channel c) <$ word8 0x00
0x79 -> ResetAllControllers (Channel c) <$ word8 0x00
0x7A -> LocalControl (Channel c) <$> bool'
0x7B -> AllNotesOff (Channel c) <$ word8 0x00
0x7C -> OmniOff (Channel c) <$ word8 0x00
0x7D -> OmniOn (Channel c) <$ word8 0x00
0x7E -> MonoOn (Channel c) <$> anyWord8
0x7F -> PolyOn (Channel c) <$ word8 0x00
_ -> empty
where bool' = anyWord8 >>= \case
0x00 -> pure False
0x7f -> pure True
_ -> empty
systemCommon :: Parser SystemCommon
systemCommon = peekWord8' >>= \case
0xF1 -> mtcQuarter
0xF2 -> songPosition
0xF3 -> songSelect
0xF6 -> tuneRequest
0xF7 -> eox
_ -> empty
mtcQuarter :: Parser SystemCommon
mtcQuarter = MTCQuarter <$> (word8 0xF1 *> anyWord8)
songPosition :: Parser SystemCommon
songPosition = SongPosition <$> (word8 0xF2 *> (PositionPointer <$> anyWord14))
songSelect :: Parser SystemCommon
songSelect = SongSelect <$> (word8 0xF3 *> anyWord8)
tuneRequest :: Parser SystemCommon
tuneRequest = TuneRequest <$ word8 0xF6
eox :: Parser SystemCommon
eox = EOX <$ word8 0xF7
systemRealTime :: Parser SystemRealTime
systemRealTime = anyWord8 >>= \case
0xF8 -> pure TimingClock
0xFA -> pure Start
0xFB -> pure Continue
0xFC -> pure Stop
0xFE -> pure ActiveSensing
0xFF -> pure SystemReset
_ -> empty
systemExclusive :: Parser SystemExclusive
systemExclusive = Exclusive
<$> (word8 0xF0 *> vendorId)
<*> takeTill (`testBit` 7)
vendorId :: Parser VendorId
vendorId = longId <|> shortId
where longId = VendorIdLong <$> (word8 0x00 *> anyWord8) <*> anyWord8
shortId = VendorIdShort <$> anyWord8
-- | Parse a 'Pitch', no check for bit 7 is performed!
pitch :: Parser Pitch
pitch = Pitch <$> anyWord8
-- | Parse a 'Patch', no check for bit 7 is performed!
patch :: Parser Patch
patch = Patch <$> anyWord8
-- | Parse a 'Velocity', no check for bit 7 is performed!
velocity :: Parser Velocity
velocity = Velocity <$> anyWord8
-- | Parse a 'Touch', no check for bit 7 is performed!
touch :: Parser Touch
touch = Touch <$> anyWord8
-- | Parse a 'Controller', no check for bit 7 is performed!
controller :: Parser Controller
controller = Controller <$> anyWord8
| tsahyt/midi-simple | src/Sound/MIDI/Parser.hs | lgpl-3.0 | 5,229 | 0 | 14 | 1,378 | 1,426 | 724 | 702 | 126 | 11 |
import Control.Monad (replicateM)
import Data.List (genericIndex)
facs :: [Integer]
facs = 1 : map fac [1..]
where fac n = n * (genericIndex facs (n - 1))
factorial = genericIndex facs
teams :: Integer -> Integer -> Integer
teams _ 0 = 1
teams n k = div (factorial n) ((factorial k) * (factorial (n - k)))
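-- For example, teams 5 2 == 10 (i.e. "5 choose 2"); runTest reduces the
-- result modulo 100000007 afterwards.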
runTest :: IO Integer
runTest = do
s <- getLine
let [n, k] = map read . take 2 $ words s
return $ mod (teams n k) 100000007
main = do
t <- readLn
rs <- replicateM t runTest
putStr . unlines $ map show rs
| itsbruce/hackerrank | func/memo/differentWays.hs | unlicense | 541 | 0 | 12 | 135 | 266 | 133 | 133 | 18 | 1 |
{-# LANGUAGE Haskell2010 #-}
module Deprecated where
-- | Docs for something deprecated
deprecated :: Int
deprecated = 1
{-# DEPRECATED deprecated "Don't use this" #-}
| haskell/haddock | latex-test/src/Deprecated/Deprecated.hs | bsd-2-clause | 170 | 0 | 4 | 28 | 17 | 12 | 5 | 5 | 1 |
-- http://www.codewars.com/kata/54d6abf84a35017d30000b26
module Data.Complex.Gaussian.Prime where
import Data.Complex.Gaussian (Gaussian (..), norm)
isGaussianPrime :: Gaussian -> Bool
isGaussianPrime z@(Gaussian x y)
= n==2
|| n`mod`4==1 && isPrime n
|| y==0 && abs x `mod` 4==3 && isPrime (abs x)
|| x==0 && abs y `mod` 4==3 && isPrime (abs y)
where
n = norm z
isPrime m = m > 1 && foldr (\p r -> p*p > m || ((m `rem` p) /= 0 && r)) True primes
primes = 2 : filter isPrime [3,5..]
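-- Illustrative cases (sketch):
--
-- > isGaussianPrime (Gaussian 1 1)   -- True  (norm 2)
-- > isGaussianPrime (Gaussian 3 0)   -- True  (3 is prime and 3 `mod` 4 == 3)
-- > isGaussianPrime (Gaussian 2 0)   -- False (2 = -i * (1+i)^2)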
| Bodigrim/katas | src/haskell/B-Gaussian-primes.hs | bsd-2-clause | 516 | 0 | 21 | 117 | 251 | 136 | 115 | 11 | 1 |
{-# LANGUAGE Haskell2010 #-}
module Operators where
(+++) :: [a] -> [a] -> [a]
a +++ b = a ++ b ++ a
($$$) :: [a] -> [a] -> [a]
a $$$ b = b +++ a
(***) :: [a] -> [a] -> [a]
(***) a [] = a
(***) a (_:b) = a +++ (a *** b)
(*/\*) :: [[a]] -> [a] -> [a]
a */\* b = concatMap (*** b) a
(**/\**) :: [[a]] -> [[a]] -> [[a]]
a **/\** b = zipWith (*/\*) [a +++ b] (a $$$ b)
(#.#) :: a -> b -> (c -> (a, b))
a #.# b = const $ (a, b)
| haskell/haddock | hypsrc-test/src/Operators.hs | bsd-2-clause | 431 | 0 | 9 | 118 | 318 | 184 | 134 | 15 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ExistentialQuantification #-}
module Ivory.Language.Proc where
import Ivory.Language.Monad
import Ivory.Language.Proxy
import Ivory.Language.Type
import Ivory.Language.Effects
import qualified Ivory.Language.Effects as E
import qualified Ivory.Language.Syntax as AST
-- Function Type ---------------------------------------------------------------
-- | The kind of procedures.
data Proc k = [k] :-> k
-- | Typeclass for procedure types, parametrized over the C procedure's
-- signature, to produce a value representing their signature.
class ProcType (sig :: Proc *) where
-- | Turn a type-level description of the signature into a (return
-- type, [argument types]) value.
procType :: Proxy sig -> (AST.Type,[AST.Type])
-- Base case: C procedure taking no arguments and returning an
-- 'IvoryType'.
instance IvoryType r => ProcType ('[] :-> r) where
procType _ = (ivoryType (Proxy :: Proxy r),[])
-- Inductive case: Anything in 'ProcType' is still in 'ProcType' if it
-- has another 'IvoryType' argument prepended to its signature.
instance (IvoryType a, ProcType (args :-> r))
=> ProcType ((a ': args) :-> r) where
procType _ = (r, ivoryType (Proxy :: Proxy a) : args)
where
(r,args) = procType (Proxy :: Proxy (args :-> r))
-- Function Pointers -----------------------------------------------------------
-- | Procedure pointers
newtype ProcPtr (sig :: Proc *) = ProcPtr { getProcPtr :: AST.Name }
instance ProcType proc => IvoryType (ProcPtr proc) where
ivoryType _ = AST.TyProc r args
where
(r,args) = procType (Proxy :: Proxy proc)
instance ProcType proc => IvoryVar (ProcPtr proc) where
wrapVar = ProcPtr . AST.NameVar
unwrapExpr ptr = case getProcPtr ptr of
AST.NameSym sym -> AST.ExpSym sym
AST.NameVar var -> AST.ExpVar var
procPtr :: ProcType sig => Def sig -> ProcPtr sig
procPtr = ProcPtr . defSymbol
-- Function Symbols ------------------------------------------------------------
-- | Procedure definitions.
data Def (proc :: Proc *)
= DefProc AST.Proc
| DefImport AST.Import
deriving (Show, Eq, Ord)
defSymbol :: Def proc -> AST.Name
defSymbol def = case def of
DefProc p -> AST.NameSym (AST.procSym p)
DefImport i -> AST.NameSym (AST.importSym i)
instance ProcType proc => IvoryType (Def proc) where
ivoryType _ = AST.TyProc r args
where
(r,args) = procType (Proxy :: Proxy proc)
-- Procedure Definition --------------------------------------------------------
-- | Procedure definition.
proc :: forall proc impl. IvoryProcDef proc impl => AST.Sym -> impl -> Def proc
proc name impl = defproc
where
(r,args) = procType (Proxy :: Proxy proc)
(vars,def) = procDef initialClosure Proxy impl
defproc = case def of
Defined block -> DefProc $
AST.Proc { AST.procSym = name
, AST.procRetTy = r
, AST.procArgs = zipWith AST.Typed args vars
, AST.procBody = blockStmts block
, AST.procRequires = blockRequires block
, AST.procEnsures = blockEnsures block
}
Imported header reqs ens -> DefImport $
AST.Import { AST.importSym = name
, AST.importFile = header
, AST.importRetTy = r
, AST.importArgs = zipWith AST.Typed args vars
, AST.importRequires = reqs
, AST.importEnsures = ens
}
-- | Type inference can usually determine the argument types of an Ivory
-- procedure, but for void procedures there's often nothing to constrain
-- the return type. This function is a type-constrained version of
-- 'proc' that just forces the return type to be '()'.
voidProc :: IvoryProcDef (args :-> ()) impl =>
AST.Sym -> impl -> Def (args :-> ())
voidProc = proc
newtype Body r = Body
{ runBody :: forall s . Ivory (E.ProcEffects s r) ()
}
class WrapIvory m where
type Return m
wrap :: (forall s . Ivory (E.ProcEffects s r) (Return m)) -> m r
unwrap :: m r -> (forall s . Ivory (E.ProcEffects s r) (Return m))
instance WrapIvory Body where
type Return Body = ()
wrap = Body
unwrap = runBody
body :: IvoryType r
=> (forall s . Ivory (E.ProcEffects s r) ())
-> Body r
body m = Body m
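-- A minimal usage sketch (assuming a numeric Ivory type such as 'Sint32'
-- is in scope; not tied to any particular program):
--
-- > add :: Def ('[Sint32, Sint32] :-> Sint32)
-- > add = proc "add" $ \x y -> body $ ret (x + y)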
data Definition = Defined CodeBlock
| Imported FilePath [AST.Require] [AST.Ensure]
-- | Typeclass for producing an Ivory procedure definition;
-- the type is parametrized over:
--
-- * The procedure type 'proc', encoding the C procedure's signature
-- via the 'Proc' kind,
-- * The implementation type 'impl' - either 'Body' for the return
-- value, or else a Haskell function type whose arguments correspond
-- to the C arguments and whose return type is @Body r@ on the return
-- type @r@.
class ProcType proc => IvoryProcDef (proc :: Proc *) impl | impl -> proc where
procDef :: Closure -> Proxy proc -> impl -> ([AST.Var], Definition)
-- Base case: No arguments in C signature
instance IvoryType ret => IvoryProcDef ('[] :-> ret) (Body ret) where
procDef env _ b = (getEnv env, Defined (snd (primRunIvory (runBody b))))
-- Inductive case: Remove first argument from C signature, and
-- parametrize 'impl' over another argument of the same type.
instance (IvoryVar a, IvoryProcDef (args :-> ret) k)
=> IvoryProcDef ((a ': args) :-> ret) (a -> k) where
procDef env _ k = procDef env' (Proxy :: Proxy (args :-> ret)) (k arg)
where
(var,env') = genVar env
arg = wrapVar var
-- | A variable name supply, and the typed values that have been generated.
data Closure = Closure
{ closSupply :: [AST.Var]
, closEnv :: [AST.Var]
}
-- | Initial closure, with no environment and a large supply of names.
initialClosure :: Closure
initialClosure = Closure
{ closSupply = [ AST.VarName ("var" ++ show (n :: Int)) | n <- [0 ..] ]
, closEnv = []
}
-- | Given a type and a closure, generate a typed variable, and a new closure
-- with that typed variable in its environment.
genVar :: Closure -> (AST.Var, Closure)
genVar clos = (var, clos')
where
var = head (closSupply clos)
clos' = Closure
{ closSupply = tail (closSupply clos)
, closEnv = var : closEnv clos
}
-- | Retrieve the environment from a closure.
getEnv :: Closure -> [AST.Var]
getEnv = reverse . closEnv
-- Imported Functions ----------------------------------------------------------
-- | Import a function from a C header.
importProc :: forall proc. ProcType proc => AST.Sym -> String -> Def proc
importProc sym file = DefImport AST.Import
{ AST.importSym = sym
, AST.importFile = file
, AST.importRetTy = retTy
, AST.importArgs = args
, AST.importRequires = []
, AST.importEnsures = []
}
where
(retTy, argTys) = procType (Proxy :: Proxy proc)
args = zipWith AST.Typed argTys (closSupply initialClosure)
newtype ImportFrom r = ImportFrom
{ runImportFrom :: forall s . Ivory (E.ProcEffects s r) FilePath
}
instance WrapIvory ImportFrom where
type Return ImportFrom = FilePath
wrap = ImportFrom
unwrap = runImportFrom
importFrom :: String -> ImportFrom a
importFrom h = ImportFrom (return h)
instance IvoryType ret => IvoryProcDef ('[] :-> ret) (ImportFrom ret) where
procDef env _ b = (getEnv env, Imported header reqs ens)
where
(header, block) = primRunIvory (runImportFrom b)
reqs = blockRequires block
ens = blockEnsures block
-- Call ------------------------------------------------------------------------
-- | Direct calls.
call :: forall proc eff impl. IvoryCall proc eff impl => Def proc -> impl
call def = callAux (defSymbol def) (Proxy :: Proxy proc) []
-- | Indirect calls.
indirect :: forall proc eff impl. IvoryCall proc eff impl
=> ProcPtr proc -> impl
indirect ptr = callAux (getProcPtr ptr) (Proxy :: Proxy proc) []
-- | Typeclass for something callable in Ivory (and returning a
-- result). Parameter 'proc' is the procedure type (encoding the
-- arguments and return of the C procedure via the 'Proc' kind, as in
-- 'IvoryProcDef'), parameter 'eff' is the effect context (which
-- remains unchanged through the calls here), and parameter 'impl', as
-- in 'IvoryProcDef', is the implementation type.
class IvoryCall (proc :: Proc *) (eff :: E.Effects) impl
| proc eff -> impl, impl -> eff where
-- | Recursive helper call. 'proc' encodes a C procedure type, and
-- this call has two main parts:
--
-- * If 'proc' contains arguments, then 'impl' must be a function
-- type causing this whole call to expect an Ivory value that was
-- passed in to apply to the C procedure. In this case, 'proc' is
-- reduced by removing the first C argument from the type itself,
-- and the argument to 'impl' is accumulated onto the list of
-- typed expressions.
-- * If 'proc' contains no arguments, then this returns the Ivory
-- effect which calls the function with all the arguments in the
-- list applied to it, and captures and returns the result.
callAux :: AST.Name -> Proxy proc -> [AST.Typed AST.Expr] -> impl
instance IvoryVar r => IvoryCall ('[] :-> r) eff (Ivory eff r) where
-- Base case ('proc' takes no arguments, 'impl' is just an Ivory
-- effect):
callAux sym _ args = do
r <- freshVar "r"
emit (AST.Call (ivoryType (Proxy :: Proxy r)) (Just r) sym (reverse args))
return (wrapVar r)
instance (IvoryVar a, IvoryVar r, IvoryCall (args :-> r) eff impl)
=> IvoryCall ((a ': args) :-> r) eff (a -> impl) where
-- Inductive case: note that 'proc' reduces from ((a ': args) :-> r)
-- down to (args :-> r) in the proxy, and that 'impl' is (a -> impl)
-- and we put that 'a' onto the list of arguments.
callAux sym _ args a = callAux sym rest args'
where
rest = Proxy :: Proxy (args :-> r)
args' = typedExpr a : args
-- Call_ -----------------------------------------------------------------------
-- | Direct calls, ignoring the result.
call_ :: forall proc eff impl. IvoryCall_ proc eff impl => Def proc -> impl
call_ def = callAux_ (defSymbol def) (Proxy :: Proxy proc) []
-- | Indirect calls, ignoring the result.
indirect_ :: forall proc eff impl. IvoryCall_ proc eff impl
=> ProcPtr proc -> impl
indirect_ ptr = callAux_ (getProcPtr ptr) (Proxy :: Proxy proc) []
-- | Typeclass for something callable in Ivory without a return value
-- needed. This is otherwise identical to 'IvoryCall'.
class IvoryCall_ (proc :: Proc *) (eff :: E.Effects) impl
| proc eff -> impl, impl -> eff
where
callAux_ :: AST.Name -> Proxy proc -> [AST.Typed AST.Expr] -> impl
instance IvoryType r => IvoryCall_ ('[] :-> r) eff (Ivory eff ()) where
callAux_ sym _ args = do
emit (AST.Call (ivoryType (Proxy :: Proxy r)) Nothing sym (reverse args))
instance (IvoryVar a, IvoryType r, IvoryCall_ (args :-> r) eff impl)
=> IvoryCall_ ((a ': args) :-> r) eff (a -> impl) where
callAux_ sym _ args a = callAux_ sym rest args'
where
rest = Proxy :: Proxy (args :-> r)
args' = typedExpr a : args
-- Return ----------------------------------------------------------------------
-- | Primitive return from function.
ret :: (GetReturn eff ~ Returns r, IvoryVar r) => r -> Ivory eff ()
ret r = emit (AST.Return (typedExpr r))
-- | Primitive void return from function.
retVoid :: (GetReturn eff ~ Returns ()) => Ivory eff ()
retVoid = emit AST.ReturnVoid
| Hodapp87/ivory | ivory/src/Ivory/Language/Proc.hs | bsd-3-clause | 11,895 | 0 | 14 | 2,669 | 3,028 | 1,640 | 1,388 | 173 | 2 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE LambdaCase #-}
module Development.Cake3.Ext.UrWeb where
import Data.Data
import Data.Char
import Data.Typeable
import Data.Generics
import Data.Maybe
import Data.Monoid
import Data.List ()
import qualified Data.List as L
import Data.Set (Set)
import qualified Data.Set as S
import Data.Foldable (Foldable(..), foldl')
import qualified Data.Foldable as F
import Data.ByteString.Char8 (ByteString(..))
import qualified Data.ByteString.Char8 as BS
import qualified Data.Text as T
import Data.String
import Control.Monad.Trans
import Control.Monad.State
import Control.Monad.Writer
import Control.Monad.Error
import Language.JavaScript.Parser as JS
import Network.Mime (defaultMimeLookup)
import Text.Printf
import Text.Parsec as P hiding (string)
import Text.Parsec.Token as P hiding(lexeme, symbol)
import qualified Text.Parsec.Token as P
import Text.Parsec.ByteString as P
import qualified System.FilePath as F
import System.Directory
import System.IO as IO
import System.FilePath.Wrapper
import Development.Cake3.Monad
import Development.Cake3 hiding (many, (<|>))
data UrpAllow = UrpMime | UrpUrl | UrpResponseHeader | UrpEnvVar | UrpHeader
deriving(Show,Data,Typeable)
data UrpRewrite = UrpStyle | UrpAll | UrpTable
deriving(Show,Data,Typeable)
data UrpHdrToken = UrpDatabase String
| UrpSql File
| UrpAllow UrpAllow String
| UrpRewrite UrpRewrite String
| UrpLibrary File
| UrpDebug
| UrpInclude File
| UrpLink (Either File String)
| UrpSrc File String String
| UrpPkgConfig String
| UrpFFI File
| UrpJSFunc String String String -- ^ Module name, UrWeb name, JavaScript name
| UrpSafeGet String
| UrpScript String
| UrpClientOnly String
deriving(Show,Data,Typeable)
data UrpModToken
= UrpModule1 File
| UrpModule2 File File
| UrpModuleSys String
deriving(Show,Data,Typeable)
data Urp = Urp {
urp :: File
, uexe :: Maybe File
, uhdr :: [UrpHdrToken]
, umod :: [UrpModToken]
} deriving(Show,Data,Typeable)
newtype UWLib = UWLib Urp
deriving (Show,Data,Typeable)
newtype UWExe = UWExe Urp
deriving (Show,Data,Typeable)
instance (MonadAction a m) => RefInput a m UWLib where
refInput (UWLib u) = refInput (urp u)
instance (MonadAction a m) => RefInput a m UWExe where
refInput (UWExe u) = refInput (urpExe u)
class UrpLike x where
toUrp :: x -> Urp
tempfiles :: x -> [File]
tempfiles = (\x -> (urpObjs x) ++ maybeToList (urpSql' x) ++ maybeToList (urpExe' x)) . toUrp
instance UrpLike Urp where
toUrp = id
instance UrpLike UWLib where
toUrp (UWLib x) = x
instance UrpLike UWExe where
toUrp (UWExe x) = x
urpDeps :: Urp -> [File]
urpDeps (Urp _ _ hdr mod) = foldl' scan2 (foldl' scan1 mempty hdr) mod where
scan1 a (UrpLink (Left f)) = f:a
scan1 a (UrpSrc f _ _) = (f.="o"):a
scan1 a (UrpInclude f) = f:a
scan1 a _ = a
scan2 a (UrpModule1 f) = f:a
scan2 a (UrpModule2 f1 f2) = f1:f2:a
scan2 a _ = a
urpSql' :: Urp -> Maybe File
urpSql' (Urp _ _ hdr _) = find hdr where
find [] = Nothing
find ((UrpSql f):hs) = Just f
find (h:hs) = find hs
urpSql :: Urp -> File
urpSql u = case urpSql' u of
Nothing -> error "ur project defines no SQL file"
Just sql -> sql
urpSrcs (Urp _ _ hdr _) = foldl' scan [] hdr where
scan a (UrpSrc f cfl lfl) = (f,cfl):a
scan a _ = a
urpObjs (Urp _ _ hdr _) = foldl' scan [] hdr where
scan a (UrpSrc f _ lfl) = (f.="o"):a
scan a (UrpLink (Left f)) = (f):a
scan a _ = a
urpLibs (Urp _ _ hdr _) = foldl' scan [] hdr where
scan a (UrpLibrary f) = f:a
scan a _ = a
urpExe' = uexe
urpExe u = case uexe u of
Nothing -> error "ur project defines no EXE file"
Just exe -> exe
urpPkgCfg (Urp _ _ hdr _) = foldl' scan [] hdr where
scan a (UrpPkgConfig s) = s:a
scan a _ = a
data UrpState = UrpState {
urpst :: Urp
, urautogen :: File
} deriving (Show)
defState urp = UrpState (Urp urp Nothing [] []) (fromFilePath "autogen")
class ToUrpWord a where
toUrpWord :: a -> String
instance ToUrpWord UrpAllow where
toUrpWord (UrpMime) = "mime"
toUrpWord (UrpHeader) = "requestHeader"
toUrpWord (UrpUrl) = "url"
toUrpWord (UrpEnvVar) = "env"
toUrpWord (UrpResponseHeader) = "responseHeader"
instance ToUrpWord UrpRewrite where
toUrpWord (UrpStyle) = "style"
toUrpWord (UrpAll) = "all"
toUrpWord (UrpTable) = "table"
class ToUrpLine a where
toUrpLine :: FilePath -> a -> String
maskPkgCfg s = "%" ++ (map toUpper s) ++ "%"
instance ToUrpLine UrpHdrToken where
toUrpLine up (UrpDatabase dbs) = printf "database %s" dbs
toUrpLine up (UrpSql f) = printf "sql %s" (up </> toFilePath f)
toUrpLine up (UrpAllow a s) = printf "allow %s %s" (toUrpWord a) s
toUrpLine up (UrpRewrite a s) = printf "rewrite %s %s" (toUrpWord a) s
toUrpLine up (UrpLibrary f)
| (takeFileName f) == "lib.urp" = printf "library %s" (up </> toFilePath (takeDirectory f))
| otherwise = printf "library %s" (up </> toFilePath (dropExtension f))
toUrpLine up (UrpDebug) = printf "debug"
toUrpLine up (UrpInclude f) = printf "include %s" (up </> toFilePath f)
toUrpLine up (UrpLink (Left f)) = printf "link %s" (up </> toFilePath f)
toUrpLine up (UrpLink (Right lfl)) = printf "link %s" lfl
toUrpLine up (UrpSrc f _ _) = printf "link %s" (up </> toFilePath (f.="o"))
toUrpLine up (UrpPkgConfig s) = printf "link %s" (maskPkgCfg s)
toUrpLine up (UrpFFI s) = printf "ffi %s" (up </> toFilePath (dropExtensions s))
toUrpLine up (UrpSafeGet s) = printf "safeGet %s" (dropExtensions s)
toUrpLine up (UrpJSFunc s1 s2 s3) = printf "jsFunc %s.%s = %s" s1 s2 s3
toUrpLine up (UrpScript s) = printf "script %s" s
toUrpLine up (UrpClientOnly s) = printf "clientOnly %s" s
toUrpLine up e = error $ "toUrpLine: unhandled case " ++ (show e)
instance ToUrpLine UrpModToken where
toUrpLine up (UrpModule1 f) = up </> toFilePath (dropExtensions f)
toUrpLine up (UrpModule2 f _) = up </> toFilePath (dropExtensions f)
toUrpLine up (UrpModuleSys s) = printf "$/%s" s
newtype UrpGen m a = UrpGen { unUrpGen :: StateT UrpState m a }
deriving(Functor, Applicative, Monad, MonadState UrpState, MonadMake, MonadIO)
toFile f' wr = liftIO $ do
let f = toFilePath f'
createDirectoryIfMissing True (takeDirectory f)
writeFile f $ execWriter $ wr
tempPrefix :: File -> String
tempPrefix f = concat $ map (map nodot) $ splitDirectories f where
nodot '.' = '_'
nodot '/' = '_'
nodot a = a
mkFileRule pfx wr = genFile (tmp_file pfx) $ execWriter $ wr
line :: (MonadWriter String m) => String -> m ()
line s = tell (s++"\n")
uwlib :: File -> UrpGen (Make' IO) () -> Make UWLib
uwlib urpfile m = do
((),s) <- runStateT (unUrpGen m) (defState urpfile)
let u@(Urp _ _ hdr mod) = urpst s
let pkgcfg = (urpPkgCfg u)
forM_ (urpSrcs u) $ \(c,fl) -> do
let flags = concat $ fl : map (\p -> printf "$(shell pkg-config --cflags %s) " p) (urpPkgCfg u)
let i = makevar "URINCL" "-I$(shell urweb -print-cinclude) "
let cc = makevar "URCC" "$(shell $(shell urweb -print-ccompiler) -print-prog-name=gcc)"
let cpp = makevar "URCPP" "$(shell $(shell urweb -print-ccompiler) -print-prog-name=g++)"
let incfl = extvar "UR_CFLAGS"
rule' $ do
case takeExtension c of
".cpp" -> shell [cmd| $cpp -c $incfl $i $(string flags) -o @(c .= "o") $(c) |]
".c" -> shell [cmd| $cc -c $i $incfl $(string flags) -o @(c .= "o") $(c) |]
e -> error ("Unknown C-source extension " ++ e)
inp_in <- mkFileRule (tempPrefix (urpfile .= "in")) $ do
forM hdr (line . toUrpLine (urpUp urpfile))
line ""
forM mod (line . toUrpLine (urpUp urpfile))
rule' $ do
let cpy = [cmd|cat $inp_in|] :: CommandGen' (Make' IO)
let l = foldl' (\a p -> do
let l = makevar (map toUpper $ printf "lib%s" p) (printf "$(shell pkg-config --libs %s)" p)
[cmd| $a | sed 's@@$(string $ maskPkgCfg p)@@$l@@' |]
) cpy pkgcfg
shell [cmd| $l > @urpfile |]
depend (urpDeps u)
depend (urpLibs u)
return $ UWLib u
uwapp :: String -> File -> UrpGen (Make' IO) () -> Make UWExe
uwapp opts urpfile m = do
(UWLib u') <- uwlib urpfile m
let u = u' { uexe = Just (urpfile .= "exe") }
rule $ do
depend urpfile
produce (urpExe u)
case urpSql' u of
Nothing -> return ()
Just sql -> produce sql
depend (makevar "URVERSION" "$(shell urweb -version)")
unsafeShell [cmd|urweb $(string opts) $((takeDirectory urpfile)</>(takeBaseName urpfile))|]
return $ UWExe u
setAutogenDir d = modify $ \s -> s { urautogen = d }
addHdr h = modify $ \s -> let u = urpst s in s { urpst = u { uhdr = (uhdr u) ++ [h] } }
addMod m = modify $ \s -> let u = urpst s in s { urpst = u { umod = (umod u) ++ [m] } }
database :: (MonadMake m) => String -> UrpGen m ()
database dbs = addHdr $ UrpDatabase dbs
allow :: (MonadMake m) => UrpAllow -> String -> UrpGen m ()
allow a s = addHdr $ UrpAllow a s
rewrite :: (MonadMake m) => UrpRewrite -> String -> UrpGen m ()
rewrite a s = addHdr $ UrpRewrite a s
urpUp :: File -> FilePath
urpUp f = F.joinPath $ map (const "..") $ filter (/= ".") $ F.splitDirectories $ F.takeDirectory $ toFilePath f
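-- For example, for a project file located at @a/b/c.urp@ this yields
-- @"../.."@; for a file at the top level it yields @""@.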
class LibraryLike x where
library :: (MonadMake m) => x -> UrpGen m ()
instance LibraryLike [File] where
library ls = do
forM_ ls $ \l -> do
when ((takeExtension l) /= ".urp") $ do
fail $ printf "library declaration '%s' should ends with '.urp'" (toFilePath l)
addHdr $ UrpLibrary l
instance LibraryLike UWLib where
library (UWLib u) = library [urp u]
instance LibraryLike x => LibraryLike (Make x) where
library ml = liftMake ml >>= library
-- | Build a file using an external Makefile facility.
externalMake3 ::
File -- ^ External Makefile
-> File -- ^ External file to refer to
-> String -- ^ The name of the target to run
-> Make [File]
externalMake3 mk f tgt = do
prebuildS [cmd|$(make) -C $(string $ toFilePath $ takeDirectory mk) -f $(string $ takeFileName mk) $(string tgt) |]
return [f]
-- | Build a file using an external Makefile facility.
externalMake' ::
File -- ^ External Makefile
-> File -- ^ External file to refer to
-> Make [File]
externalMake' mk f = do
prebuildS [cmd|$(make) -C $(string $ toFilePath $ takeDirectory mk) -f $(string $ takeFileName mk)|]
return [f]
-- | Build a file from an external project. It is expected that this project has a
-- 'Makefile' in its root directory. The Makefile is invoked with its default target.
externalMake ::
File -- ^ File from the external project to build
-> Make [File]
externalMake f = externalMake3 (takeDirectory f </> "Makefile") f ""
-- | Build a file from an external project. It is expected that this project has a
-- 'Makefile' in its root directory
externalMakeTarget ::
File -- ^ File from the external project to build
-> String
-> Make [File]
externalMakeTarget f tgt = externalMake3 (takeDirectory f </> "Makefile") f tgt
-- | Build a file from an external project. It is expected that this project has a
-- <file>.mk (a Makefile with an unusual name) in its root directory
externalMake2 :: File -> Make [File]
externalMake2 f = externalMake' ((takeDirectory f </> takeFileName f) .= "mk") f
ur, module_ :: (MonadMake m) => UrpModToken -> UrpGen m ()
module_ = addMod
ur = addMod
pair f = UrpModule2 (f.="ur") (f.="urs")
single f = UrpModule1 f
sys s = UrpModuleSys s
debug :: (MonadMake m) => UrpGen m ()
debug = addHdr $ UrpDebug
include :: (MonadMake m) => File -> UrpGen m ()
include f = addHdr $ UrpInclude f
link' :: (MonadMake m) => File -> String -> UrpGen m ()
link' f fl = do
addHdr $ UrpLink (Left f)
when (fl /= "") $ do
addHdr $ UrpLink (Right fl)
link :: (MonadMake m) => File -> UrpGen m ()
link f = link' f []
csrc' :: (MonadMake m) => File -> String -> String -> UrpGen m ()
csrc' f cfl lfl = do
addHdr $ UrpSrc f cfl lfl
when (lfl /= "") $ do
addHdr $ UrpLink (Right lfl)
csrc :: (MonadMake m) => File -> UrpGen m ()
csrc f = csrc' f [] []
ffi :: (MonadMake m) => File -> UrpGen m ()
ffi s = addHdr $ UrpFFI s
sql :: (MonadMake m) => File -> UrpGen m ()
sql f = addHdr $ UrpSql f
jsFunc m u j = addHdr $ UrpJSFunc m u j
safeGet' :: (MonadMake m) => String -> UrpGen m ()
safeGet' uri
| otherwise = addHdr $ UrpSafeGet uri
safeGet :: (MonadMake m) => File -> String -> UrpGen m ()
safeGet m fn
| (takeExtension m) /= ".ur" = fail (printf "safeGet: not an Ur/Web module name specified (%s)" (toFilePath m))
| otherwise = safeGet' (printf "%s/%s" (takeBaseName m) fn)
url = UrpUrl
mime = UrpMime
style = UrpStyle
all = UrpAll
table = UrpTable
env = UrpEnvVar
hdr = UrpHeader
requestHeader = UrpHeader
responseHeader = UrpResponseHeader
script :: (MonadMake m) => String -> UrpGen m ()
script s = addHdr $ UrpScript s
guessMime inf = fixup $ BS.unpack (defaultMimeLookup (fromString inf)) where
fixup "application/javascript" = "text/javascript"
fixup m = m
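-- For example (sketch): @guessMime "app.js"@ gives @"text/javascript"@
-- (remapped from @application/javascript@), while @guessMime "logo.png"@
-- gives @"image/png"@.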
pkgconfig :: (MonadMake m) => String -> UrpGen m ()
pkgconfig l = addHdr $ UrpPkgConfig l
type BinOptions = [ BinOption ]
data BinOption = NoScan | UseUrembed deriving(Show, Eq)
bin :: (MonadIO m, MonadMake m) => File -> BinOptions -> UrpGen m ()
bin src bo = do
let ds = if NoScan `elem` bo then "--dont-scan" else ""
case UseUrembed `elem` bo of
False -> do
c <- readFileForMake src
bin' (toFilePath src) c bo
True -> do
a <- urautogen `liftM` get
library $ do
rule $ shell [cmd|urembed -o @(a </> (takeFileName src .="urp")) $(string ds) $src|]
bin' :: (MonadIO m, MonadMake m) => FilePath -> BS.ByteString -> BinOptions -> UrpGen m ()
bin' src_name src_contents' bo = do
dir <- urautogen `liftM` get
let mm = guessMime src_name
let mn = (mkname src_name)
let wrapmod ext = (dir </> mn) .= ext
let binmod ext = (dir </> (mn ++ "_c")) .= ext
let jsmod ext = (dir </> (mn ++ "_js")) .= ext
(src_contents, nurls) <-
if not (NoScan `elem` bo) then
if ((takeExtension src_name) == ".css") then do
(e,urls) <- return $ runWriter $ parse_css src_contents' $ \x -> do
let (url, query) = span (\c -> not $ elem c "?#") x
let mn = modname (const (fromFilePath $ mkname url))
tell [ mn ]
return $ "/" ++ mn ++ "/blobpage" ++ query
case e of
Left e -> do
fail $ printf "Error while parsing css %s: %s" src_name (show e)
Right b -> do
return (b, L.nub urls)
else
return (src_contents', [])
else
return (src_contents', [])
-- Binary module
let binfunc = printf "uw_%s_binary" (modname binmod)
let textfunc = printf "uw_%s_text" (modname binmod)
toFile (binmod ".c") $ do
line $ "/* Thanks, http://stupefydeveloper.blogspot.ru/2008/08/cc-embed-binary-data-into-elf.html */"
line $ "#include <urweb.h>"
line $ "#include <stdio.h>"
line $ printf "#define BLOBSZ %d" (BS.length src_contents)
line $ "static char blob[BLOBSZ];"
line $ "uw_Basis_blob " ++ binfunc ++ " (uw_context ctx, uw_unit unit)"
line $ "{"
line $ " uw_Basis_blob uwblob;"
line $ " uwblob.data = &blob[0];"
line $ " uwblob.size = BLOBSZ;"
line $ " return uwblob;"
line $ "}"
line $ ""
line $ "uw_Basis_string " ++ textfunc ++ " (uw_context ctx, uw_unit unit) {"
line $ " char* data = &blob[0];"
line $ " size_t size = sizeof(blob);"
line $ " char * c = uw_malloc(ctx, size+1);"
line $ " char * write = c;"
line $ " int i;"
line $ " for (i = 0; i < size; i++) {"
line $ " *write = data[i];"
line $ " if (*write == '\\0')"
line $ " *write = '\\n';"
line $ " *write++;"
line $ " }"
line $ " *write=0;"
line $ " return c;"
line $ " }"
line $ ""
let append f wr = liftIO $ BS.appendFile f $ execWriter $ wr
append (toFilePath (binmod ".c")) $ do
let line s = tell ((BS.pack s)`mappend`(BS.pack "\n"))
line $ ""
line $ "static char blob[BLOBSZ] = {"
let buf = reverse $ BS.foldl (\a c -> (BS.pack (printf "0x%02X ," c)) : a) [] src_contents
tell (BS.concat buf)
line $ "};"
line $ ""
toFile (binmod ".h") $ do
line $ "#include <urweb.h>"
line $ "uw_Basis_blob " ++ binfunc ++ " (uw_context ctx, uw_unit unit);"
line $ "uw_Basis_string " ++ textfunc ++ " (uw_context ctx, uw_unit unit);"
toFile (binmod ".urs") $ do
line $ "val binary : unit -> transaction blob"
line $ "val text : unit -> transaction string"
include (binmod ".h")
csrc (binmod ".c")
ffi (binmod ".urs")
-- JavaScript FFI Module
(jstypes,jsdecls) <-
if not (NoScan `elem` bo) then
if ((takeExtension src_name) == ".js") then do
e <- liftMake $ parse_js src_contents
case e of
Left e -> do
fail $ printf "Error while parsing javascript %s: %s" src_name e
Right decls -> do
return decls
else
return ([],[])
else
return ([],[])
forM_ jsdecls $ \decl -> do
addHdr $ UrpJSFunc (modname jsmod) (urname decl) (jsname decl)
addHdr $ UrpClientOnly $ (modname jsmod) ++ "." ++ (urname decl)
toFile (jsmod ".urs") $ do
forM_ jstypes $ \decl -> line (urtdecl decl)
forM_ jsdecls $ \decl -> line (urdecl decl)
ffi (jsmod ".urs")
-- Wrapper module
toFile (wrapmod ".urs") $ do
line $ "val binary : unit -> transaction blob"
line $ "val text : unit -> transaction string"
line $ "val blobpage : unit -> transaction page"
line $ "val geturl : url"
forM_ jstypes $ \decl -> line (urtdecl decl)
forM_ jsdecls $ \d -> line (urdecl d)
line $ "val propagated_urls : list url"
toFile (wrapmod ".ur") $ do
line $ "val binary = " ++ modname binmod ++ ".binary"
line $ "val text = " ++ modname binmod ++ ".text"
forM_ jsdecls $ \d ->
line $ printf "val %s = %s.%s" (urname d) (modname jsmod) (urname d)
line $ printf "fun blobpage {} = b <- binary () ; returnBlob b (blessMime \"%s\")" mm
line $ "val geturl = url(blobpage {})"
line $ "val propagated_urls = "
forM_ nurls $ \u -> do
line $ " " ++ u ++ ".geturl ::"
line $ " []"
allow mime mm
safeGet (wrapmod ".ur") "blobpage"
safeGet (wrapmod ".ur") "blob"
module_ (pair $ wrapmod ".ur")
where
mkname :: FilePath -> String
mkname = upper1 . notnum . map under . takeFileName where
under c | c`elem`"_-. /" = '_'
| otherwise = c
upper1 [] = []
upper1 (x:xs) = (toUpper x) : xs
notnum n@(x:xs) | isDigit x = "f" ++ n
| otherwise = n
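    -- For example, mkname "01-main.css" is "F01_main_css" and
    -- mkname "foo-bar.js" is "Foo_bar_js".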
modname :: (String -> File) -> String
modname f = upper1 . takeBaseName $ f ".urs" where
upper1 [] = []
upper1 (x:xs) = (toUpper x) : xs
{-
- Content parsing helpers
-}
data JSFunc = JSFunc {
urdecl :: String -- ^ URS declaration for this function
, urname :: String -- ^ UrWeb name of this function
, jsname :: String -- ^ JavaScript name of this function
} deriving(Show)
data JSType = JSType {
urtdecl :: String
} deriving(Show)
-- | Parse the JavaScript file, extract top-level functions, convert their
-- signatures into Ur/Web format, and return them as a list of strings
parse_js :: BS.ByteString -> Make (Either String ([JSType],[JSFunc]))
parse_js contents = do
runErrorT $ do
c <- either fail return (JS.parse (BS.unpack contents) "<urembed_input>")
f <- concat <$> (forM (findTopLevelFunctions c) $ \f@(fn:_) -> (do
ts <- mapM extractEmbeddedType (f`zip`(False:repeat True))
let urdecl_ = urs_line ts
let urname_ = (fst (head ts))
let jsname_ = fn
return [JSFunc urdecl_ urname_ jsname_]
) `catchError` (\(e::String) -> do
err $ printf "ignoring function %s, reason:\n\t%s" fn e
return []))
t <- concat <$> (forM (findTopLevelVars c) $ \vn -> (do
(n,t) <- extractEmbeddedType (vn,False)
return [JSType $ printf "type %s" t]
)`catchError` (\(e::String) -> do
err $ printf "ignoring variable %s, reason:\n\t%s" vn e
return []))
return (t,f)
where
urs_line :: [(String,String)] -> String
urs_line [] = error "wrong function signature"
urs_line ((n,nt):args) = printf "val %s : %s" n (fmtargs args) where
fmtargs :: [(String,String)] -> String
fmtargs ((an,at):as) = printf "%s -> %s" at (fmtargs as)
fmtargs [] = let pf = L.stripPrefix "pure_" nt in
case pf of
Just p -> p
Nothing -> printf "transaction %s" nt
extractEmbeddedType :: (Monad m) => (String,Bool) -> m (String,String)
extractEmbeddedType ([],_) = error "BUG: empty identifier"
extractEmbeddedType (name,fallback) = check (msum [span2 "__" name , span2 "_as_" name]) where
check (Just (n,t)) = return (n,t)
check _ | fallback == True = return (name,name)
| fallback == False = fail $ printf "Can't extract the type from the identifier '%s'" name
findTopLevelFunctions :: JSNode -> [[String]]
findTopLevelFunctions top = map decls $ listify is_func top where
is_func n@(JSFunction a b c d e f) = True
is_func _ = False
decls (JSFunction a b c d e f) = (identifiers b) ++ (identifiersC d)
findTopLevelVars :: JSNode -> [String]
findTopLevelVars top = map decls $ listify is_var top where
is_var n@(JSVarDecl a []) = True
is_var _ = False
decls (JSVarDecl a _) = (head $ identifiers a);
identifiersC x = map name $ listify ids x where
ids i@(NT (JSIdentifier s) _ com) = True
ids _ = False
name (NT (JSIdentifier n) _ com)
| not $ null $ comglue = n ++ "_as_" ++ comglue
| otherwise = n
where
comglue = concat $ map
(\c ->
case c of
CommentA _ c -> unwords $ filter (\c -> c /= "/*" && c /= "*/") $ words c
_ -> "") com
identifiers x = map name $ listify ids x where
ids i@(JSIdentifier s) = True
ids _ = False
name (JSIdentifier n) = n
err,out :: (MonadIO m) => String -> m ()
err = hio stderr
out = hio stdout
span2 :: String -> String -> Maybe (String,String)
span2 inf s = span' [] s where
span' _ [] = Nothing
span' acc (c:cs)
| L.isPrefixOf inf (c:cs) = Just (acc, drop (length inf) (c:cs))
| otherwise = span' (acc++[c]) cs
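-- For example, span2 "_as_" "draw_as_int" == Just ("draw","int"); this is how
-- extractEmbeddedType above splits an identifier from its embedded Ur/Web type.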
hio :: (MonadIO m) => Handle -> String -> m ()
hio h = liftIO . hPutStrLn h
transform_css :: (Stream s m Char) => ParsecT s u m [Either ByteString [Char]]
transform_css = do
l1 <- map Left <$> blabla
l2 <- map Right <$> funs
e <- try (eof >> return True) <|> (return False)
case e of
True -> return (l1++l2)
False -> do
l <- transform_css
return (l1 ++ l2 ++ l)
where
symbol = P.symbol l
lexeme = P.lexeme l
string = lexeme (
between (char '\'') (char '\'') (strchars '\'') <|>
between (char '"') (char '"') (strchars '"')) <|>
manyTill anyChar (try (char ')'))
where
strchars e = many $ satisfy (/=e)
fun1 = lexeme $ do
symbol "url"
symbol "("
s <- string
symbol ")"
return s
blabla = do
l <- manyTill anyChar (eof <|> (try (lookAhead fun1) >> return ()))
case null l of
True -> return []
False -> return [BS.pack l]
funs = many (try fun1)
l = P.makeTokenParser $ P.LanguageDef
{ P.commentStart = "/*"
, P.commentEnd = "*/"
, P.commentLine = "//"
, P.nestedComments = True
, P.identStart = P.letter
, P.identLetter = P.alphaNum <|> oneOf "_@-"
, P.reservedNames = []
, P.reservedOpNames = []
, P.caseSensitive = False
, P.opStart = l
, P.opLetter = l
}
where l = oneOf ":!#$%&*+./<=>?@\\^|-~"
parse_css :: (Monad m) => BS.ByteString -> (String -> m String) -> m (Either P.ParseError BS.ByteString)
parse_css inp f = do
case P.runParser transform_css () "-" inp of
Left e -> return $ Left e
Right pr -> do
b <- forM pr $ \i -> do
case i of
Left bs -> return bs
Right u -> do
u' <- f u
return (BS.pack $ "url('" ++ u' ++ "')")
return $ Right $ BS.concat b
| grwlf/cake3 | src/Development/Cake3/Ext/UrWeb1.hs | bsd-3-clause | 24,818 | 10 | 27 | 6,434 | 9,181 | 4,625 | 4,556 | 601 | 12 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- | Build project(s).
module Stack.Build
(build
,clean)
where
import Control.Monad
import Control.Monad.Catch (MonadCatch, MonadMask)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Resource
import Data.Either
import Data.Function
import Data.Map.Strict (Map)
import qualified Data.Set as S
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path.IO
import Prelude hiding (FilePath, writeFile)
import Stack.Build.ConstructPlan
import Stack.Build.Execute
import Stack.Build.Installed
import Stack.Build.Source
import Stack.Build.Types
import Stack.Constants
import Stack.Fetch as Fetch
import Stack.GhcPkg
import Stack.Package
import Stack.Types
import Stack.Types.Internal
{- EKB TODO: doc generation for stack-doc-server
#ifndef mingw32_HOST_OS
import System.Posix.Files (createSymbolicLink,removeLink)
#endif
--}
type M env m = (MonadIO m,MonadReader env m,HasHttpManager env,HasBuildConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,MonadMask m,HasLogLevel env)
-- | Build
build :: M env m => BuildOpts -> m ()
build bopts = do
menv <- getMinimalEnvOverride
cabalPkgVer <- getCabalPkgVer menv
(mbp, locals, sourceMap) <- loadSourceMap bopts
(installedMap, locallyRegistered) <- getInstalled menv profiling sourceMap
baseConfigOpts <- mkBaseConfigOpts bopts
let extraToBuild = either (const []) id $ boptsTargets bopts
plan <- withLoadPackage menv $ \loadPackage ->
constructPlan mbp baseConfigOpts locals extraToBuild locallyRegistered loadPackage sourceMap installedMap
if boptsDryrun bopts
then printPlan plan
else executePlan menv bopts baseConfigOpts cabalPkgVer locals plan
where
profiling = boptsLibProfile bopts || boptsExeProfile bopts
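    -- Note: when boptsDryrun is set the constructed plan is only printed;
    -- otherwise executePlan runs it against the snapshot/local package databases
    -- resolved by mkBaseConfigOpts below.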
-- | Get the @BaseConfigOpts@ necessary for constructing configure options
mkBaseConfigOpts :: (MonadIO m, MonadReader env m, HasBuildConfig env, MonadThrow m)
=> BuildOpts -> m BaseConfigOpts
mkBaseConfigOpts bopts = do
snapDBPath <- packageDatabaseDeps
localDBPath <- packageDatabaseLocal
snapInstallRoot <- installationRootDeps
localInstallRoot <- installationRootLocal
return BaseConfigOpts
{ bcoSnapDB = snapDBPath
, bcoLocalDB = localDBPath
, bcoSnapInstallRoot = snapInstallRoot
, bcoLocalInstallRoot = localInstallRoot
, bcoBuildOpts = bopts
}
-- | Provide a function for loading package information from the package index
withLoadPackage :: M env m
=> EnvOverride
-> ((PackageName -> Version -> Map FlagName Bool -> IO Package) -> m a)
-> m a
withLoadPackage menv inner = do
bconfig <- asks getBuildConfig
withCabalLoader menv $ \cabalLoader ->
inner $ \name version flags -> do
bs <- cabalLoader $ PackageIdentifier name version -- TODO automatically update index the first time this fails
readPackageBS (depPackageConfig bconfig flags) bs
where
-- | Package config to be used for dependencies
depPackageConfig :: BuildConfig -> Map FlagName Bool -> PackageConfig
depPackageConfig bconfig flags = PackageConfig
{ packageConfigEnableTests = False
, packageConfigEnableBenchmarks = False
, packageConfigFlags = flags
, packageConfigGhcVersion = bcGhcVersion bconfig
, packageConfigPlatform = configPlatform (getConfig bconfig)
}
-- | Reset the build (remove Shake database and .gen files).
clean :: (M env m) => m ()
clean = do
bconfig <- asks getBuildConfig
menv <- getMinimalEnvOverride
cabalPkgVer <- getCabalPkgVer menv
forM_
(S.toList (bcPackages bconfig))
(distDirFromDir cabalPkgVer >=> removeTreeIfExists)
----------------------------------------------------------
-- DEAD CODE BELOW HERE
----------------------------------------------------------
{- EKB TODO: doc generation for stack-doc-server
(boptsFinalAction bopts == DoHaddock)
(buildDocIndex
(wanted pwd)
docLoc
packages
mgr
logLevel)
-}
{- EKB TODO: doc generation for stack-doc-server
-- | Build the haddock documentation index and contents.
buildDocIndex :: (Package -> Wanted)
-> Path Abs Dir
-> Set Package
-> Manager
-> LogLevel
-> Rules ()
buildDocIndex wanted docLoc packages mgr logLevel =
do runHaddock "--gen-contents" $(mkRelFile "index.html")
runHaddock "--gen-index" $(mkRelFile "doc-index.html")
combineHoogle
where
runWithLogging = runStackLoggingT mgr logLevel
runHaddock genOpt destFilename =
do let destPath = toFilePath (docLoc </> destFilename)
want [destPath]
destPath %> \_ ->
runWithLogging
(do needDeps
ifcOpts <- liftIO (fmap concat (mapM toInterfaceOpt (S.toList packages)))
runIn docLoc
"haddock"
mempty
(genOpt:ifcOpts)
Nothing)
toInterfaceOpt package =
do let pv = joinPkgVer (packageName package,packageVersion package)
srcPath = (toFilePath docLoc) ++ "/" ++
pv ++ "/" ++
packageNameString (packageName package) ++ "." ++
haddockExtension
exists <- doesFileExist srcPath
return (if exists
then ["-i"
,"../" ++
pv ++
"," ++
srcPath]
else [])
combineHoogle =
do let destHoogleDbLoc = hoogleDatabaseFile docLoc
destPath = toFilePath destHoogleDbLoc
want [destPath]
destPath %> \_ ->
runWithLogging
(do needDeps
srcHoogleDbs <- liftIO (fmap concat (mapM toSrcHoogleDb (S.toList packages)))
callProcess
"hoogle"
("combine" :
"-o" :
toFilePath destHoogleDbLoc :
srcHoogleDbs))
toSrcHoogleDb package =
do let srcPath = toFilePath docLoc ++ "/" ++
joinPkgVer (packageName package,packageVersion package) ++ "/" ++
packageNameString (packageName package) ++ "." ++
hoogleDbExtension
exists <- doesFileExist srcPath
return (if exists
then [srcPath]
else [])
needDeps =
need (concatMap (\package -> if wanted package == Wanted
then let dir = packageDir package
in [toFilePath (builtFileFromDir dir)]
else [])
(S.toList packages))
#ifndef mingw32_HOST_OS
-- | Remove existing doc links for a package from @~/.shake/doc@.
removeDocLinks :: Path Abs Dir -> Package -> IO ()
removeDocLinks docLoc package =
do createDirectoryIfMissing True
(toFilePath docLoc)
userDocLs <-
fmap (map (toFilePath docLoc ++))
(getDirectoryContents (toFilePath docLoc))
forM_ userDocLs $
\docPath ->
do isDir <- doesDirectoryExist docPath
when isDir
(case breakPkgVer (FilePath.takeFileName docPath) of
Just (p,_) ->
when (p == packageName package)
(removeLink docPath)
Nothing -> return ())
-- | Add link for package to @~/.shake/doc@.
createDocLinks :: Path Abs Dir -> Package -> IO ()
createDocLinks docLoc package =
do let pkgVer =
joinPkgVer (packageName package,(packageVersion package))
pkgVerLoc <- liftIO (parseRelDir pkgVer)
let pkgDestDocLoc = docLoc </> pkgVerLoc
pkgDestDocPath =
FilePath.dropTrailingPathSeparator (toFilePath pkgDestDocLoc)
cabalDocLoc = parent docLoc </>
$(mkRelDir "share/doc/")
haddockLocs <-
do cabalDocExists <- doesDirectoryExist (toFilePath cabalDocLoc)
if cabalDocExists
then findFiles cabalDocLoc
(\fileLoc ->
FilePath.takeExtensions (toFilePath fileLoc) ==
"." ++ haddockExtension &&
dirname (parent fileLoc) ==
$(mkRelDir "html/") &&
toFilePath (dirname (parent (parent fileLoc))) ==
(pkgVer ++ "/"))
(\dirLoc ->
not (isHiddenDir dirLoc) &&
dirname (parent (parent dirLoc)) /=
$(mkRelDir "html/"))
else return []
case haddockLocs of
[haddockLoc] ->
case stripDir (parent docLoc)
haddockLoc of
Just relHaddockPath ->
do let srcRelPathCollapsed =
FilePath.takeDirectory (FilePath.dropTrailingPathSeparator (toFilePath relHaddockPath))
{-srcRelPath = "../" ++ srcRelPathCollapsed-}
createSymbolicLink (FilePath.dropTrailingPathSeparator srcRelPathCollapsed)
pkgDestDocPath
Nothing -> return ()
_ -> return ()
#endif /* not defined(mingw32_HOST_OS) */
-- | Get @-i@ arguments for haddock for dependencies.
haddockInterfaceOpts :: Path Abs Dir -> Package -> Set Package -> IO [String]
haddockInterfaceOpts userDocLoc package packages =
do mglobalDocLoc <- getGlobalDocPath
globalPkgVers <-
case mglobalDocLoc of
Nothing -> return M.empty
Just globalDocLoc -> getDocPackages globalDocLoc
let toInterfaceOpt pn =
case find (\dpi -> packageName dpi == pn) (S.toList packages) of
Nothing ->
return (case (M.lookup pn globalPkgVers,mglobalDocLoc) of
(Just (v:_),Just globalDocLoc) ->
["-i"
,"../" ++ joinPkgVer (pn,v) ++
"," ++
toFilePath globalDocLoc ++ "/" ++
joinPkgVer (pn,v) ++ "/" ++
packageNameString pn ++ "." ++
haddockExtension]
_ -> [])
Just dpi ->
do let destPath = (toFilePath userDocLoc ++ "/" ++
joinPkgVer (pn,packageVersion dpi) ++ "/" ++
packageNameString pn ++ "." ++
haddockExtension)
exists <- doesFileExist destPath
return (if exists
then ["-i"
,"../" ++
joinPkgVer (pn,packageVersion dpi) ++
"," ++
destPath]
else [])
--TODO: use not only direct dependencies, but dependencies of dependencies etc.
--(e.g. redis-fp doesn't include @text@ in its dependencies which means the 'Text'
--datatype isn't linked in its haddocks)
fmap concat (mapM toInterfaceOpt (S.toList (packageAllDeps package)))
--------------------------------------------------------------------------------
-- Paths
{- EKB TODO: doc generation for stack-doc-server
-- | Returns true for paths whose last directory component begins with ".".
isHiddenDir :: Path b Dir -> Bool
isHiddenDir = isPrefixOf "." . toFilePath . dirname
-}
--}
| mietek/stack | src/Stack/Build.hs | bsd-3-clause | 12,655 | 0 | 15 | 4,506 | 899 | 486 | 413 | 91 | 2 |
{-# OPTIONS -Wall #-}
-- The pec embedded compiler
-- Copyright 2011-2012, Brett Letner
module Pec.LLVM (dModule) where
import Control.Concurrent
import Data.Char
import Data.Generics.Uniplate.Data
import Data.List
import Data.Maybe
import Development.Shake.FilePath
import Grm.Prims
import Language.LLVM.Abs
import Numeric
import Pec.IUtil
import qualified Language.Pir.Abs as I
data St = St
{ strings :: [(String,String)]
, free_vars :: [String]
, enums :: [(String,Integer)]
, fields :: [(String,Integer)]
, tydecls :: [(String,I.TyDecl)]
, defines :: [Define]
}
dModule :: FilePath -> I.Module -> IO ()
dModule outdir m@(I.Module a _ _) = do
xs <- readMVar gTyDecls
let st0 = St{ strings = ss
, enums = concatMap tyEnums $ universeBi xs
, free_vars = map vtvar ifvs
, fields = concatMap tyFields $ universeBi xs
, tydecls = [ (dTypeVar y, z) | (y, z) <- xs ]
, defines = []
}
let ds = map (dTypeD st0) xs
let st = st0{ defines = ds }
writeFileBinary (joinPath [outdir, fn]) $
ppShow $
transformBi elimNoOpS $
transformBi allocasAtStart $
Module $
map dStringD ss ++
ds ++
map dBuiltin builtinTbl ++
map (dDeclare st) ifvs ++
map (dDefine st) cs
where
I.Module _ _ cs = transformBi inlineAtoms m
ifvs = nub $ concatMap fvsIDefine cs
fn = n ++ ".ll"
n = case a of
"" -> error "unused:dModule"
_ -> init a
ss = [ (s, "@.str" ++ show i)
| (I.StringL s ,i) <- zip (nub $ sort $ universeBi m)
[ 0 :: Int .. ]]
dDeclare :: St -> I.TVar -> Define
dDeclare st x = case ty of
PtrT (FunT a bs) -> Declare a v bs
_ -> error $ "declare not a function type:" ++ ppShow x
where
TVar ty v = dTVar st x
dTypeD :: St -> (I.Type, I.TyDecl) -> Define
dTypeD st (x,y) = TypeD (dTypeVar x) $ dTyDecl st y
dTypeVar :: I.Type -> String
dTypeVar x0 = '%' : loop x0
where
loop x = case x of
I.Type a [] -> a
I.Type a xs ->
a ++ "$" ++ concat (intersperse "." $ map loop xs) ++ "$"
dTyDecl :: St -> I.TyDecl -> Type
dTyDecl st x = case x of
I.TyEnum bs -> lengthT bs
I.TyRecord bs -> StructT $ map dFieldT bs
I.TyTagged bs -> StructT
[ lengthT bs
, maximumBy (\a b -> compare (sizeT st a) (sizeT st b))
[ dType t | I.ConC _ t <- bs ]
]
lengthT :: [a] -> Type
lengthT = IntT . show . bitsToEncode . genericLength
sizeT :: St -> Type -> Integer
sizeT st x = case x of
VoidT -> 0
CharT -> 8
FloatT -> 32
DoubleT -> 64
PtrT{} -> sizeofptr
FunT{} -> sizeofptr
IntT a -> read a
StructT bs -> sum $ map (sizeT st) bs
ArrayT a b -> read a * (sizeT st b)
UserT a -> case lookup a $ tydecls st of
Just b -> sizeT st $ dTyDecl st b
Nothing -> error $ "unused:sizeT:UserT:" ++ ppShow x
VarArgsT -> error $ "unused:sizeT:VarArgsT:" ++ ppShow x
sizeofptr :: Integer
sizeofptr = 32
dFieldT :: I.FieldT -> Type
dFieldT (I.FieldT _ b) = dType b
dVar :: Bool -> String -> String
dVar is_free v = (if is_free then '@' else '%') : map f v
where
f c = case c of
'~' -> '$'
_ -> c
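-- For example, dVar True "foo~bar" is "@foo$bar" and dVar False "x" is "%x",
-- following LLVM's convention of @-prefixed globals and %-prefixed locals.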
dDefine :: St -> I.Define -> Define
dDefine st (I.Define a b cs ds) =
Define (dType a) (dVar True b) (map (dTVar st) cs)
(concatMap (dStmt st) ds)
dStmt :: St -> I.Stmt -> [Stmt]
dStmt st x = case x of
I.LetS a b -> dExp st a b
I.StoreS a b -> [ StoreS (dAtom st b) (dTVar st a) ]
I.CallS a b -> [ CallS (dTVar st a) (map (dAtom st) b) ]
I.SwitchS a b cs -> concat
[ [ SwitchS (dAtom st a) l1 $ map (dSwitchAlt st) lcs ]
, [ LabelS l1 ]
, concatMap (dStmt st) b
, [ Br0S l0 ]
, concatMap (dSwitchAltBody st l0) lcs
, [ LabelS l0 ]
]
where
l0 = uLbl a
l1 = uLbl b
lcs = [ (uLbl c, c) | c <- cs ]
I.IfS a b c -> concat
[ [ BrS (duAtom st a) l1 l2 ]
, [ LabelS l1 ]
, concatMap (dStmt st) b
, [ Br0S l3 ]
, [ LabelS l2 ]
, concatMap (dStmt st) c
, [ Br0S l3 ]
, [ LabelS l3 ]
]
where
l1 = uLbl a
l2 = uLbl b
l3 = uLbl c
I.WhenS a b -> concat
[ [ BrS (duAtom st a) l1 l2 ]
, [ LabelS l1 ]
, concatMap (dStmt st) b
, [ Br0S l2 ]
, [ LabelS l2 ]
]
where
l1 = uLbl a
l2 = uLbl b
I.WhileS a b c -> concat
[ [ Br0S l0 ]
, [ LabelS l0 ]
, concatMap (dStmt st) a
, [ BrS (duAtom st b) l1 l2 ]
, [ LabelS l1 ]
, concatMap (dStmt st) c
, [ Br0S l0 ]
, [ LabelS l2 ]
]
where
l0 = uLbl a
l1 = uLbl b
l2 = uLbl c
I.ReturnS a -> [ ReturnS $ dAtom st a ]
I.NoOpS -> []
uLbl :: a -> String
uLbl a = uId a "Lbl"
dSwitchAlt :: St -> (String, I.SwitchAlt) -> SwitchAlt
dSwitchAlt st (lbl, I.SwitchAlt a _) = SwitchAlt tl lbl
where
tl = case dTLit st a of
TLit (PtrT (FunT b _)) c -> TLit b c -- BAL: Shouldn't base report the correct type here without the need for this fixup?
b -> b
dSwitchAltBody :: St -> String -> (String, I.SwitchAlt) -> [Stmt]
dSwitchAltBody st lbl0 (lbl, I.SwitchAlt _ b) = concat
[ [ LabelS lbl ]
, concatMap (dStmt st) b
, [ Br0S lbl0 ]
]
variantTypes :: St -> Exp -> (Type,Type)
variantTypes st x = case [ (a, b) | TypeD v (StructT [a,b]) <- defines st, v == v0 ] of
[y] -> y
_ -> error $ "unused:variantTypes:" ++ ppShow x
where
IdxE (TVar (PtrT (UserT v0)) _) _ = x
fldE :: TVar -> Integer -> Exp
fldE a i = IdxE a $ LitA $ TLit (IntT "32") $ NmbrL $ show i
bitcastE :: TVar -> Type -> Exp
bitcastE = CastE Bitcast
dExp :: St -> I.TVar -> I.Exp -> [Stmt]
dExp st tv@(I.TVar v t) x = case x of
I.CallE (I.TVar "tagv" _) [I.VarA b] ->
[ LetS v1 $ fldE (dTVar st b) 0, letS $ LoadE $ TVar (PtrT $ dType t) v1 ]
where
v1 = uId b "%.tag"
I.CallE (I.TVar "un" _) [_, I.VarA c] ->
[ LetS v1 e, letS $ bitcastE (TVar (PtrT ta) v1) tb ]
where
v1 = uId c "%.data"
e = fldE (dTVar st c) 1
(_,ta) = variantTypes st e
tb = dType t
I.CallE (I.TVar "mk" a) [I.LitA (I.TLit (I.StringL b) _)] -> fst $ dTag st tv a b
I.CallE (I.TVar "mk" a) [I.LitA (I.TLit (I.StringL b) _), c] ->
ss0 ++
[ LetS datap0 $ fldE tv1 1
, LetS datap1 $ bitcastE (TVar (PtrT tb) datap0) (PtrT tc)
, StoreS atomc (TVar (PtrT tc) datap1)
, s
]
where
(ss,(tv1,tb)) = dTag st tv a b
(ss0, s) = (init ss, last ss)
datap0 = uId c "%.data"
datap1 = uId datap0 "%.data"
atomc = dAtom st c
tc = tyAtom atomc
I.CallE a [I.VarA b] | isJust mi -> [ letS $ fldE (dTVar st b) $ fromJust mi ]
where mi = lookup (vtvar a) $ fields st
I.CallE (I.TVar "idx" _) [I.VarA b, c] -> [ letS $ IdxE (dTVar st b) $ dAtom st c ]
I.CallE a [b,c] | isBinOp a -> [ letS $ llvmBinOp st a b c ]
I.CallE a b -> [ letS $ CallE (dTVar st a) (map (dAtom st) b) ]
I.CastE a b -> [ letS $ CastE cast tva tb ]
where
tva@(TVar ta _) = dTVar st a
tb = dType b
y = ttvar a
sa = sizeT st ta
sb = sizeT st tb
cast
| isSigned y && isFloating b = Sitofp
| isUnsigned y && isFloating b = Uitofp
| isFloating y && isSigned b = Fptosi
| isFloating y && isUnsigned b = Fptoui
| isFloating y && isFloating b && sa < sb = Fpext
| isFloating y && isFloating b && sa > sb = Fptrunc
| isSigned y && isSigned b && sa < sb = Sext
| isSigned y && isSigned b && sa > sb = Trunc
| isUnsigned y && isUnsigned b && sa < sb = Zext
| isUnsigned y && isUnsigned b && sa > sb = Trunc
| otherwise = Bitcast
I.AllocaE a -> [ letS $ AllocaE $ dType a ]
I.LoadE a -> [ letS $ LoadE $ dTVar st a ]
I.AtomE a -> [ letS $ AtomE $ dAtom st a ]
where
letS = LetS (dVar False v)
dTag :: St -> I.TVar -> a -> String -> ([Stmt], (TVar, Type))
dTag st tv a b =
([ LetS v1 $ AllocaE t
, LetS tagp tagfld
, StoreS (LitA $ TLit ta $ dEnum st b) (TVar (PtrT ta) tagp)
, LetS v0 $ LoadE tv1
], (tv1,tb))
where
TVar t v0 = dTVar st tv
v1 = uId a "%.v"
tv1 = TVar (PtrT t) v1
tagp = uId b "%.tag"
tagfld = fldE tv1 0
(ta,tb) = variantTypes st tagfld
duAtom :: St -> I.Atom -> UAtom
duAtom st = uAtom . dAtom st
llvmBinOp :: St -> I.TVar -> I.Atom -> I.Atom -> Exp
llvmBinOp st a b c =
BinOpE (f ty) (dType ty) (duAtom st b) (duAtom st c)
where
f = fromJust $ lookup (vtvar a) binOpTbl
ty = tatom b
tyAtom :: Atom -> Type
tyAtom x = case x of
LitA (TLit a _) -> a
VarA (TVar a _) -> a
uAtom :: Atom -> UAtom
uAtom x = case x of
LitA (TLit _ b) -> LitUA b
VarA (TVar _ b) -> VarUA b
dAtom :: St -> I.Atom -> Atom
dAtom st x = case x of
I.LitA a -> LitA $ dTLit st a
I.VarA a -> VarA $ dTVar st a
dLit :: St -> I.Type -> I.Lit -> Lit
dLit st t x = case x of
I.StringL a -> case lookup a $ strings st of
Just v -> StringL (show $ length a + 1) v
Nothing -> error $ "unused:dLit:string"
I.NmbrL a
| isFloating t -> NmbrL $ show (readNumber a :: Double)
| isFloat a -> error $ "non-integral literal:" ++ a
| otherwise -> NmbrL $ show (readNumber a :: Integer)
I.CharL a -> NmbrL $ show $ ord a
I.EnumL a -> dEnum st a
I.VoidL -> VoidL
dEnum :: St -> String -> Lit
dEnum st x = case x of
"False_" -> FalseL
"True_" -> TrueL
_ -> case lookup x $ enums st of
Nothing -> error $ "unused:dEnum:" ++ ppShow (enums st, x)
Just i -> NmbrL $ show i
dTVar :: St -> I.TVar -> TVar
dTVar st (I.TVar a b) = case lookup a builtinTbl of
Just t -> TVar t (dVar True a)
Nothing -> TVar (dType b) (dVar (a `elem` (free_vars st ++ builtins)) a)
dBuiltin :: (String, Type) -> Define
dBuiltin (s, PtrT (FunT a bs)) = Declare a ('@':s) bs
dBuiltin x = error $ "unused:dBuiltin:" ++ ppShow x
builtinTbl :: [(String, Type)]
builtinTbl =
[ ("printf", PtrT (FunT VoidT [PtrT CharT, VarArgsT])) ]
dTLit :: St -> I.TLit -> TLit
dTLit st (I.TLit a b) = TLit (dType b) (dLit st b a)
dType :: I.Type -> Type
dType t@(I.Type a b) = case (a,b) of
("Ptr_", [c]) -> PtrT (dType c)
("Void_", []) -> VoidT
("I_",_) -> IntT $ nCnt b
("W_",_) -> IntT $ nCnt b
("Fun_", ts) -> PtrT (FunT (dType $ last ts) (map dType $ init ts))
("Array_", [c,d]) -> ArrayT (nCnt [c]) (dType d)
("Float_", []) -> FloatT
("Double_", []) -> DoubleT
("Char_", []) -> CharT
_ -> UserT $ dTypeVar t
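-- For example, dType (I.Type "Ptr_" [I.Type "Char_" []]) is PtrT CharT, i.e.
-- pec's @Ptr_ Char_@ becomes an LLVM pointer-to-char.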
fSOrU :: a -> a -> a -> I.Type -> a
fSOrU a b c t
| isFloating t = a
| isSigned t = b
| otherwise = c
fOrN :: a -> a -> I.Type -> a
fOrN a b t
| isFloating t = a
| otherwise = b
binOpTbl :: [(String, I.Type -> BinOp)]
binOpTbl =
[ ("eq", \_ -> Icmp Equ)
, ("ne", \_ -> Icmp Neq)
, ("gt", fSOrU (Fcmp Ogt) (Icmp Sgt) (Icmp Ugt))
, ("gte", fSOrU (Fcmp Oge) (Icmp Sge) (Icmp Uge))
, ("lt", fSOrU (Fcmp Olt) (Icmp Slt) (Icmp Ult))
, ("lte", fSOrU (Fcmp Ole) (Icmp Sle) (Icmp Ule))
, ("add", fOrN Fadd Add)
, ("sub", fOrN Fsub Sub)
, ("mul", fOrN Fmul Mul)
, ("div", fSOrU Fdiv Sdiv Udiv)
, ("rem", fSOrU Frem Srem Urem)
, ("shl", \_ -> Shl)
, ("shr", \_ -> Lshr)
, ("band", \_ -> And)
, ("bor", \_ -> Or)
, ("bxor", \_ -> Xor)
, ("bnot", \_ -> error $ "todo:implement binary not in LLVM") -- BAL
, ("and", \_ -> error $ "todo:implement boolean and in LLVM") -- BAL:doesn't this get desugared?
, ("or", \_ -> error $ "todo:implement boolean or in LLVM") -- BAL:doesn't this get desugared?
]
dStringD :: (String, Lident) -> Define
dStringD (s,v) = StringD v (show $ 1 + length s) $ concatMap const_char s
const_char :: Char -> String
const_char c
| c < ' ' || c > '~' || c == '\\' = encode_char c
| otherwise = [c]
encode_char :: Enum a => a -> String
encode_char c =
'\\' : (if i <= 0xf then "0" else "") ++ map toUpper (showHex i "")
where i = fromEnum c
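-- For example, encode_char '\n' == "\\0A": a backslash followed by two upper-case
-- hex digits, the escape form used inside the emitted LLVM string constants.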
allocasAtStart :: Define -> Define -- also removes unused allocas
allocasAtStart (Define a b cs ds) = Define a b cs $
[ s | s@(LetS v AllocaE{}) <- universeBi ds, v `elem` universeBi ds1 ]
++ ds1
where
ds1 = transformBi f ds
f :: Stmt -> Stmt
f s
| isAllocaS s = NoOpS
| otherwise = s
allocasAtStart x = x
isAllocaS :: Stmt -> Bool
isAllocaS (LetS _ AllocaE{}) = True
isAllocaS _ = False
elimNoOpS :: Module -> Module
elimNoOpS = transformBi (filter ((/=) NoOpS))
| stevezhee/pec | Pec/LLVM.hs | bsd-3-clause | 12,321 | 0 | 19 | 3,601 | 6,063 | 3,081 | 2,982 | 352 | 12 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.EXT.PackedPixels
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.EXT.PackedPixels (
-- * Extension Support
glGetEXTPackedPixels,
gl_EXT_packed_pixels,
-- * Enums
pattern GL_UNSIGNED_BYTE_3_3_2_EXT,
pattern GL_UNSIGNED_INT_10_10_10_2_EXT,
pattern GL_UNSIGNED_INT_8_8_8_8_EXT,
pattern GL_UNSIGNED_SHORT_4_4_4_4_EXT,
pattern GL_UNSIGNED_SHORT_5_5_5_1_EXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/EXT/PackedPixels.hs | bsd-3-clause | 816 | 0 | 5 | 107 | 67 | 48 | 19 | 11 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
#if __GLASGOW_HASKELL__ >= 702 && __GLASGOW_HASKELL__ < 710
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Comonad.Density
-- Copyright : (C) 2008-2011 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GADTs, MPTCs)
--
-- The 'Density' 'Comonad' for a 'Functor' (aka the 'Comonad' generated by a 'Functor').
-- The 'Density' term dates back to Dubuc's 1974 thesis. The term
-- 'Monad' generated by a 'Functor' dates back to 1972 in Street's
-- "Formal Theory of Monads".
--
-- The left Kan extension of a 'Functor' along itself (@'Lan' f f@) forms a 'Comonad'. This is
-- that 'Comonad'.
----------------------------------------------------------------------------
module Control.Comonad.Density
( Density(..)
, liftDensity
, densityToAdjunction, adjunctionToDensity
, densityToLan, lanToDensity
) where
import Control.Applicative
import Control.Comonad
import Control.Comonad.Trans.Class
import Data.Functor.Apply
import Data.Functor.Adjunction
import Data.Functor.Extend
import Data.Functor.Kan.Lan
data Density k a where
Density :: (k b -> a) -> k b -> Density k a
instance Functor (Density f) where
fmap f (Density g h) = Density (f . g) h
{-# INLINE fmap #-}
instance Extend (Density f) where
duplicated (Density f ws) = Density (Density f) ws
{-# INLINE duplicated #-}
instance Comonad (Density f) where
duplicate (Density f ws) = Density (Density f) ws
{-# INLINE duplicate #-}
extract (Density f a) = f a
{-# INLINE extract #-}
instance ComonadTrans Density where
lower (Density f c) = extend f c
{-# INLINE lower #-}
instance Apply f => Apply (Density f) where
Density kxf x <.> Density kya y =
Density (\k -> kxf (fmap fst k) (kya (fmap snd k))) ((,) <$> x <.> y)
{-# INLINE (<.>) #-}
instance Applicative f => Applicative (Density f) where
pure a = Density (const a) (pure ())
{-# INLINE pure #-}
Density kxf x <*> Density kya y =
Density (\k -> kxf (fmap fst k) (kya (fmap snd k))) (liftA2 (,) x y)
{-# INLINE (<*>) #-}
-- | The natural transformation from a @'Comonad' w@ to the 'Comonad' generated by @w@ (forwards).
--
-- This is merely a right-inverse (section) of 'lower', rather than a full inverse.
--
-- @
-- 'lower' . 'liftDensity' ≡ 'id'
-- @
liftDensity :: Comonad w => w a -> Density w a
liftDensity = Density extract
{-# INLINE liftDensity #-}
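-- A small sanity check of the law above, assuming the Store comonad from the
-- @comonad@ package is in scope:
--
-- @
-- extract (lower (liftDensity (store length "abc")))  -- evaluates to 3
-- @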
-- | The Density 'Comonad' of a left adjoint is isomorphic to the 'Comonad' formed by that 'Adjunction'.
--
-- This isomorphism is witnessed by 'densityToAdjunction' and 'adjunctionToDensity'.
--
-- @
-- 'densityToAdjunction' . 'adjunctionToDensity' ≡ 'id'
-- 'adjunctionToDensity' . 'densityToAdjunction' ≡ 'id'
-- @
densityToAdjunction :: Adjunction f g => Density f a -> f (g a)
densityToAdjunction (Density f v) = fmap (leftAdjunct f) v
{-# INLINE densityToAdjunction #-}
adjunctionToDensity :: Adjunction f g => f (g a) -> Density f a
adjunctionToDensity = Density counit
{-# INLINE adjunctionToDensity #-}
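-- Concretely, instantiating f = (,) e and g = (->) e (the pairing/reader
-- adjunction) identifies @Density ((,) e) a@ with @(e, e -> a)@, i.e. the
-- familiar Store comonad; an illustrative instance, not extra API.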
-- | The 'Density' 'Comonad' of a 'Functor' @f@ is obtained by taking the left Kan extension
-- ('Lan') of @f@ along itself. This isomorphism is witnessed by 'lanToDensity' and 'densityToLan'
--
-- @
-- 'lanToDensity' . 'densityToLan' ≡ 'id'
-- 'densityToLan' . 'lanToDensity' ≡ 'id'
-- @
lanToDensity :: Lan f f a -> Density f a
lanToDensity (Lan f v) = Density f v
{-# INLINE lanToDensity #-}
densityToLan :: Density f a -> Lan f f a
densityToLan (Density f v) = Lan f v
{-# INLINE densityToLan #-}
| xuwei-k/kan-extensions | src/Control/Comonad/Density.hs | bsd-3-clause | 3,776 | 0 | 13 | 674 | 776 | 421 | 355 | 56 | 1 |
{-# LANGUAGE TemplateHaskell #-}
------------------------------------------------------------------------------
-- | This module defines our application's state type and an alias for its
-- handler monad.
--
module Application where
------------------------------------------------------------------------------
import Control.Lens
import Snap.Snaplet
import Snap.Snaplet.Auth
import Snap.Snaplet.Heist
import Snap.Snaplet.I18N
import Snap.Snaplet.MongoDB.Core
import Snap.Snaplet.Session
------------------------------------------------------------------------------
data App = App
{ _heist :: Snaplet (Heist App)
, _i18n :: Snaplet I18N
, _appSession :: Snaplet SessionManager
, _appMongoDB :: Snaplet MongoDB
, _appAuth :: Snaplet (AuthManager App)
    , _adminRole :: Role -- ^ Role for the admin user. Keep it simple for now.
}
makeLenses ''App
instance HasHeist App where
heistLens = subSnaplet heist
instance HasI18N App where
i18nLens = i18n
instance HasMongoDB App where
getMongoDB app = app ^. (appMongoDB . snapletValue)
-- getMongoDB = (^& (appMongoDB . snapletValue))
------------------------------------------------------------------------------
type AppHandler = Handler App App
| HaskellCNOrg/snap-web | src/Application.hs | bsd-3-clause | 1,347 | 0 | 11 | 293 | 202 | 118 | 84 | 24 | 0 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnExpr]{Renaming of expressions}
Basically dependency analysis.
Handles @Match@, @GRHSs@, @HsExpr@, and @Qualifier@ datatypes. In
general, all of these functions return a renamed thing, and a set of
free variables.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiWayIf #-}
module RnExpr (
rnLExpr, rnExpr, rnStmts
) where
#include "HsVersions.h"
import RnBinds ( rnLocalBindsAndThen, rnLocalValBindsLHS, rnLocalValBindsRHS,
rnMatchGroup, rnGRHS, makeMiniFixityEnv)
import HsSyn
import TcRnMonad
import Module ( getModule )
import RnEnv
import RnSplice ( rnBracket, rnSpliceExpr, checkThLocalName )
import RnTypes
import RnPat
import DynFlags
import PrelNames
import BasicTypes
import Name
import NameSet
import RdrName
import UniqSet
import Data.List
import Util
import ListSetOps ( removeDups )
import ErrUtils
import Outputable
import SrcLoc
import FastString
import Control.Monad
import TysWiredIn ( nilDataConName )
import qualified GHC.LanguageExtensions as LangExt
import Data.Ord
import Data.Array
{-
************************************************************************
* *
\subsubsection{Expressions}
* *
************************************************************************
-}
rnExprs :: [LHsExpr RdrName] -> RnM ([LHsExpr Name], FreeVars)
rnExprs ls = rnExprs' ls emptyUniqSet
where
rnExprs' [] acc = return ([], acc)
rnExprs' (expr:exprs) acc =
do { (expr', fvExpr) <- rnLExpr expr
-- Now we do a "seq" on the free vars because typically it's small
-- or empty, especially in very long lists of constants
; let acc' = acc `plusFV` fvExpr
; (exprs', fvExprs) <- acc' `seq` rnExprs' exprs acc'
; return (expr':exprs', fvExprs) }
-- Variables. We look up the variable and return the resulting name.
rnLExpr :: LHsExpr RdrName -> RnM (LHsExpr Name, FreeVars)
rnLExpr = wrapLocFstM rnExpr
rnExpr :: HsExpr RdrName -> RnM (HsExpr Name, FreeVars)
finishHsVar :: Located Name -> RnM (HsExpr Name, FreeVars)
-- Separated from rnExpr because it's also used
-- when renaming infix expressions
finishHsVar (L l name)
= do { this_mod <- getModule
; when (nameIsLocalOrFrom this_mod name) $
checkThLocalName name
; return (HsVar (L l name), unitFV name) }
rnUnboundVar :: RdrName -> RnM (HsExpr Name, FreeVars)
rnUnboundVar v
= do { if isUnqual v
then -- Treat this as a "hole"
-- Do not fail right now; instead, return HsUnboundVar
-- and let the type checker report the error
return (HsUnboundVar (rdrNameOcc v), emptyFVs)
else -- Fail immediately (qualified name)
do { n <- reportUnboundName v
; return (HsVar (noLoc n), emptyFVs) } }
rnExpr (HsVar (L l v))
= do { opt_DuplicateRecordFields <- xoptM LangExt.DuplicateRecordFields
; mb_name <- lookupOccRn_overloaded opt_DuplicateRecordFields v
; case mb_name of {
Nothing -> rnUnboundVar v ;
Just (Left name)
| name == nilDataConName -- Treat [] as an ExplicitList, so that
-- OverloadedLists works correctly
-> rnExpr (ExplicitList placeHolderType Nothing [])
| otherwise
-> finishHsVar (L l name) ;
Just (Right [f@(FieldOcc (L _ fn) s)]) ->
return (HsRecFld (ambiguousFieldOcc (FieldOcc (L l fn) s))
, unitFV (selectorFieldOcc f)) ;
Just (Right fs@(_:_:_)) -> return (HsRecFld (Ambiguous (L l v)
PlaceHolder)
, mkFVs (map selectorFieldOcc fs));
Just (Right []) -> error "runExpr/HsVar" } }
rnExpr (HsIPVar v)
= return (HsIPVar v, emptyFVs)
rnExpr (HsOverLabel v)
= return (HsOverLabel v, emptyFVs)
rnExpr (HsLit lit@(HsString src s))
= do { opt_OverloadedStrings <- xoptM LangExt.OverloadedStrings
; if opt_OverloadedStrings then
rnExpr (HsOverLit (mkHsIsString src s placeHolderType))
else do {
; rnLit lit
; return (HsLit lit, emptyFVs) } }
rnExpr (HsLit lit)
= do { rnLit lit
; return (HsLit lit, emptyFVs) }
rnExpr (HsOverLit lit)
= do { (lit', fvs) <- rnOverLit lit
; return (HsOverLit lit', fvs) }
rnExpr (HsApp fun arg)
= do { (fun',fvFun) <- rnLExpr fun
; (arg',fvArg) <- rnLExpr arg
; return (HsApp fun' arg', fvFun `plusFV` fvArg) }
rnExpr (HsAppType fun arg)
= do { (fun',fvFun) <- rnLExpr fun
; (arg',fvArg) <- rnHsWcType HsTypeCtx arg
; return (HsAppType fun' arg', fvFun `plusFV` fvArg) }
rnExpr (OpApp e1 op _ e2)
= do { (e1', fv_e1) <- rnLExpr e1
; (e2', fv_e2) <- rnLExpr e2
; (op', fv_op) <- rnLExpr op
-- Deal with fixity
-- When renaming code synthesised from "deriving" declarations
-- we used to avoid fixity stuff, but we can't easily tell any
-- more, so I've removed the test. Adding HsPars in TcGenDeriv
-- should prevent bad things happening.
; fixity <- case op' of
L _ (HsVar (L _ n)) -> lookupFixityRn n
L _ (HsRecFld f) -> lookupFieldFixityRn f
_ -> return (Fixity (show minPrecedence) minPrecedence InfixL)
-- c.f. lookupFixity for unbound
; final_e <- mkOpAppRn e1' op' fixity e2'
; return (final_e, fv_e1 `plusFV` fv_op `plusFV` fv_e2) }
rnExpr (NegApp e _)
= do { (e', fv_e) <- rnLExpr e
; (neg_name, fv_neg) <- lookupSyntaxName negateName
; final_e <- mkNegAppRn e' neg_name
; return (final_e, fv_e `plusFV` fv_neg) }
------------------------------------------
-- Template Haskell extensions
-- Don't ifdef-GHCI them because we want to fail gracefully
-- (not with an rnExpr crash) in a stage-1 compiler.
rnExpr e@(HsBracket br_body) = rnBracket e br_body
rnExpr (HsSpliceE splice) = rnSpliceExpr splice
---------------------------------------------
-- Sections
-- See Note [Parsing sections] in Parser.y
rnExpr (HsPar (L loc (section@(SectionL {}))))
= do { (section', fvs) <- rnSection section
; return (HsPar (L loc section'), fvs) }
rnExpr (HsPar (L loc (section@(SectionR {}))))
= do { (section', fvs) <- rnSection section
; return (HsPar (L loc section'), fvs) }
rnExpr (HsPar e)
= do { (e', fvs_e) <- rnLExpr e
; return (HsPar e', fvs_e) }
rnExpr expr@(SectionL {})
= do { addErr (sectionErr expr); rnSection expr }
rnExpr expr@(SectionR {})
= do { addErr (sectionErr expr); rnSection expr }
---------------------------------------------
rnExpr (HsCoreAnn src ann expr)
= do { (expr', fvs_expr) <- rnLExpr expr
; return (HsCoreAnn src ann expr', fvs_expr) }
rnExpr (HsSCC src lbl expr)
= do { (expr', fvs_expr) <- rnLExpr expr
; return (HsSCC src lbl expr', fvs_expr) }
rnExpr (HsTickPragma src info srcInfo expr)
= do { (expr', fvs_expr) <- rnLExpr expr
; return (HsTickPragma src info srcInfo expr', fvs_expr) }
rnExpr (HsLam matches)
= do { (matches', fvMatch) <- rnMatchGroup LambdaExpr rnLExpr matches
; return (HsLam matches', fvMatch) }
rnExpr (HsLamCase _arg matches)
= do { (matches', fvs_ms) <- rnMatchGroup CaseAlt rnLExpr matches
-- ; return (HsLamCase arg matches', fvs_ms) }
; return (HsLamCase placeHolderType matches', fvs_ms) }
rnExpr (HsCase expr matches)
= do { (new_expr, e_fvs) <- rnLExpr expr
; (new_matches, ms_fvs) <- rnMatchGroup CaseAlt rnLExpr matches
; return (HsCase new_expr new_matches, e_fvs `plusFV` ms_fvs) }
rnExpr (HsLet (L l binds) expr)
= rnLocalBindsAndThen binds $ \binds' _ -> do
{ (expr',fvExpr) <- rnLExpr expr
; return (HsLet (L l binds') expr', fvExpr) }
rnExpr (HsDo do_or_lc (L l stmts) _)
= do { ((stmts', _), fvs) <-
rnStmtsWithPostProcessing do_or_lc rnLExpr
postProcessStmtsForApplicativeDo stmts
(\ _ -> return ((), emptyFVs))
; return ( HsDo do_or_lc (L l stmts') placeHolderType, fvs ) }
rnExpr (ExplicitList _ _ exps)
= do { opt_OverloadedLists <- xoptM LangExt.OverloadedLists
; (exps', fvs) <- rnExprs exps
; if opt_OverloadedLists
then do {
; (from_list_n_name, fvs') <- lookupSyntaxName fromListNName
; return (ExplicitList placeHolderType (Just from_list_n_name) exps'
, fvs `plusFV` fvs') }
else
return (ExplicitList placeHolderType Nothing exps', fvs) }
rnExpr (ExplicitPArr _ exps)
= do { (exps', fvs) <- rnExprs exps
; return (ExplicitPArr placeHolderType exps', fvs) }
rnExpr (ExplicitTuple tup_args boxity)
= do { checkTupleSection tup_args
; checkTupSize (length tup_args)
; (tup_args', fvs) <- mapAndUnzipM rnTupArg tup_args
; return (ExplicitTuple tup_args' boxity, plusFVs fvs) }
where
rnTupArg (L l (Present e)) = do { (e',fvs) <- rnLExpr e
; return (L l (Present e'), fvs) }
rnTupArg (L l (Missing _)) = return (L l (Missing placeHolderType)
, emptyFVs)
rnExpr (RecordCon { rcon_con_name = con_id
, rcon_flds = rec_binds@(HsRecFields { rec_dotdot = dd }) })
= do { con_lname@(L _ con_name) <- lookupLocatedOccRn con_id
; (flds, fvs) <- rnHsRecFields (HsRecFieldCon con_name) mk_hs_var rec_binds
; (flds', fvss) <- mapAndUnzipM rn_field flds
; let rec_binds' = HsRecFields { rec_flds = flds', rec_dotdot = dd }
; return (RecordCon { rcon_con_name = con_lname, rcon_flds = rec_binds'
, rcon_con_expr = noPostTcExpr, rcon_con_like = PlaceHolder }
, fvs `plusFV` plusFVs fvss `addOneFV` con_name) }
where
mk_hs_var l n = HsVar (L l n)
rn_field (L l fld) = do { (arg', fvs) <- rnLExpr (hsRecFieldArg fld)
; return (L l (fld { hsRecFieldArg = arg' }), fvs) }
rnExpr (RecordUpd { rupd_expr = expr, rupd_flds = rbinds })
= do { (expr', fvExpr) <- rnLExpr expr
; (rbinds', fvRbinds) <- rnHsRecUpdFields rbinds
; return (RecordUpd { rupd_expr = expr', rupd_flds = rbinds'
, rupd_cons = PlaceHolder, rupd_in_tys = PlaceHolder
, rupd_out_tys = PlaceHolder, rupd_wrap = PlaceHolder }
, fvExpr `plusFV` fvRbinds) }
rnExpr (ExprWithTySig expr pty)
= do { (pty', fvTy) <- rnHsSigWcType ExprWithTySigCtx pty
; (expr', fvExpr) <- bindSigTyVarsFV (hsWcScopedTvs pty') $
rnLExpr expr
; return (ExprWithTySig expr' pty', fvExpr `plusFV` fvTy) }
rnExpr (HsIf _ p b1 b2)
= do { (p', fvP) <- rnLExpr p
; (b1', fvB1) <- rnLExpr b1
; (b2', fvB2) <- rnLExpr b2
; (mb_ite, fvITE) <- lookupIfThenElse
; return (HsIf mb_ite p' b1' b2', plusFVs [fvITE, fvP, fvB1, fvB2]) }
rnExpr (HsMultiIf _ty alts)
= do { (alts', fvs) <- mapFvRn (rnGRHS IfAlt rnLExpr) alts
-- ; return (HsMultiIf ty alts', fvs) }
; return (HsMultiIf placeHolderType alts', fvs) }
rnExpr (ArithSeq _ _ seq)
= do { opt_OverloadedLists <- xoptM LangExt.OverloadedLists
; (new_seq, fvs) <- rnArithSeq seq
; if opt_OverloadedLists
then do {
; (from_list_name, fvs') <- lookupSyntaxName fromListName
; return (ArithSeq noPostTcExpr (Just from_list_name) new_seq, fvs `plusFV` fvs') }
else
return (ArithSeq noPostTcExpr Nothing new_seq, fvs) }
rnExpr (PArrSeq _ seq)
= do { (new_seq, fvs) <- rnArithSeq seq
; return (PArrSeq noPostTcExpr new_seq, fvs) }
{-
These three are pattern syntax appearing in expressions.
Since all the symbols are reservedops we can simply reject them.
We return a (bogus) EWildPat in each case.
-}
rnExpr EWildPat = return (hsHoleExpr, emptyFVs) -- "_" is just a hole
rnExpr e@(EAsPat {}) =
patSynErr e (text "Did you mean to enable TypeApplications?")
rnExpr e@(EViewPat {}) = patSynErr e empty
rnExpr e@(ELazyPat {}) = patSynErr e empty
{-
************************************************************************
* *
Static values
* *
************************************************************************
For the static form we check that the free variables are all top-level
value bindings. This is done by checking that the name is external or
wired-in. See the Notes about the NameSorts in Name.hs.
-}
rnExpr e@(HsStatic expr) = do
target <- fmap hscTarget getDynFlags
case target of
-- SPT entries are expected to exist in object code so far, and this is
-- not the case in interpreted mode. See bug #9878.
HscInterpreted -> addErr $ sep
[ text "The static form is not supported in interpreted mode."
, text "Please use -fobject-code."
]
_ -> return ()
(expr',fvExpr) <- rnLExpr expr
stage <- getStage
case stage of
Brack _ _ -> return () -- Don't check names if we are inside brackets.
-- We don't want to reject cases like:
-- \e -> [| static $(e) |]
-- if $(e) turns out to produce a legal expression.
Splice _ -> addErr $ sep
[ text "static forms cannot be used in splices:"
, nest 2 $ ppr e
]
_ -> do
let isTopLevelName n = isExternalName n || isWiredInName n
case nameSetElems $ filterNameSet
(\n -> not (isTopLevelName n || isUnboundName n))
fvExpr of
[] -> return ()
fvNonGlobal -> addErr $ cat
[ text $ "Only identifiers of top-level bindings can "
++ "appear in the body of the static form:"
, nest 2 $ ppr e
, text "but the following identifiers were found instead:"
, nest 2 $ vcat $ map ppr fvNonGlobal
]
return (HsStatic expr', fvExpr)
{-
************************************************************************
* *
Arrow notation
* *
************************************************************************
-}
rnExpr (HsProc pat body)
= newArrowScope $
rnPat ProcExpr pat $ \ pat' -> do
{ (body',fvBody) <- rnCmdTop body
; return (HsProc pat' body', fvBody) }
-- Ideally, these would be done in parsing, but to keep parsing simple, we do it here.
rnExpr e@(HsArrApp {}) = arrowFail e
rnExpr e@(HsArrForm {}) = arrowFail e
rnExpr other = pprPanic "rnExpr: unexpected expression" (ppr other)
-- HsWrap
hsHoleExpr :: HsExpr id
hsHoleExpr = HsUnboundVar (mkVarOcc "_")
arrowFail :: HsExpr RdrName -> RnM (HsExpr Name, FreeVars)
arrowFail e
= do { addErr (vcat [ text "Arrow command found where an expression was expected:"
, nest 2 (ppr e) ])
-- Return a place-holder hole, so that we can carry on
-- to report other errors
; return (hsHoleExpr, emptyFVs) }
----------------------
-- See Note [Parsing sections] in Parser.y
rnSection :: HsExpr RdrName -> RnM (HsExpr Name, FreeVars)
rnSection section@(SectionR op expr)
= do { (op', fvs_op) <- rnLExpr op
; (expr', fvs_expr) <- rnLExpr expr
; checkSectionPrec InfixR section op' expr'
; return (SectionR op' expr', fvs_op `plusFV` fvs_expr) }
rnSection section@(SectionL expr op)
= do { (expr', fvs_expr) <- rnLExpr expr
; (op', fvs_op) <- rnLExpr op
; checkSectionPrec InfixL section op' expr'
; return (SectionL expr' op', fvs_op `plusFV` fvs_expr) }
rnSection other = pprPanic "rnSection" (ppr other)
{-
************************************************************************
* *
Arrow commands
* *
************************************************************************
-}
rnCmdArgs :: [LHsCmdTop RdrName] -> RnM ([LHsCmdTop Name], FreeVars)
rnCmdArgs [] = return ([], emptyFVs)
rnCmdArgs (arg:args)
= do { (arg',fvArg) <- rnCmdTop arg
; (args',fvArgs) <- rnCmdArgs args
; return (arg':args', fvArg `plusFV` fvArgs) }
rnCmdTop :: LHsCmdTop RdrName -> RnM (LHsCmdTop Name, FreeVars)
rnCmdTop = wrapLocFstM rnCmdTop'
where
rnCmdTop' (HsCmdTop cmd _ _ _)
= do { (cmd', fvCmd) <- rnLCmd cmd
; let cmd_names = [arrAName, composeAName, firstAName] ++
nameSetElems (methodNamesCmd (unLoc cmd'))
-- Generate the rebindable syntax for the monad
; (cmd_names', cmd_fvs) <- lookupSyntaxNames cmd_names
; return (HsCmdTop cmd' placeHolderType placeHolderType
(cmd_names `zip` cmd_names'),
fvCmd `plusFV` cmd_fvs) }
rnLCmd :: LHsCmd RdrName -> RnM (LHsCmd Name, FreeVars)
rnLCmd = wrapLocFstM rnCmd
rnCmd :: HsCmd RdrName -> RnM (HsCmd Name, FreeVars)
rnCmd (HsCmdArrApp arrow arg _ ho rtl)
= do { (arrow',fvArrow) <- select_arrow_scope (rnLExpr arrow)
; (arg',fvArg) <- rnLExpr arg
; return (HsCmdArrApp arrow' arg' placeHolderType ho rtl,
fvArrow `plusFV` fvArg) }
where
select_arrow_scope tc = case ho of
HsHigherOrderApp -> tc
HsFirstOrderApp -> escapeArrowScope tc
-- See Note [Escaping the arrow scope] in TcRnTypes
-- Before renaming 'arrow', use the environment of the enclosing
-- proc for the (-<) case.
-- Local bindings, inside the enclosing proc, are not in scope
-- inside 'arrow'. In the higher-order case (-<<), they are.
-- infix form
rnCmd (HsCmdArrForm op (Just _) [arg1, arg2])
= do { (op',fv_op) <- escapeArrowScope (rnLExpr op)
; let L _ (HsVar (L _ op_name)) = op'
; (arg1',fv_arg1) <- rnCmdTop arg1
; (arg2',fv_arg2) <- rnCmdTop arg2
-- Deal with fixity
; fixity <- lookupFixityRn op_name
; final_e <- mkOpFormRn arg1' op' fixity arg2'
; return (final_e, fv_arg1 `plusFV` fv_op `plusFV` fv_arg2) }
rnCmd (HsCmdArrForm op fixity cmds)
= do { (op',fvOp) <- escapeArrowScope (rnLExpr op)
; (cmds',fvCmds) <- rnCmdArgs cmds
; return (HsCmdArrForm op' fixity cmds', fvOp `plusFV` fvCmds) }
rnCmd (HsCmdApp fun arg)
= do { (fun',fvFun) <- rnLCmd fun
; (arg',fvArg) <- rnLExpr arg
; return (HsCmdApp fun' arg', fvFun `plusFV` fvArg) }
rnCmd (HsCmdLam matches)
= do { (matches', fvMatch) <- rnMatchGroup LambdaExpr rnLCmd matches
; return (HsCmdLam matches', fvMatch) }
rnCmd (HsCmdPar e)
= do { (e', fvs_e) <- rnLCmd e
; return (HsCmdPar e', fvs_e) }
rnCmd (HsCmdCase expr matches)
= do { (new_expr, e_fvs) <- rnLExpr expr
; (new_matches, ms_fvs) <- rnMatchGroup CaseAlt rnLCmd matches
; return (HsCmdCase new_expr new_matches, e_fvs `plusFV` ms_fvs) }
rnCmd (HsCmdIf _ p b1 b2)
= do { (p', fvP) <- rnLExpr p
; (b1', fvB1) <- rnLCmd b1
; (b2', fvB2) <- rnLCmd b2
; (mb_ite, fvITE) <- lookupIfThenElse
; return (HsCmdIf mb_ite p' b1' b2', plusFVs [fvITE, fvP, fvB1, fvB2]) }
rnCmd (HsCmdLet (L l binds) cmd)
= rnLocalBindsAndThen binds $ \ binds' _ -> do
{ (cmd',fvExpr) <- rnLCmd cmd
; return (HsCmdLet (L l binds') cmd', fvExpr) }
rnCmd (HsCmdDo (L l stmts) _)
= do { ((stmts', _), fvs) <-
rnStmts ArrowExpr rnLCmd stmts (\ _ -> return ((), emptyFVs))
; return ( HsCmdDo (L l stmts') placeHolderType, fvs ) }
rnCmd cmd@(HsCmdWrap {}) = pprPanic "rnCmd" (ppr cmd)
---------------------------------------------------
type CmdNeeds = FreeVars -- Only inhabitants are
-- appAName, choiceAName, loopAName
-- find what methods the Cmd needs (loop, choice, apply)
methodNamesLCmd :: LHsCmd Name -> CmdNeeds
methodNamesLCmd = methodNamesCmd . unLoc
methodNamesCmd :: HsCmd Name -> CmdNeeds
methodNamesCmd (HsCmdArrApp _arrow _arg _ HsFirstOrderApp _rtl)
= emptyFVs
methodNamesCmd (HsCmdArrApp _arrow _arg _ HsHigherOrderApp _rtl)
= unitFV appAName
methodNamesCmd (HsCmdArrForm {}) = emptyFVs
methodNamesCmd (HsCmdWrap _ cmd) = methodNamesCmd cmd
methodNamesCmd (HsCmdPar c) = methodNamesLCmd c
methodNamesCmd (HsCmdIf _ _ c1 c2)
= methodNamesLCmd c1 `plusFV` methodNamesLCmd c2 `addOneFV` choiceAName
methodNamesCmd (HsCmdLet _ c) = methodNamesLCmd c
methodNamesCmd (HsCmdDo (L _ stmts) _) = methodNamesStmts stmts
methodNamesCmd (HsCmdApp c _) = methodNamesLCmd c
methodNamesCmd (HsCmdLam match) = methodNamesMatch match
methodNamesCmd (HsCmdCase _ matches)
= methodNamesMatch matches `addOneFV` choiceAName
--methodNamesCmd _ = emptyFVs
-- Other forms can't occur in commands, but it's not convenient
-- to error here so we just do what's convenient.
-- The type checker will complain later
---------------------------------------------------
methodNamesMatch :: MatchGroup Name (LHsCmd Name) -> FreeVars
methodNamesMatch (MG { mg_alts = L _ ms })
= plusFVs (map do_one ms)
where
do_one (L _ (Match _ _ _ grhss)) = methodNamesGRHSs grhss
-------------------------------------------------
-- gaw 2004
methodNamesGRHSs :: GRHSs Name (LHsCmd Name) -> FreeVars
methodNamesGRHSs (GRHSs grhss _) = plusFVs (map methodNamesGRHS grhss)
-------------------------------------------------
methodNamesGRHS :: Located (GRHS Name (LHsCmd Name)) -> CmdNeeds
methodNamesGRHS (L _ (GRHS _ rhs)) = methodNamesLCmd rhs
---------------------------------------------------
methodNamesStmts :: [Located (StmtLR Name Name (LHsCmd Name))] -> FreeVars
methodNamesStmts stmts = plusFVs (map methodNamesLStmt stmts)
---------------------------------------------------
methodNamesLStmt :: Located (StmtLR Name Name (LHsCmd Name)) -> FreeVars
methodNamesLStmt = methodNamesStmt . unLoc
methodNamesStmt :: StmtLR Name Name (LHsCmd Name) -> FreeVars
methodNamesStmt (LastStmt cmd _ _) = methodNamesLCmd cmd
methodNamesStmt (BodyStmt cmd _ _ _) = methodNamesLCmd cmd
methodNamesStmt (BindStmt _ cmd _ _ _) = methodNamesLCmd cmd
methodNamesStmt (RecStmt { recS_stmts = stmts }) =
methodNamesStmts stmts `addOneFV` loopAName
methodNamesStmt (LetStmt {}) = emptyFVs
methodNamesStmt (ParStmt {}) = emptyFVs
methodNamesStmt (TransStmt {}) = emptyFVs
methodNamesStmt ApplicativeStmt{} = emptyFVs
-- ParStmt and TransStmt can't occur in commands, but it's not
-- convenient to error here so we just do what's convenient
{-
************************************************************************
* *
Arithmetic sequences
* *
************************************************************************
-}
rnArithSeq :: ArithSeqInfo RdrName -> RnM (ArithSeqInfo Name, FreeVars)
rnArithSeq (From expr)
= do { (expr', fvExpr) <- rnLExpr expr
; return (From expr', fvExpr) }
rnArithSeq (FromThen expr1 expr2)
= do { (expr1', fvExpr1) <- rnLExpr expr1
; (expr2', fvExpr2) <- rnLExpr expr2
; return (FromThen expr1' expr2', fvExpr1 `plusFV` fvExpr2) }
rnArithSeq (FromTo expr1 expr2)
= do { (expr1', fvExpr1) <- rnLExpr expr1
; (expr2', fvExpr2) <- rnLExpr expr2
; return (FromTo expr1' expr2', fvExpr1 `plusFV` fvExpr2) }
rnArithSeq (FromThenTo expr1 expr2 expr3)
= do { (expr1', fvExpr1) <- rnLExpr expr1
; (expr2', fvExpr2) <- rnLExpr expr2
; (expr3', fvExpr3) <- rnLExpr expr3
; return (FromThenTo expr1' expr2' expr3',
plusFVs [fvExpr1, fvExpr2, fvExpr3]) }
{-
************************************************************************
* *
\subsubsection{@Stmt@s: in @do@ expressions}
* *
************************************************************************
-}
-- | Rename some Stmts
rnStmts :: Outputable (body RdrName)
=> HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-- ^ How to rename the body of each statement (e.g. rnLExpr)
-> [LStmt RdrName (Located (body RdrName))]
-- ^ Statements
-> ([Name] -> RnM (thing, FreeVars))
-- ^ if these statements scope over something, this renames it
-- and returns the result.
-> RnM (([LStmt Name (Located (body Name))], thing), FreeVars)
rnStmts ctxt rnBody = rnStmtsWithPostProcessing ctxt rnBody noPostProcessStmts
-- | like 'rnStmts' but applies a post-processing step to the renamed Stmts
rnStmtsWithPostProcessing
:: Outputable (body RdrName)
=> HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-- ^ How to rename the body of each statement (e.g. rnLExpr)
-> (HsStmtContext Name
-> [(LStmt Name (Located (body Name)), FreeVars)]
-> RnM ([LStmt Name (Located (body Name))], FreeVars))
-- ^ postprocess the statements
-> [LStmt RdrName (Located (body RdrName))]
-- ^ Statements
-> ([Name] -> RnM (thing, FreeVars))
-- ^ if these statements scope over something, this renames it
-- and returns the result.
-> RnM (([LStmt Name (Located (body Name))], thing), FreeVars)
rnStmtsWithPostProcessing ctxt rnBody ppStmts stmts thing_inside
= do { ((stmts', thing), fvs) <-
rnStmtsWithFreeVars ctxt rnBody stmts thing_inside
; (pp_stmts, fvs') <- ppStmts ctxt stmts'
; return ((pp_stmts, thing), fvs `plusFV` fvs')
}
-- | maybe rearrange statements according to the ApplicativeDo transformation
postProcessStmtsForApplicativeDo
:: HsStmtContext Name
-> [(ExprLStmt Name, FreeVars)]
-> RnM ([ExprLStmt Name], FreeVars)
postProcessStmtsForApplicativeDo ctxt stmts
= do {
-- rearrange the statements using ApplicativeStmt if
-- -XApplicativeDo is on. Also strip out the FreeVars attached
-- to each Stmt body.
ado_is_on <- xoptM LangExt.ApplicativeDo
; let is_do_expr | DoExpr <- ctxt = True
| otherwise = False
; if ado_is_on && is_do_expr
then rearrangeForApplicativeDo ctxt stmts
else noPostProcessStmts ctxt stmts }
-- | strip the FreeVars annotations from statements
noPostProcessStmts
:: HsStmtContext Name
-> [(LStmt Name (Located (body Name)), FreeVars)]
-> RnM ([LStmt Name (Located (body Name))], FreeVars)
noPostProcessStmts _ stmts = return (map fst stmts, emptyNameSet)
rnStmtsWithFreeVars :: Outputable (body RdrName)
=> HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> [LStmt RdrName (Located (body RdrName))]
-> ([Name] -> RnM (thing, FreeVars))
-> RnM ( ([(LStmt Name (Located (body Name)), FreeVars)], thing)
, FreeVars)
-- Each Stmt body is annotated with its FreeVars, so that
-- we can rearrange statements for ApplicativeDo.
--
-- Variables bound by the Stmts, and mentioned in thing_inside,
-- do not appear in the result FreeVars
rnStmtsWithFreeVars ctxt _ [] thing_inside
= do { checkEmptyStmts ctxt
; (thing, fvs) <- thing_inside []
; return (([], thing), fvs) }
rnStmtsWithFreeVars MDoExpr rnBody stmts thing_inside -- Deal with mdo
= -- Behave like do { rec { ...all but last... }; last }
do { ((stmts1, (stmts2, thing)), fvs)
<- rnStmt MDoExpr rnBody (noLoc $ mkRecStmt all_but_last) $ \ _ ->
do { last_stmt' <- checkLastStmt MDoExpr last_stmt
; rnStmt MDoExpr rnBody last_stmt' thing_inside }
; return (((stmts1 ++ stmts2), thing), fvs) }
where
Just (all_but_last, last_stmt) = snocView stmts
rnStmtsWithFreeVars ctxt rnBody (lstmt@(L loc _) : lstmts) thing_inside
| null lstmts
= setSrcSpan loc $
do { lstmt' <- checkLastStmt ctxt lstmt
; rnStmt ctxt rnBody lstmt' thing_inside }
| otherwise
= do { ((stmts1, (stmts2, thing)), fvs)
<- setSrcSpan loc $
do { checkStmt ctxt lstmt
; rnStmt ctxt rnBody lstmt $ \ bndrs1 ->
rnStmtsWithFreeVars ctxt rnBody lstmts $ \ bndrs2 ->
thing_inside (bndrs1 ++ bndrs2) }
; return (((stmts1 ++ stmts2), thing), fvs) }
----------------------
rnStmt :: Outputable (body RdrName)
=> HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-- ^ How to rename the body of the statement
-> LStmt RdrName (Located (body RdrName))
-- ^ The statement
-> ([Name] -> RnM (thing, FreeVars))
-- ^ Rename the stuff that this statement scopes over
-> RnM ( ([(LStmt Name (Located (body Name)), FreeVars)], thing)
, FreeVars)
-- Variables bound by the Stmt, and mentioned in thing_inside,
-- do not appear in the result FreeVars
rnStmt ctxt rnBody (L loc (LastStmt body noret _)) thing_inside
= do { (body', fv_expr) <- rnBody body
; (ret_op, fvs1) <- lookupStmtName ctxt returnMName
; (thing, fvs3) <- thing_inside []
; return (([(L loc (LastStmt body' noret ret_op), fv_expr)], thing),
fv_expr `plusFV` fvs1 `plusFV` fvs3) }
rnStmt ctxt rnBody (L loc (BodyStmt body _ _ _)) thing_inside
= do { (body', fv_expr) <- rnBody body
; (then_op, fvs1) <- lookupStmtName ctxt thenMName
; (guard_op, fvs2) <- if isListCompExpr ctxt
then lookupStmtName ctxt guardMName
else return (noSyntaxExpr, emptyFVs)
-- Only list/parr/monad comprehensions use 'guard'
                             -- Also for sub-stmts of the same, e.g. [ e | x<-xs, gd | blah ]
-- Here "gd" is a guard
; (thing, fvs3) <- thing_inside []
; return (([(L loc (BodyStmt body'
then_op guard_op placeHolderType), fv_expr)], thing),
fv_expr `plusFV` fvs1 `plusFV` fvs2 `plusFV` fvs3) }
rnStmt ctxt rnBody (L loc (BindStmt pat body _ _ _)) thing_inside
= do { (body', fv_expr) <- rnBody body
-- The binders do not scope over the expression
; (bind_op, fvs1) <- lookupStmtName ctxt bindMName
; xMonadFailEnabled <- fmap (xopt LangExt.MonadFailDesugaring) getDynFlags
; let failFunction | xMonadFailEnabled = failMName
| otherwise = failMName_preMFP
; (fail_op, fvs2) <- lookupSyntaxName failFunction
; rnPat (StmtCtxt ctxt) pat $ \ pat' -> do
{ (thing, fvs3) <- thing_inside (collectPatBinders pat')
; return (( [( L loc (BindStmt pat' body' bind_op fail_op PlaceHolder)
, fv_expr )]
, thing),
fv_expr `plusFV` fvs1 `plusFV` fvs2 `plusFV` fvs3) }}
-- fv_expr shouldn't really be filtered by the rnPatsAndThen
-- but it does not matter because the names are unique
rnStmt _ _ (L loc (LetStmt (L l binds))) thing_inside
= do { rnLocalBindsAndThen binds $ \binds' bind_fvs -> do
{ (thing, fvs) <- thing_inside (collectLocalBinders binds')
; return (([(L loc (LetStmt (L l binds')), bind_fvs)], thing), fvs) } }
rnStmt ctxt rnBody (L loc (RecStmt { recS_stmts = rec_stmts })) thing_inside
= do { (return_op, fvs1) <- lookupStmtName ctxt returnMName
; (mfix_op, fvs2) <- lookupStmtName ctxt mfixName
; (bind_op, fvs3) <- lookupStmtName ctxt bindMName
; let empty_rec_stmt = emptyRecStmtName { recS_ret_fn = return_op
, recS_mfix_fn = mfix_op
, recS_bind_fn = bind_op }
-- Step1: Bring all the binders of the mdo into scope
-- (Remember that this also removes the binders from the
-- finally-returned free-vars.)
-- And rename each individual stmt, making a
-- singleton segment. At this stage the FwdRefs field
-- isn't finished: it's empty for all except a BindStmt
-- for which it's the fwd refs within the bind itself
-- (This set may not be empty, because we're in a recursive
-- context.)
; rnRecStmtsAndThen rnBody rec_stmts $ \ segs -> do
{ let bndrs = nameSetElems $ foldr (unionNameSet . (\(ds,_,_,_) -> ds))
emptyNameSet segs
; (thing, fvs_later) <- thing_inside bndrs
; let (rec_stmts', fvs) = segmentRecStmts loc ctxt empty_rec_stmt segs fvs_later
-- We aren't going to try to group RecStmts with
-- ApplicativeDo, so attaching empty FVs is fine.
; return ( ((zip rec_stmts' (repeat emptyNameSet)), thing)
, fvs `plusFV` fvs1 `plusFV` fvs2 `plusFV` fvs3) } }
rnStmt ctxt _ (L loc (ParStmt segs _ _ _)) thing_inside
= do { (mzip_op, fvs1) <- lookupStmtNamePoly ctxt mzipName
; (bind_op, fvs2) <- lookupStmtName ctxt bindMName
; (return_op, fvs3) <- lookupStmtName ctxt returnMName
; ((segs', thing), fvs4) <- rnParallelStmts (ParStmtCtxt ctxt) return_op segs thing_inside
; return ( ([(L loc (ParStmt segs' mzip_op bind_op placeHolderType), fvs4)], thing)
, fvs1 `plusFV` fvs2 `plusFV` fvs3 `plusFV` fvs4) }
rnStmt ctxt _ (L loc (TransStmt { trS_stmts = stmts, trS_by = by, trS_form = form
, trS_using = using })) thing_inside
= do { -- Rename the 'using' expression in the context before the transform is begun
(using', fvs1) <- rnLExpr using
-- Rename the stmts and the 'by' expression
-- Keep track of the variables mentioned in the 'by' expression
; ((stmts', (by', used_bndrs, thing)), fvs2)
<- rnStmts (TransStmtCtxt ctxt) rnLExpr stmts $ \ bndrs ->
do { (by', fvs_by) <- mapMaybeFvRn rnLExpr by
; (thing, fvs_thing) <- thing_inside bndrs
; let fvs = fvs_by `plusFV` fvs_thing
used_bndrs = filter (`elemNameSet` fvs) bndrs
-- The paper (Fig 5) has a bug here; we must treat any free variable
-- of the "thing inside", **or of the by-expression**, as used
; return ((by', used_bndrs, thing), fvs) }
-- Lookup `return`, `(>>=)` and `liftM` for monad comprehensions
; (return_op, fvs3) <- lookupStmtName ctxt returnMName
; (bind_op, fvs4) <- lookupStmtName ctxt bindMName
; (fmap_op, fvs5) <- case form of
ThenForm -> return (noExpr, emptyFVs)
_ -> lookupStmtNamePoly ctxt fmapName
; let all_fvs = fvs1 `plusFV` fvs2 `plusFV` fvs3
`plusFV` fvs4 `plusFV` fvs5
bndr_map = used_bndrs `zip` used_bndrs
-- See Note [TransStmt binder map] in HsExpr
; traceRn (text "rnStmt: implicitly rebound these used binders:" <+> ppr bndr_map)
; return (([(L loc (TransStmt { trS_stmts = stmts', trS_bndrs = bndr_map
, trS_by = by', trS_using = using', trS_form = form
, trS_ret = return_op, trS_bind = bind_op
, trS_bind_arg_ty = PlaceHolder
, trS_fmap = fmap_op }), fvs2)], thing), all_fvs) }
rnStmt _ _ (L _ ApplicativeStmt{}) _ =
panic "rnStmt: ApplicativeStmt"
rnParallelStmts :: forall thing. HsStmtContext Name
-> SyntaxExpr Name
-> [ParStmtBlock RdrName RdrName]
-> ([Name] -> RnM (thing, FreeVars))
-> RnM (([ParStmtBlock Name Name], thing), FreeVars)
-- Note [Renaming parallel Stmts]
rnParallelStmts ctxt return_op segs thing_inside
= do { orig_lcl_env <- getLocalRdrEnv
; rn_segs orig_lcl_env [] segs }
where
rn_segs :: LocalRdrEnv
-> [Name] -> [ParStmtBlock RdrName RdrName]
-> RnM (([ParStmtBlock Name Name], thing), FreeVars)
rn_segs _ bndrs_so_far []
= do { let (bndrs', dups) = removeDups cmpByOcc bndrs_so_far
; mapM_ dupErr dups
; (thing, fvs) <- bindLocalNames bndrs' (thing_inside bndrs')
; return (([], thing), fvs) }
rn_segs env bndrs_so_far (ParStmtBlock stmts _ _ : segs)
= do { ((stmts', (used_bndrs, segs', thing)), fvs)
<- rnStmts ctxt rnLExpr stmts $ \ bndrs ->
setLocalRdrEnv env $ do
{ ((segs', thing), fvs) <- rn_segs env (bndrs ++ bndrs_so_far) segs
; let used_bndrs = filter (`elemNameSet` fvs) bndrs
; return ((used_bndrs, segs', thing), fvs) }
; let seg' = ParStmtBlock stmts' used_bndrs return_op
; return ((seg':segs', thing), fvs) }
cmpByOcc n1 n2 = nameOccName n1 `compare` nameOccName n2
dupErr vs = addErr (text "Duplicate binding in parallel list comprehension for:"
<+> quotes (ppr (head vs)))
lookupStmtName :: HsStmtContext Name -> Name -> RnM (SyntaxExpr Name, FreeVars)
-- Like lookupSyntaxName, but respects contexts
lookupStmtName ctxt n
| rebindableContext ctxt
= lookupSyntaxName n
| otherwise
= return (mkRnSyntaxExpr n, emptyFVs)
lookupStmtNamePoly :: HsStmtContext Name -> Name -> RnM (HsExpr Name, FreeVars)
lookupStmtNamePoly ctxt name
| rebindableContext ctxt
= do { rebindable_on <- xoptM LangExt.RebindableSyntax
; if rebindable_on
then do { fm <- lookupOccRn (nameRdrName name)
; return (HsVar (noLoc fm), unitFV fm) }
else not_rebindable }
| otherwise
= not_rebindable
where
not_rebindable = return (HsVar (noLoc name), emptyFVs)
-- | Is this a context where we respect RebindableSyntax?
-- ListComp/PArrComp are never rebindable.
-- Neither is ArrowExpr, which has its own desugarer in DsArrows.
rebindableContext :: HsStmtContext Name -> Bool
rebindableContext ctxt = case ctxt of
ListComp -> False
PArrComp -> False
ArrowExpr -> False
PatGuard {} -> False
DoExpr -> True
MDoExpr -> True
MonadComp -> True
GhciStmtCtxt -> True -- I suppose?
ParStmtCtxt c -> rebindableContext c -- Look inside to
TransStmtCtxt c -> rebindableContext c -- the parent context
{-
Note [Renaming parallel Stmts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Renaming parallel statements is painful. Given, say
[ a+c | a <- as, bs <- bss
| c <- bs, a <- ds ]
Note that
(a) In order to report "Defined but not used" about 'bs', we must
rename each group of Stmts with a thing_inside whose FreeVars
include at least {a,c}
(b) We want to report that 'a' is illegally bound in both branches
(c) The 'bs' in the second group must obviously not be captured by
the binding in the first group
To satisfy (a) we nest the segments.
To satisfy (b) we check for duplicates just before thing_inside.
To satisfy (c) we reset the LocalRdrEnv each time.
************************************************************************
* *
\subsubsection{mdo expressions}
* *
************************************************************************
-}
type FwdRefs = NameSet
type Segment stmts = (Defs,
Uses, -- May include defs
FwdRefs, -- A subset of uses that are
-- (a) used before they are bound in this segment, or
-- (b) used here, and bound in subsequent segments
stmts) -- Either Stmt or [Stmt]
-- wrapper that does both the left- and right-hand sides
rnRecStmtsAndThen :: Outputable (body RdrName) =>
(Located (body RdrName)
-> RnM (Located (body Name), FreeVars))
-> [LStmt RdrName (Located (body RdrName))]
-- assumes that the FreeVars returned includes
-- the FreeVars of the Segments
-> ([Segment (LStmt Name (Located (body Name)))]
-> RnM (a, FreeVars))
-> RnM (a, FreeVars)
rnRecStmtsAndThen rnBody s cont
= do { -- (A) Make the mini fixity env for all of the stmts
fix_env <- makeMiniFixityEnv (collectRecStmtsFixities s)
-- (B) Do the LHSes
; new_lhs_and_fv <- rn_rec_stmts_lhs fix_env s
-- ...bring them and their fixities into scope
; let bound_names = collectLStmtsBinders (map fst new_lhs_and_fv)
-- Fake uses of variables introduced implicitly (warning suppression, see #4404)
implicit_uses = lStmtsImplicits (map fst new_lhs_and_fv)
; bindLocalNamesFV bound_names $
addLocalFixities fix_env bound_names $ do
-- (C) do the right-hand-sides and thing-inside
{ segs <- rn_rec_stmts rnBody bound_names new_lhs_and_fv
; (res, fvs) <- cont segs
; warnUnusedLocalBinds bound_names (fvs `unionNameSet` implicit_uses)
; return (res, fvs) }}
-- get all the fixity decls in any Let stmt
collectRecStmtsFixities :: [LStmtLR RdrName RdrName body] -> [LFixitySig RdrName]
collectRecStmtsFixities l =
foldr (\ s -> \acc -> case s of
(L _ (LetStmt (L _ (HsValBinds (ValBindsIn _ sigs))))) ->
foldr (\ sig -> \ acc -> case sig of
(L loc (FixSig s)) -> (L loc s) : acc
_ -> acc) acc sigs
_ -> acc) [] l
-- left-hand sides
rn_rec_stmt_lhs :: Outputable body => MiniFixityEnv
-> LStmt RdrName body
-- rename LHS, and return its FVs
-- Warning: we will only need the FreeVars below in the case of a BindStmt,
-- so we don't bother to compute it accurately in the other cases
-> RnM [(LStmtLR Name RdrName body, FreeVars)]
rn_rec_stmt_lhs _ (L loc (BodyStmt body a b c))
= return [(L loc (BodyStmt body a b c), emptyFVs)]
rn_rec_stmt_lhs _ (L loc (LastStmt body noret a))
= return [(L loc (LastStmt body noret a), emptyFVs)]
rn_rec_stmt_lhs fix_env (L loc (BindStmt pat body a b t))
= do
-- should the ctxt be MDo instead?
(pat', fv_pat) <- rnBindPat (localRecNameMaker fix_env) pat
return [(L loc (BindStmt pat' body a b t),
fv_pat)]
rn_rec_stmt_lhs _ (L _ (LetStmt (L _ binds@(HsIPBinds _))))
= failWith (badIpBinds (text "an mdo expression") binds)
rn_rec_stmt_lhs fix_env (L loc (LetStmt (L l(HsValBinds binds))))
= do (_bound_names, binds') <- rnLocalValBindsLHS fix_env binds
return [(L loc (LetStmt (L l (HsValBinds binds'))),
-- Warning: this is bogus; see function invariant
emptyFVs
)]
-- XXX Do we need to do something with the return and mfix names?
rn_rec_stmt_lhs fix_env (L _ (RecStmt { recS_stmts = stmts })) -- Flatten Rec inside Rec
= rn_rec_stmts_lhs fix_env stmts
rn_rec_stmt_lhs _ stmt@(L _ (ParStmt {})) -- Syntactically illegal in mdo
= pprPanic "rn_rec_stmt" (ppr stmt)
rn_rec_stmt_lhs _ stmt@(L _ (TransStmt {})) -- Syntactically illegal in mdo
= pprPanic "rn_rec_stmt" (ppr stmt)
rn_rec_stmt_lhs _ stmt@(L _ (ApplicativeStmt {})) -- Shouldn't appear yet
= pprPanic "rn_rec_stmt" (ppr stmt)
rn_rec_stmt_lhs _ (L _ (LetStmt (L _ EmptyLocalBinds)))
= panic "rn_rec_stmt LetStmt EmptyLocalBinds"
rn_rec_stmts_lhs :: Outputable body => MiniFixityEnv
-> [LStmt RdrName body]
-> RnM [(LStmtLR Name RdrName body, FreeVars)]
rn_rec_stmts_lhs fix_env stmts
= do { ls <- concatMapM (rn_rec_stmt_lhs fix_env) stmts
; let boundNames = collectLStmtsBinders (map fst ls)
-- First do error checking: we need to check for dups here because we
-- don't bind all of the variables from the Stmt at once
-- with bindLocatedLocals.
; checkDupNames boundNames
; return ls }
-- right-hand-sides
rn_rec_stmt :: (Outputable (body RdrName)) =>
(Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> [Name]
-> (LStmtLR Name RdrName (Located (body RdrName)), FreeVars)
-> RnM [Segment (LStmt Name (Located (body Name)))]
-- Rename a Stmt that is inside a RecStmt (or mdo)
-- Assumes all binders are already in scope
-- Turns each stmt into a singleton Stmt
rn_rec_stmt rnBody _ (L loc (LastStmt body noret _), _)
= do { (body', fv_expr) <- rnBody body
; (ret_op, fvs1) <- lookupSyntaxName returnMName
; return [(emptyNameSet, fv_expr `plusFV` fvs1, emptyNameSet,
L loc (LastStmt body' noret ret_op))] }
rn_rec_stmt rnBody _ (L loc (BodyStmt body _ _ _), _)
= do { (body', fvs) <- rnBody body
; (then_op, fvs1) <- lookupSyntaxName thenMName
; return [(emptyNameSet, fvs `plusFV` fvs1, emptyNameSet,
L loc (BodyStmt body' then_op noSyntaxExpr placeHolderType))] }
rn_rec_stmt rnBody _ (L loc (BindStmt pat' body _ _ _), fv_pat)
= do { (body', fv_expr) <- rnBody body
; (bind_op, fvs1) <- lookupSyntaxName bindMName
; xMonadFailEnabled <- fmap (xopt LangExt.MonadFailDesugaring) getDynFlags
; let failFunction | xMonadFailEnabled = failMName
| otherwise = failMName_preMFP
; (fail_op, fvs2) <- lookupSyntaxName failFunction
; let bndrs = mkNameSet (collectPatBinders pat')
fvs = fv_expr `plusFV` fv_pat `plusFV` fvs1 `plusFV` fvs2
; return [(bndrs, fvs, bndrs `intersectNameSet` fvs,
L loc (BindStmt pat' body' bind_op fail_op PlaceHolder))] }
rn_rec_stmt _ _ (L _ (LetStmt (L _ binds@(HsIPBinds _))), _)
= failWith (badIpBinds (text "an mdo expression") binds)
rn_rec_stmt _ all_bndrs (L loc (LetStmt (L l (HsValBinds binds'))), _)
= do { (binds', du_binds) <- rnLocalValBindsRHS (mkNameSet all_bndrs) binds'
-- fixities and unused are handled above in rnRecStmtsAndThen
; let fvs = allUses du_binds
; return [(duDefs du_binds, fvs, emptyNameSet,
L loc (LetStmt (L l (HsValBinds binds'))))] }
-- no RecStmt case because they get flattened above when doing the LHSes
rn_rec_stmt _ _ stmt@(L _ (RecStmt {}), _)
= pprPanic "rn_rec_stmt: RecStmt" (ppr stmt)
rn_rec_stmt _ _ stmt@(L _ (ParStmt {}), _) -- Syntactically illegal in mdo
= pprPanic "rn_rec_stmt: ParStmt" (ppr stmt)
rn_rec_stmt _ _ stmt@(L _ (TransStmt {}), _) -- Syntactically illegal in mdo
= pprPanic "rn_rec_stmt: TransStmt" (ppr stmt)
rn_rec_stmt _ _ (L _ (LetStmt (L _ EmptyLocalBinds)), _)
= panic "rn_rec_stmt: LetStmt EmptyLocalBinds"
rn_rec_stmt _ _ stmt@(L _ (ApplicativeStmt {}), _)
= pprPanic "rn_rec_stmt: ApplicativeStmt" (ppr stmt)
rn_rec_stmts :: Outputable (body RdrName) =>
(Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> [Name]
-> [(LStmtLR Name RdrName (Located (body RdrName)), FreeVars)]
-> RnM [Segment (LStmt Name (Located (body Name)))]
rn_rec_stmts rnBody bndrs stmts
= do { segs_s <- mapM (rn_rec_stmt rnBody bndrs) stmts
; return (concat segs_s) }
---------------------------------------------
segmentRecStmts :: SrcSpan -> HsStmtContext Name
-> Stmt Name body
-> [Segment (LStmt Name body)] -> FreeVars
-> ([LStmt Name body], FreeVars)
segmentRecStmts loc ctxt empty_rec_stmt segs fvs_later
| null segs
= ([], fvs_later)
| MDoExpr <- ctxt
= segsToStmts empty_rec_stmt grouped_segs fvs_later
-- Step 4: Turn the segments into Stmts
-- Use RecStmt when and only when there are fwd refs
-- Also gather up the uses from the end towards the
-- start, so we can tell the RecStmt which things are
-- used 'after' the RecStmt
| otherwise
= ([ L loc $
empty_rec_stmt { recS_stmts = ss
, recS_later_ids = nameSetElems (defs `intersectNameSet` fvs_later)
, recS_rec_ids = nameSetElems (defs `intersectNameSet` uses) }]
, uses `plusFV` fvs_later)
where
(defs_s, uses_s, _, ss) = unzip4 segs
defs = plusFVs defs_s
uses = plusFVs uses_s
-- Step 2: Fill in the fwd refs.
-- The segments are all singletons, but their fwd-ref
-- field mentions all the things used by the segment
-- that are bound after their use
segs_w_fwd_refs = addFwdRefs segs
-- Step 3: Group together the segments to make bigger segments
-- Invariant: in the result, no segment uses a variable
-- bound in a later segment
grouped_segs = glomSegments ctxt segs_w_fwd_refs
----------------------------
addFwdRefs :: [Segment a] -> [Segment a]
-- So far the segments only have forward refs *within* the Stmt
-- (which happens for bind: x <- ...x...)
-- This function adds the cross-seg fwd ref info
addFwdRefs segs
= fst (foldr mk_seg ([], emptyNameSet) segs)
where
mk_seg (defs, uses, fwds, stmts) (segs, later_defs)
= (new_seg : segs, all_defs)
where
new_seg = (defs, uses, new_fwds, stmts)
all_defs = later_defs `unionNameSet` defs
new_fwds = fwds `unionNameSet` (uses `intersectNameSet` later_defs)
-- Add the downstream fwd refs here
{-
Note [Segmenting mdo]
~~~~~~~~~~~~~~~~~~~~~
NB. June 7 2012: We only glom segments that appear in an explicit mdo;
and leave those found in "do rec"'s intact. See
http://ghc.haskell.org/trac/ghc/ticket/4148 for the discussion
leading to this design choice. Hence the test in segmentRecStmts.
Note [Glomming segments]
~~~~~~~~~~~~~~~~~~~~~~~~
Glomming the singleton segments of an mdo into minimal recursive groups.
At first I thought this was just strongly connected components, but
there's an important constraint: the order of the stmts must not change.
Consider
mdo { x <- ...y...
p <- z
y <- ...x...
q <- x
z <- y
r <- x }
Here, the first stmt mentions 'y', which is bound in the third.
But that means that the innocent second stmt (p <- z) gets caught
up in the recursion. And that in turn means that the binding for
'z' has to be included... and so on.
Start at the tail { r <- x }
Now add the next one { z <- y ; r <- x }
Now add one more { q <- x ; z <- y ; r <- x }
Now one more... but this time we have to group a bunch into rec
{ rec { y <- ...x... ; q <- x ; z <- y } ; r <- x }
Now one more, which we can add on without a rec
{ p <- z ;
rec { y <- ...x... ; q <- x ; z <- y } ;
r <- x }
Finally we add the last one; since it mentions y we have to
glom it together with the first two groups
{ rec { x <- ...y...; p <- z ; y <- ...x... ;
q <- x ; z <- y } ;
r <- x }
-}
glomSegments :: HsStmtContext Name
-> [Segment (LStmt Name body)]
-> [Segment [LStmt Name body]] -- Each segment has a non-empty list of Stmts
-- See Note [Glomming segments]
glomSegments _ [] = []
glomSegments ctxt ((defs,uses,fwds,stmt) : segs)
-- Actually stmts will always be a singleton
= (seg_defs, seg_uses, seg_fwds, seg_stmts) : others
where
segs' = glomSegments ctxt segs
(extras, others) = grab uses segs'
(ds, us, fs, ss) = unzip4 extras
seg_defs = plusFVs ds `plusFV` defs
seg_uses = plusFVs us `plusFV` uses
seg_fwds = plusFVs fs `plusFV` fwds
seg_stmts = stmt : concat ss
grab :: NameSet -- The client
-> [Segment a]
-> ([Segment a], -- Needed by the 'client'
[Segment a]) -- Not needed by the client
-- The result is simply a split of the input
grab uses dus
= (reverse yeses, reverse noes)
where
(noes, yeses) = span not_needed (reverse dus)
not_needed (defs,_,_,_) = not (intersectsNameSet defs uses)
----------------------------------------------------
segsToStmts :: Stmt Name body -- A RecStmt with the SyntaxOps filled in
-> [Segment [LStmt Name body]] -- Each Segment has a non-empty list of Stmts
-> FreeVars -- Free vars used 'later'
-> ([LStmt Name body], FreeVars)
segsToStmts _ [] fvs_later = ([], fvs_later)
segsToStmts empty_rec_stmt ((defs, uses, fwds, ss) : segs) fvs_later
= ASSERT( not (null ss) )
(new_stmt : later_stmts, later_uses `plusFV` uses)
where
(later_stmts, later_uses) = segsToStmts empty_rec_stmt segs fvs_later
new_stmt | non_rec = head ss
| otherwise = L (getLoc (head ss)) rec_stmt
rec_stmt = empty_rec_stmt { recS_stmts = ss
, recS_later_ids = nameSetElems used_later
, recS_rec_ids = nameSetElems fwds }
non_rec = isSingleton ss && isEmptyNameSet fwds
used_later = defs `intersectNameSet` later_uses
-- The ones needed after the RecStmt
{-
************************************************************************
* *
ApplicativeDo
* *
************************************************************************
Note [ApplicativeDo]
= Example =
For a sequence of statements
do
x <- A
y <- B x
z <- C
return (f x y z)
We want to transform this to
(\(x,y) z -> f x y z) <$> (do x <- A; y <- B x; return (x,y)) <*> C
It would be easy to notice that "y <- B x" and "z <- C" are
independent and do something like this:
do
x <- A
(y,z) <- (,) <$> B x <*> C
return (f x y z)
But this isn't enough! A and C were also independent, and this
transformation loses the ability to do A and C in parallel.
The algorithm works by first splitting the sequence of statements into
independent "segments", and a separate "tail" (the final statement). In
our example above, the segments would be
[ x <- A
, y <- B x ]
[ z <- C ]
and the tail is:
return (f x y z)
Then we take these segments and make an Applicative expression from them:
(\(x,y) z -> return (f x y z))
<$> do { x <- A; y <- B x; return (x,y) }
<*> C
Finally, we recursively apply the transformation to each segment, to
discover any nested parallelism.
= Syntax & spec =
expr ::= ... | do {stmt_1; ..; stmt_n} expr | ...
stmt ::= pat <- expr
| (arg_1 | ... | arg_n) -- applicative composition, n>=1
| ... -- other kinds of statement (e.g. let)
arg ::= pat <- expr
| {stmt_1; ..; stmt_n} {var_1..var_n}
(note that in the actual implementation, the expr in a do statement is
represented by a LastStmt as the final stmt; this is just a
representational issue and may change later.)
== Transformation to introduce applicative stmts ==
ado {} tail = tail
ado {pat <- expr} {return expr'} = (mkArg(pat <- expr)); return expr'
ado {one} tail = one : tail
ado stmts tail
| n == 1 = ado before (ado after tail)
where (before,after) = split(stmts_1)
| n > 1 = (mkArg(stmts_1) | ... | mkArg(stmts_n)); tail
where
{stmts_1 .. stmts_n} = segments(stmts)
segments(stmts) =
-- divide stmts into segments with no interdependencies
mkArg({pat <- expr}) = (pat <- expr)
mkArg({stmt_1; ...; stmt_n}) =
{stmt_1; ...; stmt_n} {vars(stmt_1) u .. u vars(stmt_n)}
  split({stmt_1; ..; stmt_n}) =
({stmt_1; ..; stmt_i}, {stmt_i+1; ..; stmt_n})
-- 1 <= i <= n
-- i is a good place to insert a bind
== Desugaring for do ==
dsDo {} expr = expr
dsDo {pat <- rhs; stmts} expr =
rhs >>= \pat -> dsDo stmts expr
dsDo {(arg_1 | ... | arg_n)} (return expr) =
(\argpat (arg_1) .. argpat(arg_n) -> expr)
<$> argexpr(arg_1)
<*> ...
<*> argexpr(arg_n)
dsDo {(arg_1 | ... | arg_n); stmts} expr =
join (\argpat (arg_1) .. argpat(arg_n) -> dsDo stmts expr)
<$> argexpr(arg_1)
<*> ...
<*> argexpr(arg_n)
-}
-- | rearrange a list of statements using ApplicativeStmt. See
-- Note [ApplicativeDo].
rearrangeForApplicativeDo
:: HsStmtContext Name
-> [(ExprLStmt Name, FreeVars)]
-> RnM ([ExprLStmt Name], FreeVars)
rearrangeForApplicativeDo _ [] = return ([], emptyNameSet)
rearrangeForApplicativeDo _ [(one,_)] = return ([one], emptyNameSet)
rearrangeForApplicativeDo ctxt stmts0 = do
optimal_ado <- goptM Opt_OptimalApplicativeDo
let stmt_tree | optimal_ado = mkStmtTreeOptimal stmts
| otherwise = mkStmtTreeHeuristic stmts
stmtTreeToStmts ctxt stmt_tree [last] last_fvs
where
(stmts,(last,last_fvs)) = findLast stmts0
findLast [] = error "findLast"
findLast [last] = ([],last)
findLast (x:xs) = (x:rest,last) where (rest,last) = findLast xs
-- | A tree of statements using a mixture of applicative and bind constructs.
data StmtTree a
= StmtTreeOne a
| StmtTreeBind (StmtTree a) (StmtTree a)
| StmtTreeApplicative [StmtTree a]
flattenStmtTree :: StmtTree a -> [a]
flattenStmtTree t = go t []
where
go (StmtTreeOne a) as = a : as
go (StmtTreeBind l r) as = go l (go r as)
go (StmtTreeApplicative ts) as = foldr go as ts
type ExprStmtTree = StmtTree (ExprLStmt Name, FreeVars)
type Cost = Int
-- | Turn a sequence of statements into an ExprStmtTree using a
-- heuristic algorithm. /O(n^2)/
mkStmtTreeHeuristic :: [(ExprLStmt Name, FreeVars)] -> ExprStmtTree
mkStmtTreeHeuristic [one] = StmtTreeOne one
mkStmtTreeHeuristic stmts =
case segments stmts of
[one] -> split one
segs -> StmtTreeApplicative (map split segs)
where
split [one] = StmtTreeOne one
split stmts =
StmtTreeBind (mkStmtTreeHeuristic before) (mkStmtTreeHeuristic after)
where (before, after) = splitSegment stmts
-- | Turn a sequence of statements into an ExprStmtTree optimally,
-- using dynamic programming. /O(n^3)/
mkStmtTreeOptimal :: [(ExprLStmt Name, FreeVars)] -> ExprStmtTree
mkStmtTreeOptimal stmts =
ASSERT(not (null stmts)) -- the empty case is handled by the caller;
-- we don't support empty StmtTrees.
fst (arr ! (0,n))
where
n = length stmts - 1
stmt_arr = listArray (0,n) stmts
-- lazy cache of optimal trees for subsequences of the input
arr :: Array (Int,Int) (ExprStmtTree, Cost)
arr = array ((0,0),(n,n))
[ ((lo,hi), tree lo hi)
| lo <- [0..n]
, hi <- [lo..n] ]
-- compute the optimal tree for the sequence [lo..hi]
tree lo hi
| hi == lo = (StmtTreeOne (stmt_arr ! lo), 1)
| otherwise =
case segments [ stmt_arr ! i | i <- [lo..hi] ] of
[] -> panic "mkStmtTree"
[_one] -> split lo hi
segs -> (StmtTreeApplicative trees, maximum costs)
where
bounds = scanl (\(_,hi) a -> (hi+1, hi + length a)) (0,lo-1) segs
(trees,costs) = unzip (map (uncurry split) (tail bounds))
-- find the best place to split the segment [lo..hi]
split :: Int -> Int -> (ExprStmtTree, Cost)
split lo hi
| hi == lo = (StmtTreeOne (stmt_arr ! lo), 1)
| otherwise = (StmtTreeBind before after, c1+c2)
where
-- As per the paper, for a sequence s1...sn, we want to find
-- the split with the minimum cost, where the cost is the
-- sum of the cost of the left and right subsequences.
--
-- As an optimisation (also in the paper) if the cost of
-- s1..s(n-1) is different from the cost of s2..sn, we know
-- that the optimal solution is the lower of the two. Only
-- in the case that these two have the same cost do we need
-- to do the exhaustive search.
--
((before,c1),(after,c2))
| hi - lo == 1
= ((StmtTreeOne (stmt_arr ! lo), 1),
(StmtTreeOne (stmt_arr ! hi), 1))
| left_cost < right_cost
= ((left,left_cost), (StmtTreeOne (stmt_arr ! hi), 1))
          | left_cost > right_cost
= ((StmtTreeOne (stmt_arr ! lo), 1), (right,right_cost))
| otherwise = minimumBy (comparing cost) alternatives
where
(left, left_cost) = arr ! (lo,hi-1)
(right, right_cost) = arr ! (lo+1,hi)
cost ((_,c1),(_,c2)) = c1 + c2
alternatives = [ (arr ! (lo,k), arr ! (k+1,hi))
| k <- [lo .. hi-1] ]
-- | Turn the ExprStmtTree back into a sequence of statements, using
-- ApplicativeStmt where necessary.
stmtTreeToStmts
:: HsStmtContext Name
-> ExprStmtTree
-> [ExprLStmt Name] -- ^ the "tail"
-> FreeVars -- ^ free variables of the tail
-> RnM ( [ExprLStmt Name] -- ( output statements,
, FreeVars ) -- , things we needed
-- If we have a single bind, and we can do it without a join, transform
-- to an ApplicativeStmt. This corresponds to the rule
-- dsBlock [pat <- rhs] (return expr) = expr <$> rhs
-- In the spec, but we do it here rather than in the desugarer,
-- because we need the typechecker to typecheck the <$> form rather than
-- the bind form, which would give rise to a Monad constraint.
stmtTreeToStmts ctxt (StmtTreeOne (L _ (BindStmt pat rhs _ _ _),_))
tail _tail_fvs
| isIrrefutableHsPat pat, (False,tail') <- needJoin tail
-- WARNING: isIrrefutableHsPat on (HsPat Name) doesn't have enough info
-- to know which types have only one constructor. So only
-- tuples come out as irrefutable; other single-constructor
-- types, and newtypes, will not. See the code for
          -- isIrrefutableHsPat
= mkApplicativeStmt ctxt [ApplicativeArgOne pat rhs] False tail'
stmtTreeToStmts _ctxt (StmtTreeOne (s,_)) tail _tail_fvs =
return (s : tail, emptyNameSet)
stmtTreeToStmts ctxt (StmtTreeBind before after) tail tail_fvs = do
(stmts1, fvs1) <- stmtTreeToStmts ctxt after tail tail_fvs
let tail1_fvs = unionNameSets (tail_fvs : map snd (flattenStmtTree after))
(stmts2, fvs2) <- stmtTreeToStmts ctxt before stmts1 tail1_fvs
return (stmts2, fvs1 `plusFV` fvs2)
stmtTreeToStmts ctxt (StmtTreeApplicative trees) tail tail_fvs = do
pairs <- mapM (stmtTreeArg ctxt tail_fvs) trees
let (stmts', fvss) = unzip pairs
let (need_join, tail') = needJoin tail
(stmts, fvs) <- mkApplicativeStmt ctxt stmts' need_join tail'
return (stmts, unionNameSets (fvs:fvss))
where
stmtTreeArg _ctxt _tail_fvs (StmtTreeOne (L _ (BindStmt pat exp _ _ _), _)) =
return (ApplicativeArgOne pat exp, emptyFVs)
stmtTreeArg ctxt tail_fvs tree = do
let stmts = flattenStmtTree tree
pvarset = mkNameSet (concatMap (collectStmtBinders.unLoc.fst) stmts)
`intersectNameSet` tail_fvs
pvars = nameSetElems pvarset
pat = mkBigLHsVarPatTup pvars
tup = mkBigLHsVarTup pvars
(stmts',fvs2) <- stmtTreeToStmts ctxt tree [] pvarset
(mb_ret, fvs1) <-
if | L _ ApplicativeStmt{} <- last stmts' ->
return (unLoc tup, emptyNameSet)
| otherwise -> do
(ret,fvs) <- lookupStmtNamePoly ctxt returnMName
return (HsApp (noLoc ret) tup, fvs)
return ( ApplicativeArgMany stmts' mb_ret pat
, fvs1 `plusFV` fvs2)
-- | Divide a sequence of statements into segments, where no segment
-- depends on any variables defined by a statement in another segment.
segments
:: [(ExprLStmt Name, FreeVars)]
-> [[(ExprLStmt Name, FreeVars)]]
segments stmts = map fst $ merge $ reverse $ map reverse $ walk (reverse stmts)
where
allvars = mkNameSet (concatMap (collectStmtBinders.unLoc.fst) stmts)
-- We would rather not have a segment that just has LetStmts in
-- it, so combine those with an adjacent segment where possible.
merge [] = []
merge (seg : segs)
= case rest of
[] -> [(seg,all_lets)]
((s,s_lets):ss) | all_lets || s_lets
-> (seg ++ s, all_lets && s_lets) : ss
_otherwise -> (seg,all_lets) : rest
where
rest = merge segs
all_lets = all (isLetStmt . fst) seg
-- walk splits the statement sequence into segments, traversing
-- the sequence from the back to the front, and keeping track of
-- the set of free variables of the current segment. Whenever
-- this set of free variables is empty, we have a complete segment.
walk :: [(ExprLStmt Name, FreeVars)] -> [[(ExprLStmt Name, FreeVars)]]
walk [] = []
walk ((stmt,fvs) : stmts) = ((stmt,fvs) : seg) : walk rest
where (seg,rest) = chunter fvs' stmts
(_, fvs') = stmtRefs stmt fvs
chunter _ [] = ([], [])
chunter vars ((stmt,fvs) : rest)
| not (isEmptyNameSet vars)
= ((stmt,fvs) : chunk, rest')
where (chunk,rest') = chunter vars' rest
(pvars, evars) = stmtRefs stmt fvs
vars' = (vars `minusNameSet` pvars) `unionNameSet` evars
chunter _ rest = ([], rest)
stmtRefs stmt fvs
| isLetStmt stmt = (pvars, fvs' `minusNameSet` pvars)
| otherwise = (pvars, fvs')
where fvs' = fvs `intersectNameSet` allvars
pvars = mkNameSet (collectStmtBinders (unLoc stmt))
isLetStmt :: LStmt a b -> Bool
isLetStmt (L _ LetStmt{}) = True
isLetStmt _ = False
-- | Find a "good" place to insert a bind in an indivisible segment.
-- This is the only place where we use heuristics. The current
-- heuristic is to peel off the first group of independent statements
-- and put the bind after those.
splitSegment
:: [(ExprLStmt Name, FreeVars)]
-> ( [(ExprLStmt Name, FreeVars)]
, [(ExprLStmt Name, FreeVars)] )
splitSegment [one,two] = ([one],[two])
-- there is no choice when there are only two statements; this just saves
-- some work in a common case.
splitSegment stmts
| Just (lets,binds,rest) <- slurpIndependentStmts stmts
= if not (null lets)
then (lets, binds++rest)
else (lets++binds, rest)
| otherwise
= case stmts of
(x:xs) -> ([x],xs)
_other -> (stmts,[])
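-- Example (comment added for exposition; the names are only illustrative):
-- given the statements of
--
-- > do { x <- a; y <- b; z <- c x y }
--
-- the first two binds are mutually independent, so 'slurpIndependentStmts'
-- returns them as one group and 'splitSegment' places the split just before
-- @z <- c x y@, i.e. the monadic bind goes after the independent group.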
slurpIndependentStmts
:: [(LStmt Name (Located (body Name)), FreeVars)]
-> Maybe ( [(LStmt Name (Located (body Name)), FreeVars)] -- LetStmts
, [(LStmt Name (Located (body Name)), FreeVars)] -- BindStmts
, [(LStmt Name (Located (body Name)), FreeVars)] )
slurpIndependentStmts stmts = go [] [] emptyNameSet stmts
where
-- If we encounter a BindStmt that doesn't depend on a previous BindStmt
-- in this group, then add it to the group.
go lets indep bndrs ((L loc (BindStmt pat body bind_op fail_op ty), fvs) : rest)
| isEmptyNameSet (bndrs `intersectNameSet` fvs)
= go lets ((L loc (BindStmt pat body bind_op fail_op ty), fvs) : indep)
bndrs' rest
where bndrs' = bndrs `unionNameSet` mkNameSet (collectPatBinders pat)
-- If we encounter a LetStmt that doesn't depend on a BindStmt in this
-- group, then move it to the beginning, so that it doesn't interfere with
-- grouping more BindStmts.
-- TODO: perhaps we shouldn't do this if there are any strict bindings,
-- because we might be moving evaluation earlier.
go lets indep bndrs ((L loc (LetStmt binds), fvs) : rest)
| isEmptyNameSet (bndrs `intersectNameSet` fvs)
= go ((L loc (LetStmt binds), fvs) : lets) indep bndrs rest
go _ [] _ _ = Nothing
go _ [_] _ _ = Nothing
go lets indep _ stmts = Just (reverse lets, reverse indep, stmts)
-- | Build an ApplicativeStmt, and strip the "return" from the tail
-- if necessary.
--
-- For example, if we start with
-- do x <- E1; y <- E2; return (f x y)
-- then we get
-- do (E1[x] | E2[y]); f x y
--
-- the LastStmt in this case has the return removed, but we set the
-- flag on the LastStmt to indicate this, so that we can print out the
-- original statement correctly in error messages. It is easier to do
-- it this way rather than try to ignore the return later in both the
-- typechecker and the desugarer (I tried it that way first!).
mkApplicativeStmt
:: HsStmtContext Name
-> [ApplicativeArg Name Name] -- ^ The args
-> Bool -- ^ True <=> need a join
-> [ExprLStmt Name] -- ^ The body statements
-> RnM ([ExprLStmt Name], FreeVars)
mkApplicativeStmt ctxt args need_join body_stmts
= do { (fmap_op, fvs1) <- lookupStmtName ctxt fmapName
; (ap_op, fvs2) <- lookupStmtName ctxt apAName
; (mb_join, fvs3) <-
if need_join then
do { (join_op, fvs) <- lookupStmtName ctxt joinMName
; return (Just join_op, fvs) }
else
return (Nothing, emptyNameSet)
; let applicative_stmt = noLoc $ ApplicativeStmt
(zip (fmap_op : repeat ap_op) args)
mb_join
placeHolderType
; return ( applicative_stmt : body_stmts
, fvs1 `plusFV` fvs2 `plusFV` fvs3) }
-- | Given the statements following an ApplicativeStmt, determine whether
-- we need a @join@ or not, and remove the @return@ if necessary.
needJoin :: [ExprLStmt Name] -> (Bool, [ExprLStmt Name])
needJoin [] = (False, []) -- we're in an ApplicativeArg
needJoin [L loc (LastStmt e _ t)]
| Just arg <- isReturnApp e = (False, [L loc (LastStmt arg True t)])
needJoin stmts = (True, stmts)
-- | @Just e@, if the expression is @return e@, otherwise @Nothing@
isReturnApp :: LHsExpr Name -> Maybe (LHsExpr Name)
isReturnApp (L _ (HsPar expr)) = isReturnApp expr
isReturnApp (L _ (HsApp f arg))
| is_return f = Just arg
| otherwise = Nothing
where
is_return (L _ (HsPar e)) = is_return e
is_return (L _ (HsAppType e _)) = is_return e
is_return (L _ (HsVar (L _ r))) = r == returnMName || r == pureAName
-- TODO: I don't know how to get this right for rebindable syntax
is_return _ = False
isReturnApp _ = Nothing
{-
************************************************************************
* *
\subsubsection{Errors}
* *
************************************************************************
-}
checkEmptyStmts :: HsStmtContext Name -> RnM ()
-- We've seen an empty sequence of Stmts... is that ok?
checkEmptyStmts ctxt
= unless (okEmpty ctxt) (addErr (emptyErr ctxt))
okEmpty :: HsStmtContext a -> Bool
okEmpty (PatGuard {}) = True
okEmpty _ = False
emptyErr :: HsStmtContext Name -> SDoc
emptyErr (ParStmtCtxt {}) = text "Empty statement group in parallel comprehension"
emptyErr (TransStmtCtxt {}) = text "Empty statement group preceding 'group' or 'then'"
emptyErr ctxt = text "Empty" <+> pprStmtContext ctxt
----------------------
checkLastStmt :: Outputable (body RdrName) => HsStmtContext Name
-> LStmt RdrName (Located (body RdrName))
-> RnM (LStmt RdrName (Located (body RdrName)))
checkLastStmt ctxt lstmt@(L loc stmt)
= case ctxt of
ListComp -> check_comp
MonadComp -> check_comp
PArrComp -> check_comp
ArrowExpr -> check_do
DoExpr -> check_do
MDoExpr -> check_do
_ -> check_other
where
check_do -- Expect BodyStmt, and change it to LastStmt
= case stmt of
BodyStmt e _ _ _ -> return (L loc (mkLastStmt e))
LastStmt {} -> return lstmt -- "Deriving" clauses may generate a
-- LastStmt directly (unlike the parser)
_ -> do { addErr (hang last_error 2 (ppr stmt)); return lstmt }
last_error = (text "The last statement in" <+> pprAStmtContext ctxt
<+> text "must be an expression")
check_comp -- Expect LastStmt; this should be enforced by the parser!
= case stmt of
LastStmt {} -> return lstmt
_ -> pprPanic "checkLastStmt" (ppr lstmt)
check_other -- Behave just as if this wasn't the last stmt
= do { checkStmt ctxt lstmt; return lstmt }
-- Checking when a particular Stmt is ok
checkStmt :: HsStmtContext Name
-> LStmt RdrName (Located (body RdrName))
-> RnM ()
checkStmt ctxt (L _ stmt)
= do { dflags <- getDynFlags
; case okStmt dflags ctxt stmt of
IsValid -> return ()
NotValid extra -> addErr (msg $$ extra) }
where
msg = sep [ text "Unexpected" <+> pprStmtCat stmt <+> ptext (sLit "statement")
, text "in" <+> pprAStmtContext ctxt ]
pprStmtCat :: Stmt a body -> SDoc
pprStmtCat (TransStmt {}) = text "transform"
pprStmtCat (LastStmt {}) = text "return expression"
pprStmtCat (BodyStmt {}) = text "body"
pprStmtCat (BindStmt {}) = text "binding"
pprStmtCat (LetStmt {}) = text "let"
pprStmtCat (RecStmt {}) = text "rec"
pprStmtCat (ParStmt {}) = text "parallel"
pprStmtCat (ApplicativeStmt {}) = panic "pprStmtCat: ApplicativeStmt"
------------
emptyInvalid :: Validity -- Payload is the empty document
emptyInvalid = NotValid Outputable.empty
okStmt, okDoStmt, okCompStmt, okParStmt, okPArrStmt
:: DynFlags -> HsStmtContext Name
-> Stmt RdrName (Located (body RdrName)) -> Validity
-- Return IsValid if OK, (NotValid extra) if not ok
-- The "extra" is an SDoc that is appended to a generic error message
okStmt dflags ctxt stmt
= case ctxt of
PatGuard {} -> okPatGuardStmt stmt
ParStmtCtxt ctxt -> okParStmt dflags ctxt stmt
DoExpr -> okDoStmt dflags ctxt stmt
MDoExpr -> okDoStmt dflags ctxt stmt
ArrowExpr -> okDoStmt dflags ctxt stmt
GhciStmtCtxt -> okDoStmt dflags ctxt stmt
ListComp -> okCompStmt dflags ctxt stmt
MonadComp -> okCompStmt dflags ctxt stmt
PArrComp -> okPArrStmt dflags ctxt stmt
TransStmtCtxt ctxt -> okStmt dflags ctxt stmt
-------------
okPatGuardStmt :: Stmt RdrName (Located (body RdrName)) -> Validity
okPatGuardStmt stmt
= case stmt of
BodyStmt {} -> IsValid
BindStmt {} -> IsValid
LetStmt {} -> IsValid
_ -> emptyInvalid
-------------
okParStmt dflags ctxt stmt
= case stmt of
LetStmt (L _ (HsIPBinds {})) -> emptyInvalid
_ -> okStmt dflags ctxt stmt
----------------
okDoStmt dflags ctxt stmt
= case stmt of
RecStmt {}
| LangExt.RecursiveDo `xopt` dflags -> IsValid
| ArrowExpr <- ctxt -> IsValid -- Arrows allows 'rec'
| otherwise -> NotValid (text "Use RecursiveDo")
BindStmt {} -> IsValid
LetStmt {} -> IsValid
BodyStmt {} -> IsValid
_ -> emptyInvalid
----------------
okCompStmt dflags _ stmt
= case stmt of
BindStmt {} -> IsValid
LetStmt {} -> IsValid
BodyStmt {} -> IsValid
ParStmt {}
| LangExt.ParallelListComp `xopt` dflags -> IsValid
| otherwise -> NotValid (text "Use ParallelListComp")
TransStmt {}
| LangExt.TransformListComp `xopt` dflags -> IsValid
| otherwise -> NotValid (text "Use TransformListComp")
RecStmt {} -> emptyInvalid
LastStmt {} -> emptyInvalid -- Should not happen (dealt with by checkLastStmt)
ApplicativeStmt {} -> emptyInvalid
----------------
okPArrStmt dflags _ stmt
= case stmt of
BindStmt {} -> IsValid
LetStmt {} -> IsValid
BodyStmt {} -> IsValid
ParStmt {}
| LangExt.ParallelListComp `xopt` dflags -> IsValid
| otherwise -> NotValid (text "Use ParallelListComp")
TransStmt {} -> emptyInvalid
RecStmt {} -> emptyInvalid
LastStmt {} -> emptyInvalid -- Should not happen (dealt with by checkLastStmt)
ApplicativeStmt {} -> emptyInvalid
---------
checkTupleSection :: [LHsTupArg RdrName] -> RnM ()
checkTupleSection args
= do { tuple_section <- xoptM LangExt.TupleSections
; checkErr (all tupArgPresent args || tuple_section) msg }
where
msg = text "Illegal tuple section: use TupleSections"
---------
sectionErr :: HsExpr RdrName -> SDoc
sectionErr expr
= hang (text "A section must be enclosed in parentheses")
2 (text "thus:" <+> (parens (ppr expr)))
patSynErr :: HsExpr RdrName -> SDoc -> RnM (HsExpr Name, FreeVars)
patSynErr e explanation = do { addErr (sep [text "Pattern syntax in expression context:",
nest 4 (ppr e)] $$
explanation)
; return (EWildPat, emptyFVs) }
badIpBinds :: Outputable a => SDoc -> a -> SDoc
badIpBinds what binds
= hang (text "Implicit-parameter bindings illegal in" <+> what)
2 (ppr binds)
| mcschroeder/ghc | compiler/rename/RnExpr.hs | bsd-3-clause | 78,620 | 107 | 22 | 22,682 | 19,916 | 10,578 | 9,338 | 1,161 | 15 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE NoStrict #-}
{-# LANGUAGE TupleSections #-}
module Data.IP.Builder
( -- * 'P.BoundedPrim' 'B.Builder's for general, IPv4 and IPv6 addresses.
ipBuilder
, ipv4Builder
, ipv6Builder
) where
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Builder.Prim as P
import Data.ByteString.Builder.Prim ((>$<), (>*<))
import GHC.Exts
import GHC.Word (Word8(..), Word16(..), Word32(..))
import Data.IP.Addr
------------ IP builders
{-# INLINE ipBuilder #-}
-- | 'P.BoundedPrim' bytestring 'B.Builder' for general 'IP' addresses.
ipBuilder :: IP -> B.Builder
ipBuilder (IPv4 addr) = ipv4Builder addr
ipBuilder (IPv6 addr) = ipv6Builder addr
{-# INLINE ipv4Builder #-}
-- | 'P.BoundedPrim' bytestring 'B.Builder' for 'IPv4' addresses.
ipv4Builder :: IPv4 -> B.Builder
ipv4Builder addr = P.primBounded ipv4Bounded $! fromIPv4w addr
{-# INLINE ipv6Builder #-}
-- | 'P.BoundedPrim' bytestring 'B.Builder' for 'IPv6' addresses.
ipv6Builder :: IPv6 -> B.Builder
ipv6Builder addr = P.primBounded ipv6Bounded $! fromIPv6w addr
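-- Illustrative usage (added comment, not part of the original module),
-- assuming 'toIPv4' and 'toIPv6' from "Data.IP.Addr" are in scope:
--
-- > B.toLazyByteString (ipv4Builder (toIPv4 [192,0,2,1]))                  -- "192.0.2.1"
-- > B.toLazyByteString (ipv6Builder (toIPv6 [0x2001,0xdb8,0,0,0,0,0,1]))   -- "2001:db8::1"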
------------ Builder utilities
-- Convert fixed to bounded for fusion
toB :: P.FixedPrim a -> P.BoundedPrim a
toB = P.liftFixedToBounded
{-# INLINE toB #-}
ipv4Bounded :: P.BoundedPrim Word32
ipv4Bounded =
quads >$< ((P.word8Dec >*< dotsep) >*< (P.word8Dec >*< dotsep))
>*< ((P.word8Dec >*< dotsep) >*< P.word8Dec)
where
quads a = ((qdot 0o30# a, qdot 0o20# a), (qdot 0o10# a, qfin a))
{-# INLINE quads #-}
qdot s (W32# a) = (W8# (wordToWord8Compat# ((word32ToWordCompat# a `uncheckedShiftRL#` s) `and#` 0xff##)), ())
{-# INLINE qdot #-}
qfin (W32# a) = W8# (wordToWord8Compat# (word32ToWordCompat# a `and#` 0xff##))
{-# INLINE qfin #-}
dotsep = const 0x2e >$< toB P.word8
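-- (Added note: the octal shift literals 0o30#, 0o20# and 0o10# above are 24,
-- 16 and 8, so 'quads' splits the 'Word32' into its four bytes, most
-- significant first; e.g. 0xC0000201 is rendered as "192.0.2.1".)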
-- | For each 32-bit chunk of an IPv6 address, encode its display format in the
-- presentation form of the address, based on its location relative to the
-- "best gap", i.e. the left-most longest run of zeros. The "hi" (H) and/or
-- "lo" (L) 16 bits may be accompanied by colons (C) on the left and/or right.
--
data FF = CHL Word32 -- ^ :<h>:<l>
| HL Word32 -- ^ <h>:<l>
| NOP -- ^ nop
| COL -- ^ :
| CC -- ^ : :
| CLO Word32 -- ^ :<l>
| CHC Word32 -- ^ :<h>:
| HC Word32 -- ^ <h>:
-- Build an IPv6 address in conformance with
-- [RFC 5952](http://tools.ietf.org/html/rfc5952).
--
ipv6Bounded :: P.BoundedPrim (Word32, Word32, Word32, Word32)
ipv6Bounded =
P.condB generalCase
( genFields >$< output128 )
( P.condB v4mapped
( pairPair >$< (colsep >*< colsep)
>*< (ffff >*< (fstUnit >$< colsep >*< ipv4Bounded)) )
( pairPair >$< (P.emptyB >*< colsep) >*< (colsep >*< ipv4Bounded) ) )
where
-- The boundedPrim switches and predicates need to be inlined for best
-- performance, gaining a factor of ~2 in throughput in tests.
--
{-# INLINE output128 #-}
{-# INLINE output64 #-}
{-# INLINE generalCase #-}
{-# INLINE v4mapped #-}
{-# INLINE output32 #-}
generalCase :: (Word32, Word32, Word32, Word32) -> Bool
generalCase (w0, w1, w2, w3) =
w0 /= 0 || w1 /= 0 || (w2 /= 0xffff && (w2 /= 0 || w3 <= 0xffff))
--
v4mapped :: (Word32, Word32, Word32, Word32) -> Bool
v4mapped (w0, w1, w2, _) =
w0 == 0 && w1 == 0 && w2 == 0xffff
-- BoundedPrim for the full 128-bit IPv6 address given as
-- a pair of pairs of FF values, which encode the
-- output format of each of the 32-bit chunks.
--
output128 :: P.BoundedPrim ((FF, FF), (FF, FF))
output128 = output64 >*< output64
output64 = (output32 >*< output32)
--
-- And finally the per-word case-work.
--
output32 :: P.BoundedPrim FF
output32 =
P.condB (\case { CHL _ -> True; _ -> False }) build_CHL $ -- :hi:lo
P.condB (\case { HL _ -> True; _ -> False }) build_HL $ -- hi:lo
P.condB (\case { NOP -> True; _ -> False }) build_NOP $ --
P.condB (\case { COL -> True; _ -> False }) build_COL $ -- :
P.condB (\case { CC -> True; _ -> False }) build_CC $ -- : :
P.condB (\case { CLO _ -> True; _ -> False }) build_CLO $ -- :lo
P.condB (\case { CHC _ -> True; _ -> False }) build_CHC $ -- :hi:
build_HC -- hi:
-- encoders for the eight field format (FF) cases.
--
build_CHL = ( \ case CHL w -> ( fstUnit (hi16 w), fstUnit (lo16 w) )
_ -> undefined )
>$< (colsep >*< P.word16Hex)
>*< (colsep >*< P.word16Hex)
--
build_HL = ( \ case HL w -> ( hi16 w, fstUnit (lo16 w) )
_ -> undefined )
>$< P.word16Hex >*< colsep >*< P.word16Hex
--
build_NOP = P.emptyB
--
build_COL = const () >$< colsep
--
build_CC = const ((), ()) >$< colsep >*< colsep
--
build_CLO = ( \ case CLO w -> fstUnit (lo16 w)
_ -> undefined )
>$< colsep >*< P.word16Hex
--
build_CHC = ( \ case CHC w -> fstUnit (sndUnit (hi16 w))
_ -> undefined )
>$< colsep >*< P.word16Hex >*< colsep
--
build_HC = ( \ case HC w -> sndUnit (hi16 w)
_ -> undefined )
>$< P.word16Hex >*< colsep
-- static encoders
--
colsep :: P.BoundedPrim a
colsep = toB $ const 0x3a >$< P.word8
--
ffff :: P.BoundedPrim a
ffff = toB $ const 0xffff >$< P.word16HexFixed
-- | Helpers
hi16, lo16 :: Word32 -> Word16
hi16 !(W32# w) = W16# (wordToWord16Compat# (word32ToWordCompat# w `uncheckedShiftRL#` 16#))
lo16 !(W32# w) = W16# (wordToWord16Compat# (word32ToWordCompat# w `and#` 0xffff##))
--
fstUnit :: a -> ((), a)
fstUnit = ((), )
--
sndUnit :: a -> (a, ())
sndUnit = (, ())
--
pairPair (a, b, c, d) = ((a, b), (c, d))
-- Construct fields decorated with output format details
genFields (w0, w1, w2, w3) =
let !(!gapStart, !gapEnd) = bestgap w0 w1 w2 w3
!f0 = makeF0 gapStart gapEnd w0
!f1 = makeF12 gapStart gapEnd 2# 3# w1
!f2 = makeF12 gapStart gapEnd 4# 5# w2
!f3 = makeF3 gapStart gapEnd w3
in ((f0, f1), (f2, f3))
makeF0 (I# gapStart) (I# gapEnd) !w =
case (gapEnd ==# 0#) `orI#` (gapStart ># 1#) of
1# -> HL w
_ -> case gapStart ==# 0# of
1# -> COL
_ -> HC w
{-# INLINE makeF0 #-}
makeF12 (I# gapStart) (I# gapEnd) il ir !w =
case (gapEnd <=# il) `orI#` (gapStart ># ir) of
1# -> CHL w
_ -> case gapStart >=# il of
1# -> case gapStart ==# il of
1# -> COL
_ -> CHC w
_ -> case gapEnd ==# ir of
0# -> NOP
_ -> CLO w
{-# INLINE makeF12 #-}
makeF3 (I# gapStart) (I# gapEnd) !w =
case gapEnd <=# 6# of
1# -> CHL w
_ -> case gapStart ==# 6# of
0# -> case gapEnd ==# 8# of
1# -> COL
_ -> CLO w
_ -> CC
{-# INLINE makeF3 #-}
-- | Unrolled and inlined calculation of the first longest
-- run (gap) of 16-bit aligned zeros in the input address.
--
bestgap :: Word32 -> Word32 -> Word32 -> Word32 -> (Int, Int)
bestgap !(W32# a0) !(W32# a1) !(W32# a2) !(W32# a3) =
finalGap
(updateGap (0xffff## `and#` (word32ToWordCompat# a3))
(updateGap (0xffff0000## `and#` (word32ToWordCompat# a3))
(updateGap (0xffff## `and#` (word32ToWordCompat# a2))
(updateGap (0xffff0000## `and#` (word32ToWordCompat# a2))
(updateGap (0xffff## `and#` (word32ToWordCompat# a1))
(updateGap (0xffff0000## `and#` (word32ToWordCompat# a1))
(updateGap (0xffff## `and#` (word32ToWordCompat# a0))
(initGap (0xffff0000## `and#` (word32ToWordCompat# a0))))))))))
where
-- The state after the first input word is always i' = 7,
-- but if the input word is zero, then also g=z=1 and e'=7.
initGap :: Word# -> Int#
initGap w = case w of { 0## -> 0x1717#; _ -> 0x0707# }
-- Update the nibbles of g|e'|z|i' based on the next input
-- word. We always decrement i', reset z on non-zero input,
-- otherwise increment z and check for a new best gap, if so
-- we replace g|e' with z|i'.
updateGap :: Word# -> Int# -> Int#
updateGap w g = case w `neWord#` 0## of
1# -> (g +# 0xffff#) `andI#` 0xff0f# -- g, e, 0, --i
_ -> let old = g +# 0xf# -- ++z, --i
zi = old `andI#` 0xff#
new = (zi `uncheckedIShiftL#` 8#) `orI#` zi
in case new ># old of
1# -> new -- z, i, z, i
_ -> old -- g, e, z, i
-- Extract gap start and end from the nibbles of g|e'|z|i'
-- where g is the gap width and e' is 8 minus its end.
finalGap :: Int# -> (Int, Int)
finalGap i =
let g = i `uncheckedIShiftRL#` 12#
in case g <# 2# of
1# -> (0, 0)
_ -> let e = 8# -# ((i `uncheckedIShiftRL#` 8#) `andI#` 0xf#)
s = e -# g
in (I# s, I# e)
{-# INLINE bestgap #-}
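-- A worked example (added for illustration): for 2001:db8::1 the four
-- 'Word32's are 0x20010db8, 0, 0 and 1; the leftmost longest run of zero
-- 16-bit pieces spans indices 2..6, so
--
-- > bestgap 0x20010db8 0 0 1 == (2, 7)
--
-- i.e. the gap starts at piece 2 and ends just before piece 7.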
#if MIN_VERSION_base(4,16,0)
word32ToWordCompat# :: Word32# -> Word#
word32ToWordCompat# = word32ToWord#
wordToWord8Compat# :: Word# -> Word8#
wordToWord8Compat# = wordToWord8#
wordToWord16Compat# :: Word# -> Word16#
wordToWord16Compat# = wordToWord16#
#else
word32ToWordCompat# :: Word# -> Word#
word32ToWordCompat# x = x
wordToWord8Compat# :: Word# -> Word#
wordToWord8Compat# x = x
wordToWord16Compat# :: Word# -> Word#
wordToWord16Compat# x = x
#endif
| kazu-yamamoto/iproute | Data/IP/Builder.hs | bsd-3-clause | 10,407 | 0 | 25 | 3,457 | 2,719 | 1,511 | 1,208 | 184 | 22 |
{-# LANGUAGE ScopedTypeVariables #-}
-- | This module is unstable; functions are not guaranteed to be the same or even to exist in future versions
--
-- It is intended primarily for use by this library itself.
module Data.Bitmap.Util
( tablespoon
, subStr
, padByte
) where
import Control.Exception
import qualified Data.String.Class as S
import Data.Word
import System.IO.Unsafe (unsafePerformIO)
handlers :: [Handler (Either String a)]
handlers = [ Handler $ \(e :: ArithException) -> return . Left . show $ e
, Handler $ \(e :: ArrayException) -> return . Left . show $ e
, Handler $ \(e :: ErrorCall) -> return . Left . show $ e
, Handler $ \(e :: PatternMatchFail) -> return . Left . show $ e
, Handler $ \(e :: SomeException) -> throwIO e
]
-- | Hack to catch "pureish" asynchronous errors
--
-- This is only used as a workaround to the binary library's shortcoming of
-- using asynchronous errors instead of pure error handling, and also zlib's
-- same shortcoming.
--
-- This function is similar to the @spoon@ package's @teaspoon@ function,
-- except that it can return more information when an exception is caught.
tablespoon :: a -> Either String a
tablespoon x = unsafePerformIO $ (Right `fmap` evaluate x) `catches` handlers
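-- Usage sketch (added for illustration; the message text comes from each
-- exception's 'show' instance):
--
-- > tablespoon (1 `div` 0)      -- Left "divide by zero"
-- > tablespoon (2 + 2 :: Int)   -- Right 4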
-- | Return a substring
--
-- 'subStr' @index@ @length@ returns @length@ characters from the string
-- starting at @index@, which starts at 0.
--
-- > subStr 1 2 "abcd" == "bc"
subStr :: (S.StringCells s) => Int -> Int -> s -> s
subStr index length_ = S.take length_ . S.drop index
padByte :: Word8
padByte = 0x00
| bairyn/bitmaps | src/Data/Bitmap/Util.hs | bsd-3-clause | 1,643 | 0 | 10 | 365 | 342 | 199 | 143 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Views.Common.SEO where
import Control.Monad
import qualified Data.Text as T
import Data.Text.Lazy(Text)
import Data.String (fromString)
import qualified Text.Printf as PF
import Network.URI
import Text.Blaze.Html5((!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import qualified Utils.BlazeExtra.Attributes as EA
import Utils.URI.String
import Models.Schema
metaProperty p v =
H.meta ! property p ! A.content v
where
property = H.customAttribute "property"
metaName n v = H.meta ! A.name n ! A.content v
keywordsAndDescription keywords description = do
metaName "keywords" $ H.toValue keywords
metaName "description" $ H.toValue description
openGraph :: String -> String -> String -> H.Html
openGraph title url description = do
metaProperty "og:type" "website"
metaProperty "og:title" $ H.toValue title
metaProperty "og:url" $ H.toValue url
metaProperty "og:description" $ H.toValue description
canonical :: String -> H.Html
canonical url =
H.link ! A.rel "canonical" ! A.href ( H.toValue url)
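-- A usage sketch for the helpers above (URLs and strings are illustrative):
--
-- > pageHead :: H.Html
-- > pageHead = H.head $ do
-- >   keywordsAndDescription ("haskell, blog" :: T.Text) ("A personal blog" :: T.Text)
-- >   openGraph "Home" "https://example.com/" "A personal blog"
-- >   canonical "https://example.com/"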
gaEvent :: String-> String ->H.Attribute
gaEvent ev ct =
let
v = (PF.printf "ga('send', 'event', '%s', '%s');" ev ct) :: String
in
A.onclick $ H.toValue v
utmParams :: String -> String -> [(String,String)]
utmParams host name =
[("utm_source",host)
,("utm_campaign",name)
,("utm_medium","website")]
| DavidAlphaFox/sblog | src/Views/Common/SEO.hs | bsd-3-clause | 1,437 | 0 | 11 | 235 | 469 | 252 | 217 | 40 | 1 |
{-# LANGUAGE Safe, TypeFamilies #-}
module Data.Logic.Atom (
Atom, atom, unit
) where
import Control.Monad.Predicate
import Data.Logic.Term
import Data.Logic.Var
-- |A constant term.
newtype Atom a s = Atom a
instance Eq a => Term (Atom a) where
type Collapse (Atom a) = a
collapse (Atom x) = return x
unify (Atom x) (Atom y) = bool (x == y)
occurs _ _ = return False
-- |Constructs an atom.
atom :: Eq a => a -> Var (Atom a) s
atom = bind . Atom
-- |Synonym for @atom ()@.
unit :: Var (Atom ()) s
unit = atom ()
| YellPika/tlogic | src/Data/Logic/Atom.hs | bsd-3-clause | 539 | 0 | 9 | 127 | 212 | 115 | 97 | 16 | 1 |
{-|
Module : AERN2.Utils.Bench
Description : utilities for benchmarks
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
-}
module AERN2.Utils.Bench
(
listFromGen
)
where
-- import Test.QuickCheck
import Test.QuickCheck.Random (mkQCGen)
import Test.QuickCheck.Gen (Gen(..))
import MixedTypesNumPrelude
-- import qualified Prelude as P
listFromGen :: Gen a -> [a]
listFromGen gen =
list
where
list =
concat $ map genSome [1..]
where
genSome size =
unGen (sequence $ replicate 10 gen) qcGen (int size)
qcGen = mkQCGen (int 148548830)
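-- A usage sketch (illustrative; assumes a generator such as QuickCheck's
-- 'arbitrary' is in scope):
--
-- > take 20 $ listFromGen (arbitrary :: Gen Rational)
--
-- The resulting list is infinite and, because the seed above is fixed,
-- deterministic, which makes it useful for repeatable benchmarks.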
| michalkonecny/aern2 | aern2-mp/src/AERN2/Utils/Bench.hs | bsd-3-clause | 718 | 0 | 12 | 195 | 135 | 76 | 59 | 14 | 1 |
-- | The type of cave kinds. Every level in the game is an instantiated
-- cave kind.
module Game.LambdaHack.Content.CaveKind
( pattern DEFAULT_RANDOM
, CaveKind(..), InitSleep(..), makeData
#ifdef EXPOSE_INTERNAL
-- * Internal operations
, validateSingle, validateAll, mandatoryGroups
#endif
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.Text as T
import Game.LambdaHack.Content.ItemKind (ItemKind)
import Game.LambdaHack.Content.PlaceKind (PlaceKind)
import qualified Game.LambdaHack.Content.RuleKind as RK
import Game.LambdaHack.Content.TileKind (TileKind)
import qualified Game.LambdaHack.Core.Dice as Dice
import Game.LambdaHack.Core.Random
import Game.LambdaHack.Definition.ContentData
import Game.LambdaHack.Definition.Defs
import Game.LambdaHack.Definition.DefsInternal
-- | Parameters for the generation of dungeon levels.
-- Warning: for efficiency, avoid embedded items in any of the common tiles.
data CaveKind = CaveKind
{ cname :: Text -- ^ short description
, cfreq :: Freqs CaveKind -- ^ frequency within groups
, cXminSize :: X -- ^ minimal X size of the whole cave
, cYminSize :: Y -- ^ minimal Y size of the whole cave
, ccellSize :: Dice.DiceXY -- ^ size of a map cell holding a place
, cminPlaceSize :: Dice.DiceXY -- ^ minimal size of places; for merging
, cmaxPlaceSize :: Dice.DiceXY -- ^ maximal size of places; for growing
, cdarkOdds :: Dice.Dice -- ^ the odds a place is dark
-- (level-scaled dice roll > 50)
, cnightOdds :: Dice.Dice -- ^ the odds the cave is dark
-- (level-scaled dice roll > 50)
, cauxConnects :: Rational -- ^ a proportion of extra connections
, cmaxVoid :: Rational
-- ^ at most this proportion of rooms may be void
, cdoorChance :: Chance -- ^ the chance of a door in an opening
, copenChance :: Chance -- ^ if there's a door, is it open?
, chidden :: Int -- ^ if not open, hidden one in n times
, cactorCoeff :: Int -- ^ the lower, the more monsters spawn
, cactorFreq :: Freqs ItemKind -- ^ actor groups to consider
, citemNum :: Dice.Dice -- ^ number of initial items in the cave
, citemFreq :: Freqs ItemKind -- ^ item groups to consider;
-- note that the groups are flattened; e.g., if an item is moved
-- to another included group with the same weight, the outcome
-- doesn't change
, cplaceFreq :: Freqs PlaceKind -- ^ place groups to consider
, cpassable :: Bool
-- ^ are passable default tiles permitted
, clabyrinth :: Bool -- ^ waste of time for AI to explore
, cdefTile :: GroupName TileKind -- ^ the default cave tile
, cdarkCorTile :: GroupName TileKind -- ^ the dark cave corridor tile
, clitCorTile :: GroupName TileKind -- ^ the lit cave corridor tile
, cwallTile :: GroupName TileKind -- ^ the tile used for @FWall@ fence
, ccornerTile :: GroupName TileKind -- ^ tile used for the fence corners
, cfenceTileN :: GroupName TileKind -- ^ the outer fence N wall
, cfenceTileE :: GroupName TileKind -- ^ the outer fence E wall
, cfenceTileS :: GroupName TileKind -- ^ the outer fence S wall
, cfenceTileW :: GroupName TileKind -- ^ the outer fence W wall
, cfenceApart :: Bool -- ^ are places touching fence banned
, cminStairDist :: Int -- ^ minimal distance between stairs
, cmaxStairsNum :: Dice.Dice -- ^ maximum number of stairs
, cescapeFreq :: Freqs PlaceKind -- ^ escape groups, if any
, cstairFreq :: Freqs PlaceKind -- ^ place groups for created stairs
, cstairAllowed :: Freqs PlaceKind -- ^ extra groups for inherited
, cskip :: [Int] -- ^ which faction starting positions to skip
, cinitSleep :: InitSleep -- ^ whether actors spawn sleeping
, cdesc :: Text -- ^ full cave description
}
deriving Show -- No Eq and Ord to make extending logically sound
data InitSleep = InitSleepAlways | InitSleepPermitted | InitSleepBanned
deriving (Show, Eq)
-- | Catch caves with not enough space for all the places. Check the size
-- of the cave descriptions to make sure they fit on screen. Etc.
validateSingle :: RK.RuleContent -> CaveKind -> [Text]
validateSingle corule CaveKind{..} =
let (minCellSizeX, minCellSizeY) = Dice.infDiceXY ccellSize
(maxCellSizeX, maxCellSizeY) = Dice.supDiceXY ccellSize
(minMinSizeX, minMinSizeY) = Dice.infDiceXY cminPlaceSize
(maxMinSizeX, maxMinSizeY) = Dice.supDiceXY cminPlaceSize
(minMaxSizeX, minMaxSizeY) = Dice.infDiceXY cmaxPlaceSize
in [ "cname longer than 25" | T.length cname > 25 ]
++ [ "cXminSize > RK.rWidthMax" | cXminSize > RK.rWidthMax corule ]
++ [ "cYminSize > RK.rHeightMax" | cYminSize > RK.rHeightMax corule ]
++ [ "cXminSize < 8" | cXminSize < 8 ]
++ [ "cYminSize < 8" | cYminSize < 8 ] -- see @focusArea@
++ [ "cXminSize - 2 < maxCellSizeX" | cXminSize - 2 < maxCellSizeX ]
++ [ "cYminSize - 2 < maxCellSizeY" | cYminSize - 2 < maxCellSizeY ]
++ [ "minCellSizeX < 2" | minCellSizeX < 2 ]
++ [ "minCellSizeY < 2" | minCellSizeY < 2 ]
++ [ "minCellSizeX < 4 and stairs"
| minCellSizeX < 4 && not (null cstairFreq) ]
++ [ "minCellSizeY < 4 and stairs"
| minCellSizeY < 4 && not (null cstairFreq) ]
-- The following four are heuristics, so not too restrictive:
++ [ "minCellSizeX < 6 && non-trivial stairs"
| minCellSizeX < 6 && not (length cstairFreq <= 1 && null cescapeFreq) ]
++ [ "minCellSizeY < 4 && non-trivial stairs"
| minCellSizeY < 4 && not (length cstairFreq <= 1 && null cescapeFreq) ]
++ [ "minMinSizeX < 5 && non-trivial stairs"
| minMinSizeX < 5 && not (length cstairFreq <= 1 && null cescapeFreq) ]
++ [ "minMinSizeY < 3 && non-trivial stairs"
| minMinSizeY < 3 && not (length cstairFreq <= 1 && null cescapeFreq) ]
++ [ "minMinSizeX < 1" | minMinSizeX < 1 ]
++ [ "minMinSizeY < 1" | minMinSizeY < 1 ]
++ [ "minMaxSizeX < maxMinSizeX" | minMaxSizeX < maxMinSizeX ]
++ [ "minMaxSizeY < maxMinSizeY" | minMaxSizeY < maxMinSizeY ]
++ [ "chidden < 0" | chidden < 0 ]
++ [ "cactorCoeff < 0" | cactorCoeff < 0 ]
++ [ "citemNum < 0" | Dice.infDice citemNum < 0 ]
++ [ "cmaxStairsNum < 0" | Dice.infDice cmaxStairsNum < 0 ]
++ [ "stairs suggested, but not defined"
| Dice.supDice cmaxStairsNum > 0 && null cstairFreq ]
-- | Validate all cave kinds.
-- Note that names don't have to be unique: we can have several variants
-- of a cave with a given name.
validateAll :: [CaveKind] -> ContentData CaveKind -> [Text]
validateAll _ _ = [] -- so far, always valid
-- * Mandatory item groups
mandatoryGroups :: [GroupName CaveKind]
mandatoryGroups =
[DEFAULT_RANDOM]
pattern DEFAULT_RANDOM :: GroupName CaveKind
pattern DEFAULT_RANDOM = GroupName "default random"
makeData :: RK.RuleContent
-> [CaveKind] -> [GroupName CaveKind] -> [GroupName CaveKind]
-> ContentData CaveKind
makeData corule content groupNamesSingleton groupNames =
makeContentData "CaveKind" cname cfreq (validateSingle corule) validateAll
content
groupNamesSingleton
(mandatoryGroups ++ groupNames)
| LambdaHack/LambdaHack | definition-src/Game/LambdaHack/Content/CaveKind.hs | bsd-3-clause | 7,660 | 0 | 34 | 2,054 | 1,349 | 778 | 571 | -1 | -1 |
-- |
-- Module : Data.Semiring.Properties
-- Copyright : Sebastian Fischer <mailto:[email protected]>
-- License : BSD3
--
-- This library provides properties for the 'Semiring' type class that
-- can be checked using libraries like QuickCheck or SmallCheck.
--
module Data.Semiring.Properties (
module Data.Semiring, module Data.Semiring.Properties
) where
import Data.Semiring
-- | > a .+. b == b .+. a
plus'comm :: Semiring s => s -> s -> Bool
plus'comm a b = a .+. b == b .+. a
-- | > zero .+. a == a
left'zero :: Semiring s => s -> Bool
left'zero a = zero .+. a == a
-- | > (a .+. b) .+. c == a .+. (b .+. c)
add'assoc :: Semiring s => s -> s -> s -> Bool
add'assoc a b c = (a .+. b) .+. c == a .+. (b .+. c)
-- | > one .*. a == a
left'one :: Semiring s => s -> Bool
left'one a = one .*. a == a
-- | > a .*. one == a
right'one :: Semiring s => s -> Bool
right'one a = a .*. one == a
-- | > (a .*. b) .*. c == a .*. (b .*. c)
mul'assoc :: Semiring s => s -> s -> s -> Bool
mul'assoc a b c = (a .*. b) .*. c == a .*. (b .*. c)
-- | > a .*. (b .+. c) == (a .*. b) .+. (a .*. c)
left'distr :: Semiring s => s -> s -> s -> Bool
left'distr a b c = a .*. (b .+. c) == (a .*. b) .+. (a .*. c)
-- | > (a .+. b) .*. c == (a .*. c) .+. (b .*. c)
right'distr :: Semiring s => s -> s -> s -> Bool
right'distr a b c = (a .+. b) .*. c == (a .*. c) .+. (b .*. c)
-- | > zero .*. a == zero
left'ann :: Semiring s => s -> Bool
left'ann a = zero .*. a == zero
-- | > a .*. zero == zero
right'ann :: Semiring s => s -> Bool
right'ann a = a .*. zero == zero
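-- A usage sketch: each law can be specialised to a concrete 'Semiring'
-- instance and passed to a property-based test driver (illustrative; the
-- instance and the QuickCheck import are assumed on the caller's side):
--
-- > quickCheck (plus'comm :: Bool -> Bool -> Bool)
-- > quickCheck (mul'assoc :: Bool -> Bool -> Bool -> Bool)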
| sebfisch/haskell-regexp | src/Data/Semiring/Properties.hs | bsd-3-clause | 1,632 | 0 | 9 | 468 | 509 | 273 | 236 | 23 | 1 |
module Test.Cache(main) where
import Development.Shake
import Development.Shake.FilePath
import System.Directory
import Data.Char
import Test.Type
main = testBuild test $ do
vowels <- newCache $ \file -> do
src <- readFile' file
liftIO $ appendFile "trace.txt" "1"
pure $ length $ filter isDigit src
"*.out*" %> \x ->
writeFile' x . show =<< vowels (dropExtension x <.> "txt")
startCompiler <- newCache $ \() -> do
liftIO $ writeFile "compiler.txt" "on"
runAfter $ writeFile "compiler.txt" "off"
"*.lang" %> \out -> do
startCompiler ()
liftIO $ copyFile "compiler.txt" out
-- Bug fixed in https://github.com/ndmitchell/shake/pull/796
bug796_2 <- newCache $ \() -> do
readFile' "bug796.2"
"bug796" %> \out -> do
a <- readFile' "bug796.1"
b <- bug796_2 ()
writeFile' out $ a ++ b
test build = do
build ["clean"]
writeFile "trace.txt" ""
writeFile "vowels.txt" "abc123a"
build ["vowels.out1","vowels.out2","-j3","--sleep"]
assertContents "trace.txt" "1"
assertContents "vowels.out1" "3"
assertContents "vowels.out2" "3"
build ["vowels.out2","-j3"]
assertContents "trace.txt" "1"
assertContents "vowels.out1" "3"
writeFile "vowels.txt" "12xyz34"
build ["vowels.out2","-j3","--sleep"]
assertContents "trace.txt" "11"
assertContents "vowels.out2" "4"
build ["vowels.out1","-j3","--sleep"]
assertContents "trace.txt" "111"
assertContents "vowels.out1" "4"
build ["foo.lang","bar.lang"]
assertContents "foo.lang" "on"
assertContents "compiler.txt" "off"
writeFile "compiler.txt" "unstarted"
build ["foo.lang","bar.lang"]
assertContents "compiler.txt" "unstarted"
writeFile "bug796.1" "a"
writeFile "bug796.2" "b"
build ["bug796", "--sleep"]
assertContents "bug796" "ab"
writeFile "bug796.1" "A"
build ["bug796", "--sleep"]
assertContents "bug796" "Ab"
writeFile "bug796.2" "B"
build ["bug796", "--sleep"]
assertContents "bug796" "AB"
| ndmitchell/shake | src/Test/Cache.hs | bsd-3-clause | 2,096 | 0 | 14 | 484 | 607 | 275 | 332 | 59 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
-- |
-- Module: $HEADER$
-- Description: TODO
-- Copyright: (c) 2016 Peter Trško
-- License: BSD3
--
-- Stability: experimental
-- Portability: GHC specific language extensions.
--
-- TODO
module Data.DHT.DKS.Type.Message.UpdateSuccessorAck
( UpdateSuccessorAck(..)
)
where
import Data.Eq (Eq)
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Text.Show (Show)
import Data.Default.Class (Default(def))
import Data.OverloadedRecords.TH (overloadedRecord)
import Data.DHT.DKS.Type.Hash (DksHash)
data UpdateSuccessorAck = UpdateSuccessorAck
{ _requester :: !DksHash
, _oldSuccessor :: !DksHash
, _successor :: !DksHash
}
deriving (Eq, Generic, Show, Typeable)
overloadedRecord def ''UpdateSuccessorAck
| FPBrno/dht-dks | src/Data/DHT/DKS/Type/Message/UpdateSuccessorAck.hs | bsd-3-clause | 1,039 | 0 | 9 | 167 | 175 | 113 | 62 | 29 | 0 |
--
-- A very simple example application using System.MIDI.
-- It's a basic MIDI monitor: prints all the incoming messages.
--
module Main where
--------------------------------------------------------------------------------
import Control.Monad
import Control.Concurrent
import System.MIDI
import System.MIDI.Utility
--------------------------------------------------------------------------------
-- the essence
mythread conn = do
events <- getEvents conn
mapM_ print events
(threadDelay 5000)
mythread conn
--------------------------------------------------------------------------------
-- main
main = do
src <- selectInputDevice "Select midi input device" Nothing
conn <- openSource src Nothing
putStrLn "connected"
threadid <- forkIO (mythread conn)
start conn ; putStrLn "started. Press 'ENTER' to exit."
getLine
stop conn ; putStrLn "stopped."
killThread threadid
close conn ; putStrLn "closed."
| chpatrick/hmidi | examples/monitor.hs | bsd-3-clause | 976 | 0 | 10 | 171 | 174 | 82 | 92 | 20 | 1 |
-- {-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
-- {-# LANGUAGE MultiParamTypeClasses #-}
-- {-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE RecordWildCards #-}
-- {-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- {-# LANGUAGE MultiWayIf #-}
-- {-# LANGUAGE OverloadedStrings #-}
-- {-# LANGUAGE RecordWildCards #-}
-- {-# OPTIONS_GHC -Wall #-}
-- {-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- {-# OPTIONS_GHC -fno-warn-orphans #-}
-- {-# OPTIONS_GHC -fno-warn-missing-signatures #-}
-- {-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
-- {-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
-- {-# OPTIONS_GHC -fno-warn-incomplete-uni-patterns #-}
-- {-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# LANGUAGE CPP #-}
-- {-# OPTIONS_GHC -cpp -DPiForallInstalled #-}
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <[email protected]>
-- Stability : experimental
-- Portability: non-portable
--
-- basic refactorings: renaming,
-- built upon the zipper navigation in the syntax tree,
{-
usage
renaming of simple expressions
runExcept $ renameExpr "y" "YY" $ lam "y" $ V "x"
* in the simpler Either monad, like so:
pp $ fromRight' $ (ezipper $ Mod m) >>= navigate [Decl 1, Rhs, Binding]
* or in the Refactor monad, which has state as well
pp $ fromRight' $ refactor $ (rzipper $ Mod m) >>= navigate [Decl 1, Rhs, Binding] >>= rename "a" "b"
one would just navigate to the relevant piece of the syntax tree, like
-- top make the tree, list pair a zipper here (for printing the top lvl)
pp $ fromRight' $ refactor $ (rzipper $ Mod m) >>= top >>= rename "a" "b"
**
next things TODO:
see if renaming Zero to Z works - I doubt it - but make it work
-}
module Pire.Refactor.Refactor
(
module Pire.Refactor.Refactor
)
where
import Pire.Syntax.Expr
import Pire.Syntax.GetNm
-- import Pire.Syntax.Binder
import Pire.Syntax.MkVisible
import Pire.Syntax.Smart
import Pire.Syntax.Decl
import Pire.Syntax.Modules
import Pire.Syntax.Nm
-- import Pire.Modules
-- import Pire.Untie
import Pire.Syntax.Telescope
import Pire.Syntax.Constructor
import Pire.Refactor.Navigation
import Pire.Pretty.Common
import Pire.Utils
import Bound
import Bound.Term
#ifdef MIN_VERSION_GLASGOW_HASKELL
#if MIN_VERSION_GLASGOW_HASKELL(7,10,3,0)
-- ghc >= 7.10.3
-- import Control.Monad.Except
#else
-- older ghc versions, but MIN_VERSION_GLASGOW_HASKELL defined
#endif
#else
-- MIN_VERSION_GLASGOW_HASKELL not even defined yet (ghc <= 7.8.x)
import Control.Applicative
#endif
import Control.Monad.Except
import Control.Lens
-- import System.IO.Silently
import Pire.Syntax.Pattern
-- import Data.Bitraversable
-- import Data.Bifunctor
-- import Control.Monad.Trans.Either
import Pire.Forget (forgetExp, forgetMatch)
import Control.Monad.State.Strict
import Debug.Trace
#ifdef PiForallInstalled
-- import PiForall.Environment
-- import PiForall.TypeCheck
#endif
#ifdef DocTest
-- fromRight'
import Data.Either.Combinators
import Pire.Syntax.Eps
import Pire.NoPos
import Pire.Text2String (t2s)
import Pire.Parser.ParseUtils (parse, module_)
import Pire.Modules (getModules_)
import Pire.Refactor.LineColumnNavigation (lineColumn)
import Pire.Refactor.ForgetZipper (forgetZ)
import Pire.Parser.Expr (expr)
#endif
-- --------------------------------------------------
-- renaming exprs
{-|
see if a variable is a binding variable in an expression
>>> "x" `isBindingVarIn` (lam "x" $ V "y")
True
>>> "y" `isBindingVarIn` (lam "x" $ V "y")
False
The next example (cf. @samples/Nat.pi@) seems fine to me (though test coverage is
maybe not exhaustive enough; think e.g. of a case where we would
have to recurse into the rhs of a match, @Zero -> \\x. n@, i.e. recurse
into the lambda @\\x. n@, etc.). Let @c@ be a case expression:
@
case n of
Zero -> Zero
Succ n' -> n'
@
>>> let c = Case (V "n") [Match (PatCon "Zero" []) (Scope (DCon "Zero" [] (Annot Nothing))),Match (PatCon "Succ" [(RuntimeP, PatVar "n'")]) (Scope (V (B 0)))] (Annot Nothing)
>>> "y" `isBindingVarIn` c
False
OK?
>>> "Succ" `isBindingVarIn` c
False
>>> "n'" `isBindingVarIn` c
True
-}
-- Show a at least for debugging with Debug.Trace.trace below
-- isBindingVarIn :: (Eq a, Disp (Expr a a)) => a -> Expr a a -> Bool
isBindingVarIn :: (Eq a, MkVisible a, Disp (Expr a a), Show a) => a -> Expr a a -> Bool
_ `isBindingVarIn` (V _) = False
y `isBindingVarIn` (Ws_ v _) = y `isBindingVarIn` v
-- do we really need this ?
y `isBindingVarIn` (BndV _ ex) = y `isBindingVarIn` ex
_ `isBindingVarIn` (Nat _) = False
_ `isBindingVarIn` (Nat_ {}) = False
y `isBindingVarIn` (l :@ r) = y `isBindingVarIn` l || y `isBindingVarIn` r
y `isBindingVarIn` (Lam y' sc)
| y == y' = True
| otherwise = y `isBindingVarIn` (instantiate1 (V y') sc)
y `isBindingVarIn` (Lam_ _ bndr _ sc)
| y == y' = True
-- -- | otherwise = y `isBindingVarIn` (instantiate1 (Ws_ (V y') $ Ws "") sc)
| otherwise = y `isBindingVarIn` (instantiate1 (V y') sc)
where y' = name' bndr
y `isBindingVarIn` (LamPAs ns sc)
| y `elem` ns' = True
| otherwise = y `isBindingVarIn` (instantiate (\i -> V $ ns !! i ^. _2) sc)
where ns' = (^. _2) <$> ns
y `isBindingVarIn` (LamPAs_ _ ns _ sc)
| y `elem` ns' = True
| otherwise = y `isBindingVarIn` (instantiate (\i -> V $ ns !! i ^. _2 & name') sc)
where ns' = name' . (^. _2) <$> ns
-- y `isBindingVarIn` (Lam' y' sc)
-- | y == y' = True
-- | otherwise = y `isBindingVarIn` (instantiate1 (V y') sc)
y `isBindingVarIn` (Position _ ex) = y `isBindingVarIn` ex
y `isBindingVarIn` (Paren ex) = y `isBindingVarIn` ex
y `isBindingVarIn` (Paren_ _ ex _) = y `isBindingVarIn` ex
-- TODO: rethink, if this is what we really want!
y `isBindingVarIn` (Case ex matches _)
| y `isBindingVarIn` ex = True
-- -- | y `elem` ns' = True
| y `elem` ns' = False
| y `elem` ns'' = True
-- -- | otherwise = False
| otherwise = any (\match -> y `isThisBindingVarInMatch` match) matches
where
ps = [p | (Match p _) <- matches]
-- scopes = [s | (Match _ s) <- matches]
-- ns' eg. ["Zero", "Succ"]
ns' = name' <$> ps
-- ns'' eg. ["n'"] given some "Succ n'" - rethink
-- ns'' = (name' <$>) $ (fst <$>) $ concat $ argPatterns <$> ps
ns'' = (name' <$>) $ (snd <$>) $ concat $ argPatterns <$> ps
-- instantiate the scope - cf pretty printing of Match
-- maybe this helper function should be more powerful, and cover some of the case above ?
-- c `isThisBindingVarInMatch` (Match p sc) = c `isBindingVarIn` instantiate (\i -> V $ (argPatterns p) !! i ^. _1 & name') sc
c `isThisBindingVarInMatch` (Match p sc) = c `isBindingVarIn` instantiate (\i -> V $ (argPatterns p) !! i ^. _2 & name') sc
-- is this too simple, maybe ?
y `isBindingVarIn` c@(Case_ {}) = y `isBindingVarIn` (forgetExp c)
-- needed, even for the Case doctest cases above
-- but rethink/refine !
-- too simple ?
-- y `isBindingVarIn` (TCon tm args) = False
_ `isBindingVarIn` (TCon {}) = False
_ `isBindingVarIn` (TCon_ {}) = False
_ `isBindingVarIn` (DCon {}) = False
_ `isBindingVarIn` (DCon_ {}) = False
_ `isBindingVarIn` (LitBool {}) = False
_ `isBindingVarIn` (LitBool_ {}) = False
_ `isBindingVarIn` (TyBool {}) = False
_ `isBindingVarIn` (TyBool_ {}) = False
_ `isBindingVarIn` (Refl {}) = False
_ `isBindingVarIn` (Refl_ {}) = False
-- rethink !
y `isBindingVarIn` (Subst ex1 ex2 _) = y `isBindingVarIn` ex1 || y `isBindingVarIn` ex2
y `isBindingVarIn` (Subst_ _ ex1 _ ex2 _) = y `isBindingVarIn` ex1 || y `isBindingVarIn` ex2
y `isBindingVarIn` (Ann ex1 ex2) = y `isBindingVarIn` ex1 || y `isBindingVarIn` ex2
y `isBindingVarIn` (Ann_ ex) = y `isBindingVarIn` ex
y `isBindingVarIn` (TyEq ex1 ex2) = y `isBindingVarIn` ex1 || y `isBindingVarIn` ex2
y `isBindingVarIn` (TyEq_ ex1 _ ex2) = y `isBindingVarIn` ex1 || y `isBindingVarIn` ex2
y `isBindingVarIn` (Let y' ex sc)
| y == y' = True
| y `isBindingVarIn` ex = True
| otherwise = y `isBindingVarIn` (instantiate1 (V y') sc)
-- too simple ?
y `isBindingVarIn` l@(Let_ {}) = y `isBindingVarIn` (forgetExp l)
y `isBindingVarIn` (PiP _ nm ex sc)
| y == nm = True
| y `isBindingVarIn` ex = True
| otherwise = y `isBindingVarIn` (instantiate1 (V nm) sc)
y `isBindingVarIn` (PiP_ _ ex _ sc)
| y == nm = True
| y `isBindingVarIn` ex = True
| otherwise = y `isBindingVarIn` (instantiate1 (V nm) sc)
where nm = name' ex
-- too simple ?
_ `isBindingVarIn` (InferredAnnBnd_ {}) = False
-- too simple ?
_ `isBindingVarIn` (WitnessedAnnBnd_ {}) = False
_ `isBindingVarIn` (WitnessedAnnEx_ {}) = False
y `isBindingVarIn` (Brackets_ _ ex _) = y `isBindingVarIn` ex
_ `isBindingVarIn` (Type_ {}) = False
_ `isBindingVarIn` (Type {}) = False
y `isBindingVarIn` (Contra ex _) = y `isBindingVarIn` ex
y `isBindingVarIn` (Contra_ _ ex _) = y `isBindingVarIn` ex
-- _ `isBindingVarIn` ex = error $ "isBindingVarIn, missing..." ++ ppS ex
_ `isBindingVarIn` ex = trace (show ex) $ error $ "isBindingVarIn, missing..." ++ ppS ex
{-|
helper function, hidden in a where clause in the above @isBindingVarIn@ already, but easier to test separately here
-}
c `isBindingVarInMatch` (Match p sc) = c `isBindingVarIn` instantiate (\i -> V $ (argPatterns p) !! i ^. _1 & name') sc
c `isBindingVarInMatch` m@(Match_ {}) = c `isBindingVarInMatch` (forgetMatch m)
{-|
helper function to create expressions in the
@Either RefactorError@ monad, for convenience in the ghci / cabal repl,
could just use @Right@ instead, but would need
@-XFlexibleContexts@ then
-}
eexpr :: t -> Either RefactorError t
eexpr t = Right t
{-|
renaming expressions, with the simple @renameExpr'@ function (takes just an expression):
>>> renameExpr' "x" "z" $ lam "y" $ V "x"
Right (Lam "y" (Scope (V (F (V "z")))))
or with @renameExpr@ in the @Refactoring@ monad (cf. below):
>>> (eexpr $ lam "y" $ V "x") >>= renameExpr "x" "z"
Right (Lam "y" (Scope (V (F (V "z")))))
>>>
continuing with the simpler @renameExpr'@:
>>> renameExpr' "a" "x" $ lam "y" $ V "x"
Left (NameCaptureFV "x" "\\y . x")
>>> renameExpr' "x" "y" $ lam "y" $ V "x"
Left (NameCaptureBindingVar "y" "\\y . x")
>>> let l = lam "a" $ lam "b" $ lam "c" $ lam "y" $ V "x"
>>> pp l
\a . \b . \c . \y . x
no effect if @y@ is bound
>>> pp $ fromRight' $ renameExpr' "y" "YY" $ l
\a . \b . \c . \y . x
works fine if @x@ is free
>>> pp $ fromRight' $ renameExpr' "x" "zzz" $ l
\a . \b . \c . \y . zzz
detect name capture deep down inside
>>> renameExpr' "x" "b" $ l
Left (NameCaptureBindingVar "b" "\\a . \\b . \\c . \\y . x")
another example:
>>> let l' = lam "a" $ V "foo" :@ (lam "c" $ V "a" :@ V "c")
>>> pp l'
\a . foo (\c . a c)
>>> renameExpr' "a" "c" $ l'
Left (NameCaptureBindingVar "c" "\\a . foo (\\c . a c)")
further example, similar to the above, but parsed and not desugared: @LamPAs@
>>> let l = nopos $ t2s $ parse expr "\\a b . \\c . \\y . x"
>>> l
LamPAs [(RuntimeP,"a",Annot Nothing),(RuntimeP,"b",Annot Nothing)] (Scope (LamPAs [(RuntimeP,"c",Annot Nothing)] (Scope (LamPAs [(RuntimeP,"y",Annot Nothing)] (Scope (V (F (V (F (V (F (V "x"))))))))))))
>>> renameExpr' "a" "c" $ l
Left (NameCaptureBindingVar "c" "\\a b . \\c . \\y . x")
-}
-- Show a at least for debugging `isBindingVar` with Debug.Trace
renameExpr' :: (Disp (Expr a a), Disp a, Eq a, MkVisible a, Show a) =>
a -> a -> Expr a a -> Either RefactorError (Expr a a)
renameExpr' old new lambda@(Lam x sc)
| new `elem` fv sc = Left $ NameCaptureFV (ppS new) (ppS lambda)
| new `isBindingVarIn` lambda = Left $ NameCaptureBindingVar (ppS new) (ppS lambda)
| otherwise, x == old = return $ Lam new sc'
| otherwise, x /= old = return $ Lam x sc'
where (Lam _ sc') = substituteVar old new lambda
renameExpr' old new lambda@(LamPAs_ lamtok xs dot' sc)
-- | new `elem` fv sc = throwError $ NameCaptureFV $ ppS newlam
| new `elem` fv sc = Left $ NameCaptureFV (ppS new) (ppS lambda)
| new `isBindingVarIn` lambda = Left $ NameCaptureBindingVar (ppS new) (ppS lambda)
-- -- | otherwise, old `elem` [x ^. _2 & name' | x <- xs] = return $ LamPAs_ lamtok xsnew dot' sc'
-- -- | otherwise, (not $ old `elem` [x ^. _2 & name' | x <- xs]) = return $ LamPAs_ lamtok xs dot' sc'
-- [July 2016] getting rid of this tracing
-- -- | otherwise, old `elem` [x ^. _2 & name' | x <- xs] = trace "#1" $ return $ bimap (\tt -> if tt==old then new else tt) (\tt -> if tt==old then new else tt) $ LamPAs_ lamtok xsnew dot' sc'
-- -- | otherwise, (not $ old `elem` [x ^. _2 & name' | x <- xs]) = trace "#2" $ return $ bimap (\tt -> if tt==old then new else tt) (\tt -> if tt==old then new else tt) $ LamPAs_ lamtok xs dot' sc'
-- -- this would benefit from a helper function (\tt -> if tt==old then new else tt) I guess
| otherwise, old `elem` [x ^. _2 & name' | x <- xs] = return $ bimap (\tt -> if tt==old then new else tt) (\tt -> if tt==old then new else tt) $ LamPAs_ lamtok xsnew dot' sc'
| otherwise, (not $ old `elem` [x ^. _2 & name' | x <- xs]) = return $ bimap (\tt -> if tt==old then new else tt) (\tt -> if tt==old then new else tt) $ LamPAs_ lamtok xs dot' sc'
where
sc' = scopepl $ substituteVar old new lambda
-- using lens to short cut
-- xsnew = [(eps, if name' bndr == old then bndr `replaceInBndr` new else bndr, ann) | (eps, bndr, ann) <- xs]
xsnew = [over _2 (\b -> if name' b == old then b `replaceInBndr` new else b) x | x <- xs]
bndr `replaceInBndr` n = fmap (\tt -> if tt==old then n else tt) bndr
-- renameExpr' old new l@(Lam' x sc)
-- | new `elem` fv sc = Left $ NameCaptureFV (ppS new) (ppS l)
-- | new `isBindingVarIn` l = Left $ NameCaptureBindingVar (ppS new) (ppS l)
-- | otherwise, x == old = return $ Lam' new sc'
-- | otherwise, x /= old = return $ Lam' x sc'
-- where (Lam' _ sc') = substituteVar old new l
renameExpr' old new expr@(Case {})
| new `isBindingVarIn` expr = Left $ NameCaptureBindingVar (ppS new) (ppS expr)
-- -- | otherwise = return $ expr'
-- -- | otherwise = trace (show "# renameExpr'...(Case {}), in otherwise") $ return $ Case ex' (renameMatch old new <$> matches') annot'
| otherwise = return $ Case ex' (renameMatch old new <$> matches') annot'
where
-- expr' = substituteVar old new expr
(Case ex' matches' annot') = substituteVar old new expr
renameExpr' old new expr
| new `isBindingVarIn` expr = Left $ NameCaptureBindingVar (ppS new) (ppS expr)
| otherwise = return $ expr'
where expr' = substituteVar old new expr
-- Show at least for debugging with Debug.Trace.trace below
{-|
we want @renameExpr@ to work in whatever monad, requiring just the @Refactoring@ interface
(so that @renameZ'@/@renameZ@ can be in line with the other navigation functions);
for examples of its usage cf. the simpler function @renameExpr'@ above
-}
-- not sure if it is a good idea (if we really need) to make renameExpr
-- rely on the simpler renameExpr' function, that works just in Either ?
-- anyway, it makes the doctests above simpler:
-- we *can* use
-- (eexpr $ V "a" :@ V "b") >>= renameExpr "a" "xx"
-- but we can just as well use the simpler:
-- renameExpr' "a" "xx" $ V "a" :@ V "b"
renameExpr :: (Refactoring m, Disp (Expr a a), Disp a, Eq a, MkVisible a, Show a) =>
a -> a -> Expr a a -> m (Expr a a)
renameExpr old new ex
| Left l <- renameExpr' old new ex = rthrow l
| Right r <- renameExpr' old new ex = rsucceed r
-- --------------------------------------------------
-- renaming decls
{-|
rename in declarations: e.g. the @Nat@ at position 19 8 of @pitestfiles/Lec3.pi@ to @Natural@, as used throughout, and in (data type) decl 18 in particular:
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Lec3.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 19 8 >>= repr
Parsing File "pitestfiles/Lec3.pi"
Nat
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Lec3.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 19 8 >>= upToBinding2 >>= \(old, z) -> renameZ old "Natural" z >>= toDecl 18 >>= repr
Parsing File "pitestfiles/Lec3.pi"
data Beautiful (n : Natural) : Type where
B0 of [n = 0]
B3 of [n = 3]
B5 of [n = 5]
Bsum of (m1:Natural)(m2:Natural)(Beautiful m1)(Beautiful m2)[n = plus m1 m2]
<BLANKLINE>
<BLANKLINE>
likewise @Zero@ occurs at position 21 3 in the context of the definition of @plus@:
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Lec3.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 3 >>= repr
Parsing File "pitestfiles/Lec3.pi"
Zero
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Lec3.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 3 >>= navigate [Up, Up, Up, Up, Up] >>= repr
Parsing File "pitestfiles/Lec3.pi"
plus = \ x y. case x of
Zero -> y
Succ x' -> Succ (plus x' y)
<BLANKLINE>
<BLANKLINE>
and we can rename @Nat@ to @Natural@ again (on the module level), and turn our attention to decl 3, where the naturals are actually defined,
either in white space aware or abstract syntax (forgetZ):
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Lec3.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 3 >>= upToBinding2 >>= \(old, z) -> renameZ old "Natural" z >>= toDecl 3 >>= repr
Parsing File "pitestfiles/Lec3.pi"
data Nat : Type where
Natural
Succ of (Nat)
<BLANKLINE>
<BLANKLINE>
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Lec3.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 3 >>= upToBinding2 >>= \(old, z) -> forgetZ z >>= renameZ old "Natural" >>= toDecl 3 >>= repr
Parsing File "pitestfiles/Lec3.pi"
data Nat : Type where
Natural
Succ of (_1 : Nat)
-}
-- Show at least for debugging with Debug.Trace.trace below
renameDecl :: (Refactoring m, Disp (Expr a a), Disp a, Eq a, MkVisible a, Show a) =>
a -> a -> Decl a a -> m (Decl a a)
renameDecl old new (Def x expr)
| x == old = do {
; expr' <- renameExpr old new expr
; return $ Def new expr'
}
| otherwise = do {
; expr' <- renameExpr old new expr
; return $ Def x expr'
}
renameDecl old new (Def_ nm eq expr)
| name' nm == old = do {
; expr' <- renameExpr old new expr
; return $ Def_ (Nm1_ new ws) eq expr'
}
| otherwise = do {
; expr' <- renameExpr old new expr
; return $ Def_ nm eq expr'
}
where (Nm1_ _ ws) = nm
-- recall:
-- signature decls
-- eg. foo : bar
renameDecl old new (Sig nm ex)
| nm == old = do {
; expr' <- renameExpr old new ex
; return $ Sig new expr'
}
| otherwise = do {
; expr' <- renameExpr old new ex
; return $ Sig nm expr'
}
renameDecl old new (Sig_ nm colontok ex)
| name' nm == old = do {
; expr' <- renameExpr old new ex
; return $ Sig_ (Nm1_ new ws) colontok expr'
}
| otherwise = do {
; expr' <- renameExpr old new ex
; return $ Sig_ nm colontok expr'
}
where (Nm1_ _ ws) = nm
{-
data type decls, eg.
data Nat : Type where
Zero
Succ of (Nat)
-}
-- start out w/ no renaming at all
-- renameDecl _ _ d@(Data {}) = return d
-- renameDecl _ _ d@(Data_ {}) = return d
renameDecl old new (Data tt tele constrdefs) =
-- Data <$> (pure $ if tt == old then new else tt) <*> (renameTele old new tele) <*> pure constrdefs
Data <$> (pure $ if tt == old then new else tt) <*> (renameTele old new tele) <*> pure (renameConstructorDef old new <$> constrdefs)
renameDecl old new (Data_ datatok nm tele colontok ex wheretoken maybo constrdefsAndMaySemiCola maybc ) =
Data_
<$> pure datatok
<*> (pure $ if name' nm == old then (Nm1_ new ws) else nm)
<*> (renameTele old new tele)
<*> pure colontok
<*> pure ex
<*> pure wheretoken
<*> pure maybo
-- need to do renaming in constructor defs as well
<*> pure [(renameConstructorDef old new cd, semi) | (cd, semi) <- constrdefsAndMaySemiCola]
<*> pure maybc
where
(Nm1_ _ ws) = nm
-- TODO to be completed for the remaining constraint constructors
-- -- | ConsWildInParens_ Eps (Token 'ParenOpenTy t) (Binder t) (Expr t a) (Token 'ParenCloseTy t) (Telescope t a)
-- -- | ConsInBrackets_ Eps (Token 'BracketOpenTy t) (Nm t) (Token 'ColonTy t) (Expr t a) (Token 'BracketCloseTy t) (Telescope t a)
-- -- -- need this as well - cf. equal_
-- -- -- should keep = as well
-- -- | Constraint_ (Token 'BracketOpenTy t) (Expr t a) (Token 'EqualTy t) (Expr t a) (Token 'BracketCloseTy t) (Telescope t a)
renameTele _ _ EmptyTele = pure EmptyTele
renameTele old new (Cons eps tt ex tele) =
Cons <$> pure eps <*> pure (if tt==old then new else tt) <*> (renameExpr old new ex) <*> (renameTele old new tele)
renameTele old new (ConsInParens_ eps po nm col ex pc tele) =
ConsInParens_
<$> pure eps
<*> pure po
<*> (pure $ if name' nm == old then (Nm1_ new ws) else nm)
<*> pure col
<*> (renameExpr old new ex)
<*> pure pc
<*> (renameTele old new tele)
where (Nm1_ _ ws) = nm
renameTele old new (Constraint ex1 ex2 tele) =
Constraint <$> (renameExpr old new ex1) <*> (renameExpr old new ex2) <*> (renameTele old new tele)
renameConstructorDef old new cd@(ConstructorDef {}) =
bimap (\tt -> if tt==old then new else tt) (\somea -> if somea==old then new else somea) cd
renameConstructorDef old new cd@(ConstructorDef' {}) =
bimap (\tt -> if tt==old then new else tt) (\somea -> if somea==old then new else somea) cd
renameConstructorDef old new cd@(ConstructorDef_ {}) =
bimap (\tt -> if tt==old then new else tt) (\somea -> if somea==old then new else somea) cd
renameConstructorDef old new cd@(ConstructorDef'_ {}) =
bimap (\tt -> if tt==old then new else tt) (\somea -> if somea==old then new else somea) cd
{-|
rename in matches, looks fine to me:
>>> renameMatch "Zero" "ZZ" <$> [Match (PatCon "Zero" []) (Scope (LitBool True)),Match (PatCon "Succ" [(RuntimeP, PatVar "n")]) (Scope (LitBool False))]
[Match (PatCon "ZZ" []) (Scope (LitBool True)),Match (PatCon "Succ" [(RuntimeP,PatVar "n")]) (Scope (LitBool False))]
and in @Lec3.pi@, at position 21 5, there is @Zero@
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Lec3.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 5 >>= focus
Parsing File "pitestfiles/Lec3.pi"
Zero
this is defined at the module level (somewhere up the syntax tree ie.: @upToBinding2@), and used in the case expression of decl 5 as well:
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Lec3.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 5 >>= upToBinding2 >>= \(old, z) -> forgetZ z >>= toDecl 5 >>= focus
Parsing File "pitestfiles/Lec3.pi"
is_zero = \x .
case x of
Zero -> True
(Succ n) -> False
now renaming @Zero@ to @ZZ@ works fine:
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Lec3.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 5 >>= upToBinding2 >>= \(old, z) -> forgetZ z >>= toDecl 5 >>= renameZ old "ZZ" >>= focus
Parsing File "pitestfiles/Lec3.pi"
is_zero = \x .
case x of
ZZ -> True
(Succ n) -> False
-}
renameMatch old new cd@(Match {}) =
bimap (\tt -> if tt==old then new else tt) (\somea -> if somea==old then new else somea) cd
renameMatch old new cd@(Match_ {}) =
bimap (\tt -> if tt==old then new else tt) (\somea -> if somea==old then new else somea) cd
-- --------------------------------------------------
-- rename in the zipper
-- similar to upToBinding'/upToBinding
-- two functions here:
-- renameZ takes an old and a new name
-- renameZ' just takes a new name (and gets the old name from the state)
{-|
@renameZ old new z@
rename an @old@ name to a @new@ name in zipper @z@.
this function does not require @MonadState (RefactorState a) m@
(but two names: the @old@ one, as well as the @new@ one)
some examples, starting with the deliberately simple @Test.pi@ module:
>>> tst <- (runExceptT $ getModules_ ["samples"] "Test") >>= return . last . fromRight'
Parsing File "samples/Nat.pi"
Parsing File "samples/Nat.pi"
Parsing File "samples/Sample.pi"
Parsing File "samples/Test.pi"
>>> pp $ fromRight' $ refactor $ (rzipper $ Mod $ t2s $ tst) >>= lineColumn 21 9 >>= repr
a
see what @a@ we are talking about (at position 21 9), navigating there step by step:
>>> pp $ fromRight' $ refactor $ (rzipper $ Mod $ t2s $ tst) >>= navigate [Decl 6] >>= repr
j = \y . a (\a . x (\a . a))
<BLANKLINE>
>>> pp $ fromRight' $ refactor $ (rzipper $ Mod $ t2s $ tst) >>= navigate [Decl 6, Rhs] >>= repr
\y . a (\a . x (\a . a))
<BLANKLINE>
>>> pp $ fromRight' $ refactor $ (rzipper $ Mod $ t2s $ tst) >>= navigate [Decl 6, Rhs, Rhs] >>= repr
a (\a . x (\a . a))
<BLANKLINE>
>>> pp $ fromRight' $ refactor $ (rzipper $ Mod $ t2s $ tst) >>= navigate [Decl 6, Rhs, Rhs, Lhs] >>= repr
a
in any case, this is the @a@ bound at the top level, thus renaming changes it throughout the module
(but not the deeper locally bound instances).
>>> (pp . fromRight' <$>) refactor $ (rzipper $ Mod $ t2s $ tst) >>= lineColumn 21 9 >>= upToBinding >>= renameZ "a" "A" >>= focus
<BLANKLINE>
-- leading ws, module copied from M.pi, do not touch though: used in the doctests
<BLANKLINE>
module Main where
<BLANKLINE>
import Nat
import Sample
<BLANKLINE>
A = \x . 2
<BLANKLINE>
b = \x [ y ] z . x 2
<BLANKLINE>
k = \x . frec x
<BLANKLINE>
<BLANKLINE>
f = \x . A x
g = \x . c x
<BLANKLINE>
hh = \ yy . A (\a . x a)
<BLANKLINE>
j = \y . A (\a . x (\a . a))
frec = \y . frec (\a . x (\a . a))
<BLANKLINE>
cf this very example renamed with @renameZ'@ below.
as of February 2016: can now rename by means of @upToBinding2@, which works with the simpler @ezipper@
like so (rename @pred@ to @predecessor@):
@pred@ can be found at position 21 3, its definition at the module level:
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Nat.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 3 >>= focus
Parsing File "pitestfiles/Nat.pi"
pred
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Nat.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 3 >>= upToBinding2 >>= focus
Parsing File "pitestfiles/Nat.pi"
pred
renaming @pred@ to @predecessor@, i.e. throughout the module (the result is thus too long to be shown here, but you can try this out yourself):
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Nat.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 3 >>= upToBinding2 >>= \(old, z) -> renameZ old "predecessor" z >>= repr
...
but cf. its usage in decl 12 (@mult@) eg.
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Nat.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 3 >>= upToBinding2 >>= \(old, z) -> renameZ old "predecessor" z >>= toDecl 12 >>= repr
Parsing File "pitestfiles/Nat.pi"
mult = \ n m .
case n of
Zero -> Zero
Succ predecessor -> plus m (mult predecessor m)
<BLANKLINE>
<BLANKLINE>
<BLANKLINE>
replace @pred@ by @predecessor@ (in both: signature + def) in the non white space aware (regular absy) case:
>>> (pp . fromRight' <$>) $ module' "pitestfiles/Nat.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= toDecl 4 >>= left >>= upToBinding2 >>= \(old, z) -> renameZ old "predecessor" z >>= repr
...
likewise in the whitespace aware case (same means of navigation to @pred@ - can navigate there by lineColumn as well, of course, cf. below),
ie. @pred@ -> @predecessor@, in both: sig+def:
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Nat.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= toDecl 4 >>= left >>= upToBinding2 >>= \(old, z) -> renameZ old "predecessor" z >>= repr
...
as above, but navigate to @pred@ with lineColumn
>>> (pp . fromRight' <$>) $ module_ "pitestfiles/Nat.pi" >>= \m -> return $ (ezipper $ Mod $ t2s $ nopos m) >>= lineColumn 21 3 >>= upToBinding2 >>= \(old, z) -> renameZ old "predecessor" z >>= repr
...
-}
-- Show at least for debugging with Debug.Trace.trace below
renameZ :: (Refactoring m, Disp (Expr a a), Disp a, Eq a, MkVisible a, Show a) =>
a -> a -> Zipper a -> m (Zipper a)
renameZ old new (Dcl decl, bs) = do {
; decl' <- renameDecl old new decl
; rsucceed (Dcl decl', bs)
}
renameZ old new (Exp expr, bs) = do {
; expr' <- renameExpr old new expr
; rsucceed (Exp expr', bs)
}
renameZ old new z@(Aa _, _) = do {
; up z >>= \z' -> renameZ old new z'
}
renameZ old new (Mod (Module nm mimports mdecls mconstrs), bs) = do {
; dcls <- forM mdecls (\decl -> renameDecl old new decl)
; rsucceed (Mod (Module nm mimports dcls mconstrs), bs)
}
renameZ old new (Mod m@(Module_ {..}), bs) = do {
-- any better / simpler - in one line maybe ?
-- ; dcls <- forM ((^. decls) m) (\decl -> renameDecl old new decl)
; dcls <- forM (_decls) (\decl -> renameDecl old new decl)
; rsucceed $ (Mod $ over (decls) (\_-> dcls) m, bs)
}
{-|
@renameZ' new z@
rename a name in zipper @z@ to a @new@ one.
this function requires MonadState (RefactorState a) m
idea/requirement: the name encountered has been recorded as the old name in the state,
@renameZ'@ thus only needs the @new@ name
continuing with the example above (this time just using the more convenient @module_@ for parsing the @Test@ module):
moving upwards with @upToBinding@ (that records the name encountered)
and then using @renameZ'@ for the renaming (that just needs the new name @AAA@)
>>> tst <- module_ "samples/Test" >>= return
Parsing File "samples/Nat.pi"
Parsing File "samples/Nat.pi"
Parsing File "samples/Sample.pi"
Parsing File "samples/Test.pi"
>>> pp $ fromRight' $ refactor $ (rzipper $ Mod $ t2s $ tst) >>= lineColumn 21 9 >>= upToBinding >>= renameZ' "AAA" >>= repr
<BLANKLINE>
-- leading ws, module copied from M.pi, do not touch though: used in the doctests
<BLANKLINE>
module Main where
<BLANKLINE>
import Nat
import Sample
<BLANKLINE>
AAA = \x . 2
<BLANKLINE>
b = \x [ y ] z . x 2
<BLANKLINE>
k = \x . frec x
<BLANKLINE>
<BLANKLINE>
f = \x . AAA x
g = \x . c x
<BLANKLINE>
hh = \ yy . AAA (\a . x a)
<BLANKLINE>
j = \y . AAA (\a . x (\a . a))
frec = \y . frec (\a . x (\a . a))
<BLANKLINE>
TODO: think about whether it is still possible to get a
"no old name found" error for renameZ, as there is now only
* upToBinding, which does record the name
* upToBinding2, which takes the old name as a param
-}
renameZ' :: (Eq a, Show a, MonadState (RefactorState a) m, MkVisible a, Disp a, Disp (Expr a a), Refactoring m) => a -> Zipper a -> m (Zipper a)
renameZ' new (Dcl decl, bs) = do {
; Just old <- oldNmFound <$> get
; decl' <- renameDecl old new decl
; rsucceed (Dcl decl', bs)
}
renameZ' new (Exp expr, bs) = do {
; Just old <- oldNmFound <$> get
; expr' <- renameExpr old new expr
; rsucceed (Exp expr', bs)
}
renameZ' new z@(Aa _, _) = do {
; Just old <- oldNmFound <$> get
; up z >>= renameZ old new
}
renameZ' new (Mod (Module nm mimports mdecls mconstrs), bs) = do {
; Just old <- oldNmFound <$> get
; dcls <- forM mdecls (\decl -> renameDecl old new decl)
; rsucceed (Mod (Module nm mimports dcls mconstrs), bs)
}
-- reworked in July 2016: consider the possibility of
-- "no old name found"
-- but maybe not necessary, as there is only upToBinding/upToBinding2 left
-- renameZ' new (Mod m@(Module_ {..}), bs) = do {
-- ; Just old <- oldNmFound <$> get
-- ; dcls <- forM (_decls) (\decl -> renameDecl old new decl)
-- ; rsucceed $ (Mod $ over (decls) (\_-> dcls) m, bs)
-- }
renameZ' new (Mod m@(Module_ {..}), bs)
= oldNmFound <$> get >>=
\o -> maybe (rfail "renameZ' new (Mod m@(Module_ {..}), bs): no old name found")
(\old -> do {
; dcls <- forM (_decls) (\decl -> renameDecl old new decl)
; rsucceed $ (Mod $ over (decls) (\_-> dcls) m, bs)
}) o
#ifdef PiForallInstalled
-- typecheckM m
-- stuff st =
-- (do
-- ; silence $ runExceptT $ getModules ["keepme"] "Foo.pi"
-- ; return ()
-- )
-- typecheckM m@(Module {..}) =
-- (do
-- ; let m' = untie m
-- ; r <- runTcMonad emptyEnv (tcModules [m'])
-- ; return r
-- -- ; case r of
-- -- Left x ->
-- -- Right y -> return y
-- )
-- typecheckM_ m@(Module_ {..}) =
-- (do
-- ; let m' = untie m
-- ; runTcMonad emptyEnv (tcModules [m'])
-- ; return ()
-- )
#endif
-- instance Rename (Zipper a) a where
-- rename = renameZ'
-- renameTo :: (Eq a, MonadState (RefactorState a) m, Disp a, Refactoring m) =>
-- a -> Zipper a -> m (Zipper a)
-- renameTo new zipper@(Ex expr, bs) =
-- (do
-- ; st <- get
-- ; case oldNmFound st of
-- Nothing -> rfail $ "no old name found to rename"
-- Just old -> renameZ' old new zipper >>= \renamed -> return renamed
-- ) ;
-- renameTo new zipper@(Mod (Module nm imports decls constrs), bs) =
-- (do
-- ; st <- get
-- ; case oldNmFound st of
-- Nothing -> rfail $ "no old name found to rename"
-- Just old -> renameZ' old new zipper >>= \renamed -> return renamed
-- )
-- -- rnm :: (Refactoring m, MonadWriter String m)
-- -- => Module String String -> Path -> String -> m (Module String String)
-- rnm mdl path new =
-- (do
-- ; let zipper = (Mod mdl, [])
-- ; when (new `L.elem` topLevelVars mdl)
-- )
| reuleaux/pire | src/Pire/Refactor/Refactor.hs | bsd-3-clause | 34,059 | 18 | 17 | 7,871 | 5,777 | 3,125 | 2,652 | 246 | 9 |
module PatBind1 where
main :: Int
main = sum xs
x :: Int
xs :: [Int]
(x:xs) = [1, 2, 3]
y :: Int
ys :: [Int]
(y:ys) = [4, 5]
| roberth/uu-helium | test/correct/PatBind1.hs | gpl-3.0 | 130 | 0 | 6 | 36 | 84 | 50 | 34 | 9 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wall #-}
module Language.Fortran.Model.Op.Meta.Core where
-- import Data.Int (Int16, Int32, Int64, Int8)
-- import Data.Word (Word8)
-- import Control.Monad.Reader.Class (MonadReader (ask))
-- import Data.SBV
-- import Data.SBV.Dynamic
-- import Data.SBV.Internals (SBV (..))
-- import Language.Expression
-- import Language.Expression.Pretty
-- import Language.Fortran.Model.EvalPrim
-- import Language.Fortran.Model.Types
-- import Language.Fortran.Model.MetaOp.Repr
| dorchard/camfort | src/Language/Fortran/Model/Op/Meta/Core.hs | apache-2.0 | 1,113 | 0 | 3 | 360 | 33 | 31 | 2 | 13 | 0 |
{-# LANGUAGE TemplateHaskell, ScopedTypeVariables, TypeOperators, GADTs, EmptyDataDecls, PatternGuards #-}
module Reflex.Dynamic.TH (qDyn, unqDyn, mkDyn) where
import Reflex.Dynamic
import Language.Haskell.TH
import qualified Language.Haskell.TH.Syntax as TH
import Language.Haskell.TH.Quote
import Data.Data
import Control.Monad.State
import qualified Language.Haskell.Exts as Hs
import qualified Language.Haskell.Meta.Syntax.Translate as Hs
import Data.Monoid
import Data.Generics
-- | Quote a Dynamic expression. Within the quoted expression, you can use '$(unqDyn [| x |])' to refer to any expression 'x' of type 'Dynamic t a'; the unquoted result will be of type 'a'
qDyn :: Q Exp -> Q Exp
qDyn qe = do
e <- qe
let f :: forall d. Data d => d -> StateT [(Name, Exp)] Q d
f d = case eqT of
Just (Refl :: d :~: Exp)
| AppE (VarE m) eInner <- d
, m == 'unqMarker
-> do n <- lift $ newName "dyn"
modify ((n, eInner):)
return $ VarE n
_ -> gmapM f d
(e', exprsReversed) <- runStateT (gmapM f e) []
let exprs = reverse exprsReversed
arg = foldr (\a b -> ConE 'FHCons `AppE` a `AppE` b) (ConE 'FHNil) $ map snd exprs
param = foldr (\a b -> ConP 'HCons [VarP a, b]) (ConP 'HNil []) $ map fst exprs
[| mapDyn $(return $ LamE [param] e') =<< distributeFHListOverDyn $(return arg) |]
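-- A usage sketch (illustrative; assumes @dx, dy :: Dynamic t Int@ are in
-- scope and that we are in a monad in which 'mapDyn' can be run):
--
-- > dSum <- $(qDyn [| $(unqDyn [| dx |]) + $(unqDyn [| dy |]) |])
--
-- The quasiquoter 'mkDyn' below lets the same thing be written as
-- @[mkDyn| $dx + $dy |]@.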
unqDyn :: Q Exp -> Q Exp
unqDyn e = [| unqMarker $e |]
-- | This type represents an occurrence of unqDyn before it has been processed by qDyn. If you see it in a type error, it probably means that unqDyn has been used outside of a qDyn context.
data UnqDyn
-- unqMarker must not be exported; it is used only as a way of smuggling data from unqDyn to qDyn
--TODO: It would be much nicer if the TH AST was extensible to support this kind of thing without trickery
unqMarker :: a -> UnqDyn
unqMarker = error "An unqDyn expression was used outside of a qDyn expression"
mkDyn :: QuasiQuoter
mkDyn = QuasiQuoter
{ quoteExp = mkDynExp
, quotePat = error "mkDyn: pattern splices are not supported"
, quoteType = error "mkDyn: type splices are not supported"
, quoteDec = error "mkDyn: declaration splices are not supported"
}
mkDynExp :: String -> Q Exp
mkDynExp s = case Hs.parseExpWithMode (Hs.defaultParseMode { Hs.extensions = [ Hs.EnableExtension Hs.TemplateHaskell ] }) s of
Hs.ParseFailed (Hs.SrcLoc _ l c) err -> fail $ "mkDyn:" <> show l <> ":" <> show c <> ": " <> err
Hs.ParseOk e -> qDyn $ return $ everywhere (id `extT` reinstateUnqDyn) $ Hs.toExp $ everywhere (id `extT` antiE) e
where TH.Name (TH.OccName occName) (TH.NameG _ _ (TH.ModName modName)) = 'unqMarker
antiE x = case x of
Hs.SpliceExp se ->
Hs.App (Hs.Var $ Hs.Qual (Hs.ModuleName modName) (Hs.Ident occName)) $ case se of
Hs.IdSplice v -> Hs.Var $ Hs.UnQual $ Hs.Ident v
Hs.ParenSplice ps -> ps
_ -> x
reinstateUnqDyn (TH.Name (TH.OccName occName') (TH.NameQ (TH.ModName modName')))
| modName == modName' && occName == occName' = 'unqMarker
reinstateUnqDyn x = x
| k0001/reflex | src/Reflex/Dynamic/TH.hs | bsd-3-clause | 3,156 | 0 | 20 | 744 | 912 | 481 | 431 | -1 | -1 |
-- Command-line based Flapjax compiler. Run without any options for usage
-- information.
module Main where
import Control.Monad
import qualified Data.List as L
import System.Exit
import System.IO
import System.Console.GetOpt
import System.Environment hiding (withArgs)
import System.Directory
import BrownPLT.Html (renderHtml)
import Text.PrettyPrint.HughesPJ
import Text.ParserCombinators.Parsec(ParseError,parseFromFile)
import Flapjax.HtmlEmbedding()
import Flapjax.Parser(parseScript) -- for standalone mode
import BrownPLT.Html.PermissiveParser (parseHtmlFromString)
import Flapjax.Compiler
import BrownPLT.JavaScript.Parser (parseExpression)
import BrownPLT.JavaScript.Lexer
import BrownPLT.JavaScript.PrettyPrint
import Text.ParserCombinators.Parsec hiding (getInput)
import BrownPLT.Flapjax.CompilerMessage
import BrownPLT.Flapjax.Interface
import Text.XHtml (showHtml,toHtml,HTML)
data Option
= Usage
| Flapjax String
| Stdin
| Output String
| Stdout
| ExprMode
| WebMode
deriving (Eq,Ord)
options:: [OptDescr Option]
options =
[ Option ['h'] ["help"] (NoArg Usage) "shows this help message"
, Option ['f'] ["flapjax-path"] (ReqArg Flapjax "URL") "url of flapjax.js"
, Option ['o'] ["output"] (ReqArg Output "FILE") "output path"
, Option [] ["stdout"] (NoArg Stdout) "write to standard output"
, Option [] ["stdin"] (NoArg Stdin) "read from standard input"
, Option [] ["expression"] (NoArg ExprMode) "compile a single expression"
, Option [] ["web-mode"] (NoArg WebMode) "web-compiler mode"
]
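-- Example invocations (paths and URLs are illustrative):
--
--   fxc -f http://localhost/flapjax.js -o page.html page.fj.html
--   fxc --stdin --stdout --expression
--
-- The second form reads a single Flapjax expression from standard input and
-- writes the compiled JavaScript expression to standard output.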
checkUsage (Usage:_) = do
putStrLn "Flapjax Compiler (fxc-2.0)"
putStrLn (usageInfo "Usage: fxc [OPTION ...] file" options)
exitSuccess
checkUsage _ = return ()
getFlapjaxPath :: [Option] -> IO (String,[Option])
getFlapjaxPath ((Flapjax s):rest) = return (s,rest)
getFlapjaxPath rest = do
s <- getInstalledFlapjaxPath
return ("file://" ++ s,rest)
getInput :: [String] -> [Option] -> IO (Handle,String,[Option])
getInput [] (Stdin:rest) = return (stdin,"stdin",rest)
getInput [path] options = do
h <- openFile path ReadMode
return (h,path,options)
getInput [] _ = do
hPutStrLn stderr "neither --stdin nor an input file was specified"
exitFailure
getInput (_:_) _ = do
hPutStrLn stderr "multiple input files specified"
exitFailure
getOutput :: String -> [Option] -> IO (Handle,[Option])
getOutput _ (Stdout:rest) = return (stdout,rest)
getOutput _ ((Output outputName):rest) = do
h <- openFile outputName WriteMode
return (h,rest)
getOutput inputName options = do
h <- openFile (inputName ++ ".html") WriteMode
return (h,options)
getWebMode :: [Option] -> IO (Bool,[Option])
getWebMode (WebMode:[]) = return (True, [])
getWebMode (WebMode:_) = do
hPutStrLn stderr "invalid arguments, use -h for help"
exitFailure
getWebMode options = return (False,options)
getExprMode (ExprMode:[]) =
return (True, [])
getExprMode (ExprMode:_) = do
hPutStrLn stderr "invalid arguments, use -h for help"
exitFailure
getExprMode args =
return (False, args)
parseExpr = do
whiteSpace
e <- parseExpression
eof
return e
main = do
argv <- getArgs
let (permutedArgs,files,errors) = getOpt Permute options argv
unless (null errors) $ do
mapM_ (hPutStrLn stderr) errors
exitFailure
let args = L.sort permutedArgs
checkUsage args
(fxPath,args) <- getFlapjaxPath args
(inputHandle,inputName,args) <- getInput files args
(outputHandle,args) <- getOutput inputName args
(isExprMode, args) <- getExprMode args
(isWebMode, args) <- getWebMode args
unless (null args) $ do
hPutStrLn stderr "invalid arguments, use -h for help"
exitFailure
-- monomorphism restriction, I think
let showErr :: (Show a, HTML a) => a -> String
showErr = if isWebMode then showHtml.toHtml else show
inputText <- hGetContents inputHandle
case isExprMode of
True -> case parse parseExpr "web request" inputText of
Left _ -> do
hPutStr outputHandle "throw \'parse error\'"
exitFailure
Right fxExpr -> do
jsExpr <- compileExpr defaults fxExpr
hPutStr outputHandle (renderExpression jsExpr)
exitSuccess
False -> case parseHtmlFromString inputName inputText of
Left err -> do -- TODO: web mode is different
hPutStrLn stderr (showErr err)
exitFailure
Right (html,_) -> do -- ignoring all warnings
(msgs,outHtml) <- compilePage (defaults { flapjaxPath = fxPath }) html
-- TODO: web mode is different
mapM_ (hPutStrLn stderr . showErr) msgs
hPutStrLn outputHandle (renderHtml outHtml)
hClose outputHandle
exitSuccess
| ducis/flapjax-fixed | flapjax/trunk/compiler/src/Fxc.hs | bsd-3-clause | 4,654 | 0 | 19 | 859 | 1,497 | 776 | 721 | 125 | 5 |
{-# LANGUAGE CPP #-}
-- |
-- Module : Network.TLS.Backend
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
-- A Backend represents a unified way to do IO on different
-- types without burdening our calling API with multiple
-- ways to initialize a new context.
--
-- Typically, a backend provides:
-- * a way to read data
-- * a way to write data
-- * a way to close the stream
-- * a way to flush the stream
--
module Network.TLS.Backend
( HasBackend(..)
, Backend(..)
) where
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import System.IO (Handle, hSetBuffering, BufferMode(..), hFlush, hClose)
#ifdef INCLUDE_NETWORK
import Control.Monad
import qualified Network.Socket as Network (Socket, sClose)
import qualified Network.Socket.ByteString as Network
#endif
#ifdef INCLUDE_HANS
import qualified Data.ByteString.Lazy as L
import qualified Hans.NetworkStack as Hans
#endif
-- | Connection IO backend
data Backend = Backend
{ backendFlush :: IO () -- ^ Flush the connection sending buffer, if any.
, backendClose :: IO () -- ^ Close the connection.
, backendSend :: ByteString -> IO () -- ^ Send a bytestring through the connection.
, backendRecv :: Int -> IO ByteString -- ^ Receive specified number of bytes from the connection.
}
class HasBackend a where
initializeBackend :: a -> IO ()
getBackend :: a -> Backend
instance HasBackend Backend where
initializeBackend _ = return ()
getBackend = id
#ifdef INCLUDE_NETWORK
instance HasBackend Network.Socket where
initializeBackend _ = return ()
getBackend sock = Backend (return ()) (Network.sClose sock) (Network.sendAll sock) recvAll
where recvAll n = B.concat `fmap` loop n
where loop 0 = return []
loop left = do
r <- Network.recv sock left
if B.null r
then return []
else liftM (r:) (loop (left - B.length r))
#endif
#ifdef INCLUDE_HANS
instance HasBackend Hans.Socket where
initializeBackend _ = return ()
getBackend sock = Backend (return ()) (Hans.close sock) sendAll recvAll
where sendAll x = do
amt <- fromIntegral `fmap` Hans.sendBytes sock (L.fromStrict x)
if (amt == 0) || (amt == B.length x)
then return ()
else sendAll (B.drop amt x)
recvAll n = loop (fromIntegral n) L.empty
loop 0 acc = return (L.toStrict acc)
loop left acc = do
r <- Hans.recvBytes sock left
if L.null r
then loop 0 acc
else loop (left - L.length r) (acc `L.append` r)
#endif
instance HasBackend Handle where
initializeBackend handle = hSetBuffering handle NoBuffering
getBackend handle = Backend (hFlush handle) (hClose handle) (B.hPut handle) (B.hGet handle)
| beni55/hs-tls | core/Network/TLS/Backend.hs | bsd-3-clause | 3,057 | 0 | 19 | 864 | 759 | 414 | 345 | 21 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Yesod.WebSockets
( -- * Core API
WebSocketsT
, webSockets
, webSocketsWith
, webSocketsOptions
, webSocketsOptionsWith
, receiveData
, receiveDataE
, receiveDataMessageE
, sendPing
, sendPingE
, sendClose
, sendCloseE
, sendTextData
, sendTextDataE
, sendBinaryData
, sendBinaryDataE
, sendDataMessageE
-- * Conduit API
, sourceWS
, sinkWSText
, sinkWSBinary
-- * Async helpers
, race
, race_
, concurrently
, concurrently_
-- * Re-exports from websockets
, WS.defaultConnectionOptions
, WS.ConnectionOptions (..)
) where
import Control.Monad (forever, when)
import Control.Monad.Reader (ReaderT, runReaderT, MonadReader, ask)
import Conduit
import qualified Network.Wai.Handler.WebSockets as WaiWS
import qualified Network.WebSockets as WS
import qualified Yesod.Core as Y
import UnliftIO (SomeException, tryAny, MonadIO, liftIO, MonadUnliftIO, withRunInIO, race, race_, concurrently, concurrently_)
-- | A transformer for a WebSockets handler.
--
-- Since 0.1.0
type WebSocketsT = ReaderT WS.Connection
-- | Attempt to run a WebSockets handler. This function first checks if the
-- client initiated a WebSockets connection and, if so, runs the provided
-- application, short-circuiting the rest of your handler. If the client did
-- not request a WebSockets connection, the rest of your handler will be called
-- instead.
--
-- Since 0.1.0
webSockets
:: (MonadUnliftIO m, Y.MonadHandler m)
=> WebSocketsT m ()
-> m ()
webSockets = webSocketsOptions WS.defaultConnectionOptions
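-- A minimal usage sketch (the route handler name and 'Text' message type
-- are assumptions, not part of this module): an echo handler that
-- upgrades to a WebSockets session when the client asks for one.
--
-- > getEchoR :: Handler ()
-- > getEchoR = webSockets $ forever $ do
-- >     msg <- receiveData
-- >     sendTextData (msg :: Text)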
-- | Variant of 'webSockets' which allows you to specify
-- the WS.ConnectionOptions settings when upgrading to a websocket connection.
--
-- Since 0.2.5
webSocketsOptions
:: (MonadUnliftIO m, Y.MonadHandler m)
=> WS.ConnectionOptions
-> WebSocketsT m ()
-> m ()
webSocketsOptions opts = webSocketsOptionsWith opts $ const $ return $ Just $ WS.AcceptRequest Nothing []
-- | Variant of 'webSockets' which allows you to specify the 'WS.AcceptRequest'
-- settings when upgrading to a websocket connection.
--
-- Since 0.2.4
webSocketsWith :: (MonadUnliftIO m, Y.MonadHandler m)
=> (WS.RequestHead -> m (Maybe WS.AcceptRequest))
-- ^ A Nothing indicates that the websocket upgrade request should not happen
-- and the rest of the handler will be called instead. This allows
-- you to use 'WS.getRequestSubprotocols' and only accept the request if
-- a compatible subprotocol is given. Also, the action runs before upgrading
-- the request to websockets, so you can also use short-circuiting handler
-- actions such as 'Y.invalidArgs'.
-> WebSocketsT m ()
-> m ()
webSocketsWith = webSocketsOptionsWith WS.defaultConnectionOptions
-- | Variant of 'webSockets' which allows you to specify both
-- the WS.ConnectionOptions and the 'WS.AcceptRequest'
-- settings when upgrading to a websocket connection.
--
-- Since 0.2.5
webSocketsOptionsWith :: (MonadUnliftIO m, Y.MonadHandler m)
=> WS.ConnectionOptions
-- ^ Custom websockets options
-> (WS.RequestHead -> m (Maybe WS.AcceptRequest))
-- ^ A Nothing indicates that the websocket upgrade request should not happen
-- and the rest of the handler will be called instead. This allows
-- you to use 'WS.getRequestSubprotocols' and only accept the request if
-- a compatible subprotocol is given. Also, the action runs before upgrading
-- the request to websockets, so you can also use short-circuiting handler
-- actions such as 'Y.invalidArgs'.
-> WebSocketsT m ()
-> m ()
webSocketsOptionsWith wsConnOpts buildAr inner = do
req <- Y.waiRequest
when (WaiWS.isWebSocketsReq req) $ do
let rhead = WaiWS.getRequestHead req
mar <- buildAr rhead
case mar of
Nothing -> return ()
Just ar ->
Y.sendRawResponseNoConduit
$ \src sink -> withRunInIO $ \runInIO -> WaiWS.runWebSockets
wsConnOpts
rhead
(\pconn -> do
conn <- WS.acceptRequestWith pconn ar
WS.forkPingThread conn 30
runInIO $ runReaderT inner conn)
src
sink
-- | Wrapper for capturing exceptions
wrapWSE :: (MonadIO m, MonadReader WS.Connection m)
=> (WS.Connection -> a -> IO ())
-> a
-> m (Either SomeException ())
wrapWSE ws x = do
conn <- ask
liftIO $ tryAny $ ws conn x
wrapWS :: (MonadIO m, MonadReader WS.Connection m)
=> (WS.Connection -> a -> IO ())
-> a
-> m ()
wrapWS ws x = do
conn <- ask
liftIO $ ws conn x
-- | Receive a piece of data from the client.
--
-- Since 0.1.0
receiveData
:: (MonadIO m, MonadReader WS.Connection m, WS.WebSocketsData a)
=> m a
receiveData = do
conn <- ask
liftIO $ WS.receiveData conn
-- | Receive a piece of data from the client.
-- Capture SomeException as the result of the operation.
-- Since 0.2.2
receiveDataE
:: (MonadIO m, MonadReader WS.Connection m, WS.WebSocketsData a)
=> m (Either SomeException a)
receiveDataE = do
conn <- ask
liftIO $ tryAny $ WS.receiveData conn
-- | Receive an application message.
-- Capture SomeException as the result of the operation.
-- Since 0.2.3
receiveDataMessageE
:: (MonadIO m, MonadReader WS.Connection m)
=> m (Either SomeException WS.DataMessage)
receiveDataMessageE = do
conn <- ask
liftIO $ tryAny $ WS.receiveDataMessage conn
-- | Send a textual message to the client.
--
-- Since 0.1.0
sendTextData
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> a
-> m ()
sendTextData = wrapWS WS.sendTextData
-- | Send a textual message to the client.
-- Capture SomeException as the result of the operation,
-- and can be used like
-- `either handle_exception return =<< sendTextDataE ("Welcome" :: Text)`
-- Since 0.2.2
sendTextDataE
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> a
-> m (Either SomeException ())
sendTextDataE = wrapWSE WS.sendTextData
-- | Send a binary message to the client.
--
-- Since 0.1.0
sendBinaryData
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> a
-> m ()
sendBinaryData = wrapWS WS.sendBinaryData
-- | Send a binary message to the client.
-- Capture SomeException as the result of the operation.
-- Since 0.2.2
sendBinaryDataE
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> a
-> m (Either SomeException ())
sendBinaryDataE = wrapWSE WS.sendBinaryData
-- | Send a ping message to the client.
--
-- Since 0.2.2
sendPing
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> a
-> WebSocketsT m ()
sendPing = wrapWS WS.sendPing
-- | Send a ping message to the client.
-- Capture SomeException as the result of the operation.
-- Since 0.2.2
sendPingE
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> a
-> m (Either SomeException ())
sendPingE = wrapWSE WS.sendPing
-- | Send a DataMessage to the client.
-- Capture SomeException as the result of the operation.
-- Since 0.2.3
sendDataMessageE
:: (MonadIO m, MonadReader WS.Connection m)
=> WS.DataMessage
-> m (Either SomeException ())
sendDataMessageE x = do
conn <- ask
liftIO $ tryAny $ WS.sendDataMessage conn x
-- | Send a close request to the client.
--
-- Since 0.2.2
sendClose
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> a
-> WebSocketsT m ()
sendClose = wrapWS WS.sendClose
-- | Send a close request to the client.
-- Capture SomeException as the result of the operation.
-- Since 0.2.2
sendCloseE
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> a
-> m (Either SomeException ())
sendCloseE = wrapWSE WS.sendClose
-- | A @Source@ of WebSockets data from the user.
--
-- Since 0.1.0
sourceWS
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> ConduitT i a m ()
sourceWS = forever $ lift receiveData >>= yield
-- | A @Sink@ for sending textual data to the user.
--
-- Since 0.1.0
sinkWSText
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> ConduitT a o m ()
sinkWSText = mapM_C sendTextData
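-- A conduit-style echo, again only a sketch assuming a Yesod 'Handler'
-- monad and 'Text' messages (neither is required by this module):
--
-- > webSockets $ runConduit $
-- >     (sourceWS :: ConduitT () Text (WebSocketsT Handler) ()) .| sinkWSText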
-- | A @Sink@ for sending binary data to the user.
--
-- Since 0.1.0
sinkWSBinary
:: (MonadIO m, WS.WebSocketsData a, MonadReader WS.Connection m)
=> ConduitT a o m ()
sinkWSBinary = mapM_C sendBinaryData
| psibi/yesod | yesod-websockets/Yesod/WebSockets.hs | mit | 8,878 | 0 | 24 | 2,203 | 1,785 | 964 | 821 | 168 | 2 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module Faktor.Param where
-- $Id$
import Autolib.Reader
import Autolib.ToDoc
import Data.Typeable
import Autolib.Set
data Param =
Param { von :: Int
, bis :: Int
, anzahl :: Int
}
deriving ( Typeable )
p :: Param
p = Param { von = 100
, bis = 1000
, anzahl = 3
}
$(derives [makeReader, makeToDoc] [''Param])
instance Show Param where show = render . toDoc
instance Read Param where readsPrec = parsec_readsPrec
| florianpilz/autotool | src/Faktor/Param.hs | gpl-2.0 | 506 | 4 | 9 | 120 | 149 | 88 | 61 | 18 | 1 |
module Layout00014 where
instance Indexed (Pull sh a) where
Pull ixf _ ! i = ixf i
| charleso/intellij-haskforce | tests/gold/parser/Layout00014.hs | apache-2.0 | 88 | 0 | 7 | 22 | 39 | 19 | 20 | 3 | 0 |
{-# LANGUAGE RecordWildCards, GADTs #-}
module CmmLayoutStack (
cmmLayoutStack, setInfoTableStackMap
) where
import StgCmmUtils ( callerSaveVolatileRegs ) -- XXX layering violation
import StgCmmForeign ( saveThreadState, loadThreadState ) -- XXX layering violation
import BasicTypes
import Cmm
import CmmInfo
import BlockId
import CLabel
import CmmUtils
import MkGraph
import ForeignCall
import CmmLive
import CmmProcPoint
import SMRep
import Hoopl
import UniqSupply
import Maybes
import UniqFM
import Util
import DynFlags
import FastString
import Outputable
import qualified Data.Set as Set
import Control.Monad.Fix
import Data.Array as Array
import Data.Bits
import Data.List (nub)
import Control.Monad (liftM)
#include "HsVersions.h"
{- Note [Stack Layout]
The job of this pass is to
- replace references to abstract stack Areas with fixed offsets from Sp.
- replace the CmmHighStackMark constant used in the stack check with
the maximum stack usage of the proc.
- save any variables that are live across a call, and reload them as
necessary.
Before stack allocation, local variables remain live across native
calls (CmmCall{ cmm_cont = Just _ }), and after stack allocation local
variables are clobbered by native calls.
We want to do stack allocation so that as far as possible
- stack use is minimized, and
- unnecessary stack saves and loads are avoided.
The algorithm we use is a variant of linear-scan register allocation,
where the stack is our register file.
- First, we do a liveness analysis, which annotates every block with
the variables live on entry to the block.
- We traverse blocks in reverse postorder DFS; that is, we visit at
least one predecessor of a block before the block itself. The
stack layout flowing from the predecessor of the block will
determine the stack layout on entry to the block.
- We maintain a data structure
Map Label StackMap
which describes the contents of the stack and the stack pointer on
entry to each block that is a successor of a block that we have
visited.
- For each block we visit:
- Look up the StackMap for this block.
- If this block is a proc point (or a call continuation, if we
aren't splitting proc points), emit instructions to reload all
the live variables from the stack, according to the StackMap.
- Walk forwards through the instructions:
- At an assignment x = Sp[loc]
- Record the fact that Sp[loc] contains x, so that we won't
need to save x if it ever needs to be spilled.
- At an assignment x = E
- If x was previously on the stack, it isn't any more
- At the last node, if it is a call or a jump to a proc point
- Lay out the stack frame for the call (see setupStackFrame)
- emit instructions to save all the live variables
- Remember the StackMaps for all the successors
- emit an instruction to adjust Sp
- If the last node is a branch, then the current StackMap is the
StackMap for the successors.
- Manifest Sp: replace references to stack areas in this block
with real Sp offsets. We cannot do this until we have laid out
the stack area for the successors above.
In this phase we also eliminate redundant stores to the stack;
see elimStackStores.
- There is one important gotcha: sometimes we'll encounter a control
transfer to a block that we've already processed (a join point),
and in that case we might need to rearrange the stack to match
what the block is expecting. (exactly the same as in linear-scan
register allocation, except here we have the luxury of an infinite
supply of temporary variables).
- Finally, we update the magic CmmHighStackMark constant with the
stack usage of the function, and eliminate the whole stack check
if there was no stack use. (in fact this is done as part of the
main traversal, by feeding the high-water-mark output back in as
an input. I hate cyclic programming, but it's just too convenient
sometimes.)
There are plenty of tricky details: update frames, proc points, return
addresses, foreign calls, and some ad-hoc optimisations that are
convenient to do here and effective in common cases. Comments in the
code below explain these.
-}
-- All stack locations are expressed as positive byte offsets from the
-- "base", which is defined to be the address above the return address
-- on the stack on entry to this CmmProc.
--
-- Lower addresses have higher StackLocs.
--
type StackLoc = ByteOff
{-
A StackMap describes the stack at any given point. At a continuation
it has a particular layout, like this:
| | <- base
|-------------|
| ret0 | <- base + 8
|-------------|
. upd frame . <- base + sm_ret_off
|-------------|
| |
. vars .
. (live/dead) .
| | <- base + sm_sp - sm_args
|-------------|
| ret1 |
. ret vals . <- base + sm_sp (<--- Sp points here)
|-------------|
Why do we include the final return address (ret0) in our stack map? I
have absolutely no idea, but it seems to be done that way consistently
in the rest of the code generator, so I played along here. --SDM
Note that we will be constructing an info table for the continuation
(ret1), which needs to describe the stack down to, but not including,
the update frame (or ret0, if there is no update frame).
-}
data StackMap = StackMap
{ sm_sp :: StackLoc
-- ^ the offset of Sp relative to the base on entry
-- to this block.
, sm_args :: ByteOff
-- ^ the number of bytes of arguments in the area for this block
-- Defn: the offset of young(L) relative to the base is given by
-- (sm_sp - sm_args) of the StackMap for block L.
, sm_ret_off :: ByteOff
-- ^ Number of words of stack that we do not describe with an info
-- table, because it contains an update frame.
, sm_regs :: UniqFM (LocalReg,StackLoc)
-- ^ regs on the stack
}
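-- An illustrative (hypothetical) value: a continuation frame whose Sp sits
-- 40 bytes below the base, with 8 bytes of arguments, an 8-byte
-- return-address/update-frame area, and one live variable r spilled at
-- byte offset 24 could be described by
--
-- > StackMap { sm_sp = 40, sm_args = 8, sm_ret_off = 8
-- >          , sm_regs = unitUFM r (r, 24) }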
instance Outputable StackMap where
ppr StackMap{..} =
text "Sp = " <> int sm_sp $$
text "sm_args = " <> int sm_args $$
text "sm_ret_off = " <> int sm_ret_off $$
text "sm_regs = " <> ppr (eltsUFM sm_regs)
cmmLayoutStack :: DynFlags -> ProcPointSet -> ByteOff -> CmmGraph
-> UniqSM (CmmGraph, BlockEnv StackMap)
cmmLayoutStack dflags procpoints entry_args
graph0@(CmmGraph { g_entry = entry })
= do
-- pprTrace "cmmLayoutStack" (ppr entry_args) $ return ()
-- We need liveness info. We could do removeDeadAssignments at
-- the same time, but it buys nothing over doing cmmSink later,
-- and costs a lot more than just cmmLocalLiveness.
-- (graph, liveness) <- removeDeadAssignments graph0
let (graph, liveness) = (graph0, cmmLocalLiveness dflags graph0)
-- pprTrace "liveness" (ppr liveness) $ return ()
let blocks = postorderDfs graph
(final_stackmaps, _final_high_sp, new_blocks) <-
mfix $ \ ~(rec_stackmaps, rec_high_sp, _new_blocks) ->
layout dflags procpoints liveness entry entry_args
rec_stackmaps rec_high_sp blocks
new_blocks' <- mapM (lowerSafeForeignCall dflags) new_blocks
-- pprTrace ("Sp HWM") (ppr _final_high_sp) $ return ()
return (ofBlockList entry new_blocks', final_stackmaps)
layout :: DynFlags
-> BlockSet -- proc points
-> BlockEnv CmmLocalLive -- liveness
-> BlockId -- entry
-> ByteOff -- stack args on entry
-> BlockEnv StackMap -- [final] stack maps
-> ByteOff -- [final] Sp high water mark
-> [CmmBlock] -- [in] blocks
-> UniqSM
( BlockEnv StackMap -- [out] stack maps
, ByteOff -- [out] Sp high water mark
, [CmmBlock] -- [out] new blocks
)
layout dflags procpoints liveness entry entry_args final_stackmaps final_sp_high blocks
= go blocks init_stackmap entry_args []
where
(updfr, cont_info) = collectContInfo blocks
init_stackmap = mapSingleton entry StackMap{ sm_sp = entry_args
, sm_args = entry_args
, sm_ret_off = updfr
, sm_regs = emptyUFM
}
go [] acc_stackmaps acc_hwm acc_blocks
= return (acc_stackmaps, acc_hwm, acc_blocks)
go (b0 : bs) acc_stackmaps acc_hwm acc_blocks
= do
let (entry0@(CmmEntry entry_lbl), middle0, last0) = blockSplit b0
let stack0@StackMap { sm_sp = sp0 }
= mapFindWithDefault
(pprPanic "no stack map for" (ppr entry_lbl))
entry_lbl acc_stackmaps
-- pprTrace "layout" (ppr entry_lbl <+> ppr stack0) $ return ()
-- (a) Update the stack map to include the effects of
-- assignments in this block
let stack1 = foldBlockNodesF (procMiddle acc_stackmaps) middle0 stack0
-- (b) Insert assignments to reload all the live variables if this
-- block is a proc point
let middle1 = if entry_lbl `setMember` procpoints
then foldr blockCons middle0 (insertReloads stack0)
else middle0
-- (c) Look at the last node and if we are making a call or
-- jumping to a proc point, we must save the live
-- variables, adjust Sp, and construct the StackMaps for
-- each of the successor blocks. See handleLastNode for
-- details.
(middle2, sp_off, last1, fixup_blocks, out)
<- handleLastNode dflags procpoints liveness cont_info
acc_stackmaps stack1 middle0 last0
-- pprTrace "layout(out)" (ppr out) $ return ()
-- (d) Manifest Sp: run over the nodes in the block and replace
-- CmmStackSlot with CmmLoad from Sp with a concrete offset.
--
-- our block:
-- middle1 -- the original middle nodes
-- middle2 -- live variable saves from handleLastNode
-- Sp = Sp + sp_off -- Sp adjustment goes here
-- last1 -- the last node
--
let middle_pre = blockToList $ foldl blockSnoc middle1 middle2
final_blocks = manifestSp dflags final_stackmaps stack0 sp0 final_sp_high entry0
middle_pre sp_off last1 fixup_blocks
acc_stackmaps' = mapUnion acc_stackmaps out
-- If this block jumps to the GC, then we do not take its
-- stack usage into account for the high-water mark.
-- Otherwise, if the only stack usage is in the stack-check
-- failure block itself, we will do a redundant stack
-- check. The stack has a buffer designed to accommodate
-- the largest amount of stack needed for calling the GC.
--
this_sp_hwm | isGcJump last0 = 0
| otherwise = sp0 - sp_off
hwm' = maximum (acc_hwm : this_sp_hwm : map sm_sp (mapElems out))
go bs acc_stackmaps' hwm' (final_blocks ++ acc_blocks)
-- -----------------------------------------------------------------------------
-- Not foolproof, but GCFun is the culprit we most want to catch
isGcJump :: CmmNode O C -> Bool
isGcJump (CmmCall { cml_target = CmmReg (CmmGlobal l) })
= l == GCFun || l == GCEnter1
isGcJump _something_else = False
-- -----------------------------------------------------------------------------
-- This doesn't seem right somehow. We need to find out whether this
-- proc will push some update frame material at some point, so that we
-- can avoid using that area of the stack for spilling. The
-- updfr_space field of the CmmProc *should* tell us, but it doesn't
-- (I think maybe it gets filled in later when we do proc-point
-- splitting).
--
-- So we'll just take the max of all the cml_ret_offs. This could be
-- unnecessarily pessimistic, but probably not in the code we
-- generate.
collectContInfo :: [CmmBlock] -> (ByteOff, BlockEnv ByteOff)
collectContInfo blocks
= (maximum ret_offs, mapFromList (catMaybes mb_argss))
where
(mb_argss, ret_offs) = mapAndUnzip get_cont blocks
get_cont :: Block CmmNode x C -> (Maybe (Label, ByteOff), ByteOff)
get_cont b =
case lastNode b of
CmmCall { cml_cont = Just l, .. }
-> (Just (l, cml_ret_args), cml_ret_off)
CmmForeignCall { .. }
-> (Just (succ, ret_args), ret_off)
_other -> (Nothing, 0)
-- -----------------------------------------------------------------------------
-- Updating the StackMap from middle nodes
-- Look for loads from stack slots, and update the StackMap. This is
-- purely for optimisation reasons, so that we can avoid saving a
-- variable back to a different stack slot if it is already on the
-- stack.
--
-- This happens a lot: for example when function arguments are passed
-- on the stack and need to be immediately saved across a call, we
-- want to just leave them where they are on the stack.
--
procMiddle :: BlockEnv StackMap -> CmmNode e x -> StackMap -> StackMap
procMiddle stackmaps node sm
= case node of
CmmAssign (CmmLocal r) (CmmLoad (CmmStackSlot area off) _)
-> sm { sm_regs = addToUFM (sm_regs sm) r (r,loc) }
where loc = getStackLoc area off stackmaps
CmmAssign (CmmLocal r) _other
-> sm { sm_regs = delFromUFM (sm_regs sm) r }
_other
-> sm
getStackLoc :: Area -> ByteOff -> BlockEnv StackMap -> StackLoc
getStackLoc Old n _ = n
getStackLoc (Young l) n stackmaps =
case mapLookup l stackmaps of
Nothing -> pprPanic "getStackLoc" (ppr l)
Just sm -> sm_sp sm - sm_args sm + n
-- -----------------------------------------------------------------------------
-- Handling stack allocation for a last node
-- We take a single last node and turn it into:
--
-- C1 (some statements)
-- Sp = Sp + N
-- C2 (some more statements)
-- call f() -- the actual last node
--
-- plus possibly some more blocks (we may have to add some fixup code
-- between the last node and the continuation).
--
-- C1: is the code for saving the variables across this last node onto
-- the stack, if the continuation is a call or jumps to a proc point.
--
-- C2: if the last node is a safe foreign call, we have to inject some
-- extra code that goes *after* the Sp adjustment.
handleLastNode
:: DynFlags -> ProcPointSet -> BlockEnv CmmLocalLive -> BlockEnv ByteOff
-> BlockEnv StackMap -> StackMap
-> Block CmmNode O O
-> CmmNode O C
-> UniqSM
( [CmmNode O O] -- nodes to go *before* the Sp adjustment
, ByteOff -- amount to adjust Sp
, CmmNode O C -- new last node
, [CmmBlock] -- new blocks
, BlockEnv StackMap -- stackmaps for the continuations
)
handleLastNode dflags procpoints liveness cont_info stackmaps
stack0@StackMap { sm_sp = sp0 } middle last
= case last of
-- At each return / tail call,
-- adjust Sp to point to the last argument pushed, which
-- is cml_args, after popping any other junk from the stack.
CmmCall{ cml_cont = Nothing, .. } -> do
let sp_off = sp0 - cml_args
return ([], sp_off, last, [], mapEmpty)
-- At each CmmCall with a continuation:
CmmCall{ cml_cont = Just cont_lbl, .. } ->
return $ lastCall cont_lbl cml_args cml_ret_args cml_ret_off
CmmForeignCall{ succ = cont_lbl, .. } -> do
return $ lastCall cont_lbl (wORD_SIZE dflags) ret_args ret_off
-- one word of args: the return address
CmmBranch{..} -> handleBranches
CmmCondBranch{..} -> handleBranches
CmmSwitch{..} -> handleBranches
where
-- Calls and ForeignCalls are handled the same way:
lastCall :: BlockId -> ByteOff -> ByteOff -> ByteOff
-> ( [CmmNode O O]
, ByteOff
, CmmNode O C
, [CmmBlock]
, BlockEnv StackMap
)
lastCall lbl cml_args cml_ret_args cml_ret_off
= ( assignments
, spOffsetForCall sp0 cont_stack cml_args
, last
, [] -- no new blocks
, mapSingleton lbl cont_stack )
where
(assignments, cont_stack) = prepareStack lbl cml_ret_args cml_ret_off
prepareStack lbl cml_ret_args cml_ret_off
| Just cont_stack <- mapLookup lbl stackmaps
-- If we have already seen this continuation before, then
-- we just have to make the stack look the same:
= (fixupStack stack0 cont_stack, cont_stack)
-- Otherwise, we have to allocate the stack frame
| otherwise
= (save_assignments, new_cont_stack)
where
(new_cont_stack, save_assignments)
= setupStackFrame dflags lbl liveness cml_ret_off cml_ret_args stack0
-- For other last nodes (branches), if any of the targets is a
-- proc point, we have to set up the stack to match what the proc
-- point is expecting.
--
handleBranches :: UniqSM ( [CmmNode O O]
, ByteOff
, CmmNode O C
, [CmmBlock]
, BlockEnv StackMap )
handleBranches
-- Note [diamond proc point]
| Just l <- futureContinuation middle
, (nub $ filter (`setMember` procpoints) $ successors last) == [l]
= do
let cont_args = mapFindWithDefault 0 l cont_info
(assigs, cont_stack) = prepareStack l cont_args (sm_ret_off stack0)
out = mapFromList [ (l', cont_stack)
| l' <- successors last ]
return ( assigs
, spOffsetForCall sp0 cont_stack (wORD_SIZE dflags)
, last
, []
, out)
| otherwise = do
pps <- mapM handleBranch (successors last)
let lbl_map :: LabelMap Label
lbl_map = mapFromList [ (l,tmp) | (l,tmp,_,_) <- pps ]
fix_lbl l = mapFindWithDefault l l lbl_map
return ( []
, 0
, mapSuccessors fix_lbl last
, concat [ blk | (_,_,_,blk) <- pps ]
, mapFromList [ (l, sm) | (l,_,sm,_) <- pps ] )
-- For each successor of this block
handleBranch :: BlockId -> UniqSM (BlockId, BlockId, StackMap, [CmmBlock])
handleBranch l
-- (a) if the successor already has a stackmap, we need to
-- shuffle the current stack to make it look the same.
-- We have to insert a new block to make this happen.
| Just stack2 <- mapLookup l stackmaps
= do
let assigs = fixupStack stack0 stack2
(tmp_lbl, block) <- makeFixupBlock dflags sp0 l stack2 assigs
return (l, tmp_lbl, stack2, block)
-- (b) if the successor is a proc point, save everything
-- on the stack.
| l `setMember` procpoints
= do
let cont_args = mapFindWithDefault 0 l cont_info
(stack2, assigs) =
--pprTrace "first visit to proc point"
-- (ppr l <+> ppr stack1) $
setupStackFrame dflags l liveness (sm_ret_off stack0)
cont_args stack0
--
(tmp_lbl, block) <- makeFixupBlock dflags sp0 l stack2 assigs
return (l, tmp_lbl, stack2, block)
-- (c) otherwise, the current StackMap is the StackMap for
-- the continuation. But we must remember to remove any
-- variables from the StackMap that are *not* live at
-- the destination, because this StackMap might be used
-- by fixupStack if this is a join point.
| otherwise = return (l, l, stack1, [])
where live = mapFindWithDefault (panic "handleBranch") l liveness
stack1 = stack0 { sm_regs = filterUFM is_live (sm_regs stack0) }
is_live (r,_) = r `elemRegSet` live
makeFixupBlock :: DynFlags -> ByteOff -> Label -> StackMap -> [CmmNode O O]
-> UniqSM (Label, [CmmBlock])
makeFixupBlock dflags sp0 l stack assigs
| null assigs && sp0 == sm_sp stack = return (l, [])
| otherwise = do
tmp_lbl <- liftM mkBlockId $ getUniqueM
let sp_off = sp0 - sm_sp stack
block = blockJoin (CmmEntry tmp_lbl)
(maybeAddSpAdj dflags sp_off (blockFromList assigs))
(CmmBranch l)
return (tmp_lbl, [block])
-- Sp is currently pointing to current_sp,
-- we want it to point to
-- (sm_sp cont_stack - sm_args cont_stack + args)
-- so the difference is
-- sp0 - (sm_sp cont_stack - sm_args cont_stack + args)
spOffsetForCall :: ByteOff -> StackMap -> ByteOff -> ByteOff
spOffsetForCall current_sp cont_stack args
= current_sp - (sm_sp cont_stack - sm_args cont_stack + args)
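-- A worked (hypothetical) example: with current_sp = 40,
-- sm_sp cont_stack = 24, sm_args cont_stack = 8 and args = 8, the
-- continuation expects Sp at byte offset 24 - 8 + 8 = 24 from the base,
-- so the adjustment returned is 40 - 24 = 16 bytes.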
-- | create a sequence of assignments to establish the new StackMap,
-- given the old StackMap.
fixupStack :: StackMap -> StackMap -> [CmmNode O O]
fixupStack old_stack new_stack = concatMap move new_locs
where
old_map = sm_regs old_stack
new_locs = stackSlotRegs new_stack
move (r,n)
| Just (_,m) <- lookupUFM old_map r, n == m = []
| otherwise = [CmmStore (CmmStackSlot Old n)
(CmmReg (CmmLocal r))]
setupStackFrame
:: DynFlags
-> BlockId -- label of continuation
-> BlockEnv CmmLocalLive -- liveness
-> ByteOff -- updfr
-> ByteOff -- bytes of return values on stack
-> StackMap -- current StackMap
-> (StackMap, [CmmNode O O])
setupStackFrame dflags lbl liveness updfr_off ret_args stack0
= (cont_stack, assignments)
where
-- get the set of LocalRegs live in the continuation
live = mapFindWithDefault Set.empty lbl liveness
-- the stack from the base to updfr_off is off-limits.
-- our new stack frame contains:
-- * saved live variables
-- * the return address [young(C) + 8]
-- * the args for the call,
-- which are replaced by the return values at the return
-- point.
-- everything up to updfr_off is off-limits
-- stack1 contains updfr_off, plus everything we need to save
(stack1, assignments) = allocate dflags updfr_off live stack0
-- And the Sp at the continuation is:
-- sm_sp stack1 + ret_args
cont_stack = stack1{ sm_sp = sm_sp stack1 + ret_args
, sm_args = ret_args
, sm_ret_off = updfr_off
}
-- -----------------------------------------------------------------------------
-- Note [diamond proc point]
--
-- This special case looks for the pattern we get from a typical
-- tagged case expression:
--
-- Sp[young(L1)] = L1
-- if (R1 & 7) != 0 goto L1 else goto L2
-- L2:
-- call [R1] returns to L1
-- L1: live: {y}
-- x = R1
--
-- If we let the generic case handle this, we get
--
-- Sp[-16] = L1
-- if (R1 & 7) != 0 goto L1a else goto L2
-- L2:
-- Sp[-8] = y
-- Sp = Sp - 16
-- call [R1] returns to L1
-- L1a:
-- Sp[-8] = y
-- Sp = Sp - 16
-- goto L1
-- L1:
-- x = R1
--
-- The code for saving the live vars is duplicated in each branch, and
-- furthermore there is an extra jump in the fast path (assuming L1 is
-- a proc point, which it probably is if there is a heap check).
--
-- So to fix this we want to set up the stack frame before the
-- conditional jump. How do we know when to do this, and when it is
-- safe? The basic idea is, when we see the assignment
--
-- Sp[young(L)] = L
--
-- we know that
-- * we are definitely heading for L
-- * there can be no more reads from another stack area, because young(L)
-- overlaps with it.
--
-- We don't necessarily know that everything live at L is live now
-- (some might be assigned between here and the jump to L). So we
-- simplify and only do the optimisation when we see
--
-- (1) a block containing an assignment of a return address L
-- (2) ending in a branch where one (and only) continuation goes to L,
-- and no other continuations go to proc points.
--
-- then we allocate the stack frame for L at the end of the block,
-- before the branch.
--
-- We could generalise (2), but that would make it a bit more
-- complicated to handle, and this currently catches the common case.
futureContinuation :: Block CmmNode O O -> Maybe BlockId
futureContinuation middle = foldBlockNodesB f middle Nothing
where f :: CmmNode a b -> Maybe BlockId -> Maybe BlockId
f (CmmStore (CmmStackSlot (Young l) _) (CmmLit (CmmBlock _))) _
= Just l
f _ r = r
-- -----------------------------------------------------------------------------
-- Saving live registers
-- | Given a set of live registers and a StackMap, save all the registers
-- on the stack and return the new StackMap and the assignments to do
-- the saving.
--
allocate :: DynFlags -> ByteOff -> LocalRegSet -> StackMap
-> (StackMap, [CmmNode O O])
allocate dflags ret_off live stackmap@StackMap{ sm_sp = sp0
, sm_regs = regs0 }
=
-- pprTrace "allocate" (ppr live $$ ppr stackmap) $
-- we only have to save regs that are not already in a slot
let to_save = filter (not . (`elemUFM` regs0)) (Set.elems live)
regs1 = filterUFM (\(r,_) -> elemRegSet r live) regs0
in
-- make a map of the stack
let stack = reverse $ Array.elems $
accumArray (\_ x -> x) Empty (1, toWords dflags (max sp0 ret_off)) $
ret_words ++ live_words
where ret_words =
[ (x, Occupied)
| x <- [ 1 .. toWords dflags ret_off] ]
live_words =
[ (toWords dflags x, Occupied)
| (r,off) <- eltsUFM regs1,
let w = localRegBytes dflags r,
x <- [ off, off - wORD_SIZE dflags .. off - w + 1] ]
in
-- Pass over the stack: find slots to save all the new live variables,
-- choosing the oldest slots first (hence a foldr).
let
save slot ([], stack, n, assigs, regs) -- no more regs to save
= ([], slot:stack, plusW dflags n 1, assigs, regs)
save slot (to_save, stack, n, assigs, regs)
= case slot of
Occupied -> (to_save, Occupied:stack, plusW dflags n 1, assigs, regs)
Empty
| Just (stack', r, to_save') <-
select_save to_save (slot:stack)
-> let assig = CmmStore (CmmStackSlot Old n')
(CmmReg (CmmLocal r))
n' = plusW dflags n 1
in
(to_save', stack', n', assig : assigs, (r,(r,n')):regs)
| otherwise
-> (to_save, slot:stack, plusW dflags n 1, assigs, regs)
-- we should do better here: right now we'll fit the smallest first,
-- but it would make more sense to fit the biggest first.
select_save :: [LocalReg] -> [StackSlot]
-> Maybe ([StackSlot], LocalReg, [LocalReg])
select_save regs stack = go regs []
where go [] _no_fit = Nothing
go (r:rs) no_fit
| Just rest <- dropEmpty words stack
= Just (replicate words Occupied ++ rest, r, rs++no_fit)
| otherwise
= go rs (r:no_fit)
where words = localRegWords dflags r
-- fill in empty slots as much as possible
(still_to_save, save_stack, n, save_assigs, save_regs)
= foldr save (to_save, [], 0, [], []) stack
-- push any remaining live vars on the stack
(push_sp, push_assigs, push_regs)
= foldr push (n, [], []) still_to_save
where
push r (n, assigs, regs)
= (n', assig : assigs, (r,(r,n')) : regs)
where
n' = n + localRegBytes dflags r
assig = CmmStore (CmmStackSlot Old n')
(CmmReg (CmmLocal r))
trim_sp
| not (null push_regs) = push_sp
| otherwise
= plusW dflags n (- length (takeWhile isEmpty save_stack))
final_regs = regs1 `addListToUFM` push_regs
`addListToUFM` save_regs
in
-- XXX should be an assert
if ( n /= max sp0 ret_off ) then pprPanic "allocate" (ppr n <+> ppr sp0 <+> ppr ret_off) else
if (trim_sp .&. (wORD_SIZE dflags - 1)) /= 0 then pprPanic "allocate2" (ppr trim_sp <+> ppr final_regs <+> ppr push_sp) else
( stackmap { sm_regs = final_regs , sm_sp = trim_sp }
, push_assigs ++ save_assigs )
-- -----------------------------------------------------------------------------
-- Manifesting Sp
-- | Manifest Sp: turn all the CmmStackSlots into CmmLoads from Sp. The
-- block looks like this:
--
-- middle_pre -- the middle nodes
-- Sp = Sp + sp_off -- Sp adjustment goes here
-- last -- the last node
--
-- And we have some extra blocks too (that don't contain Sp adjustments)
--
-- The adjustment for middle_pre will be different from that for
-- middle_post, because the Sp adjustment intervenes.
--
manifestSp
:: DynFlags
-> BlockEnv StackMap -- StackMaps for other blocks
-> StackMap -- StackMap for this block
-> ByteOff -- Sp on entry to the block
-> ByteOff -- SpHigh
-> CmmNode C O -- first node
-> [CmmNode O O] -- middle
-> ByteOff -- sp_off
-> CmmNode O C -- last node
-> [CmmBlock] -- new blocks
-> [CmmBlock] -- final blocks with Sp manifest
manifestSp dflags stackmaps stack0 sp0 sp_high
first middle_pre sp_off last fixup_blocks
= final_block : fixup_blocks'
where
area_off = getAreaOff stackmaps
adj_pre_sp, adj_post_sp :: CmmNode e x -> CmmNode e x
adj_pre_sp = mapExpDeep (areaToSp dflags sp0 sp_high area_off)
adj_post_sp = mapExpDeep (areaToSp dflags (sp0 - sp_off) sp_high area_off)
final_middle = maybeAddSpAdj dflags sp_off $
blockFromList $
map adj_pre_sp $
elimStackStores stack0 stackmaps area_off $
middle_pre
final_last = optStackCheck (adj_post_sp last)
final_block = blockJoin first final_middle final_last
fixup_blocks' = map (mapBlock3' (id, adj_post_sp, id)) fixup_blocks
getAreaOff :: BlockEnv StackMap -> (Area -> StackLoc)
getAreaOff _ Old = 0
getAreaOff stackmaps (Young l) =
case mapLookup l stackmaps of
Just sm -> sm_sp sm - sm_args sm
Nothing -> pprPanic "getAreaOff" (ppr l)
maybeAddSpAdj :: DynFlags -> ByteOff -> Block CmmNode O O -> Block CmmNode O O
maybeAddSpAdj _ 0 block = block
maybeAddSpAdj dflags sp_off block
= block `blockSnoc` CmmAssign spReg (cmmOffset dflags (CmmReg spReg) sp_off)
{-
Sp(L) is the Sp offset on entry to block L relative to the base of the
OLD area.
SpArgs(L) is the size of the young area for L, i.e. the number of
arguments.
- in block L, each reference to [old + N] turns into
[Sp + Sp(L) - N]
- in block L, each reference to [young(L') + N] turns into
[Sp + Sp(L) - Sp(L') + SpArgs(L') - N]
- be careful with the last node of each block: Sp has already been adjusted
to be Sp + Sp(L) - Sp(L')
-}
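-- A worked (hypothetical) instance of the rules above: if Sp(L) = 32, a
-- reference to [old + 8] in block L becomes [Sp + 32 - 8] = [Sp + 24];
-- with Sp(L') = 48 and SpArgs(L') = 16, a reference to [young(L') + 8]
-- becomes [Sp + 32 - 48 + 16 - 8] = [Sp - 8].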
areaToSp :: DynFlags -> ByteOff -> ByteOff -> (Area -> StackLoc) -> CmmExpr -> CmmExpr
areaToSp dflags sp_old _sp_hwm area_off (CmmStackSlot area n)
= cmmOffset dflags (CmmReg spReg) (sp_old - area_off area - n)
-- Replace (CmmStackSlot area n) with an offset from Sp
areaToSp dflags _ sp_hwm _ (CmmLit CmmHighStackMark)
= mkIntExpr dflags sp_hwm
-- Replace CmmHighStackMark with the number of bytes of stack used,
-- the sp_hwm. See Note [Stack usage] in StgCmmHeap
areaToSp dflags _ _ _ (CmmMachOp (MO_U_Lt _)
[CmmMachOp (MO_Sub _)
[ CmmRegOff (CmmGlobal Sp) x_off
, CmmLit (CmmInt y_lit _)],
CmmReg (CmmGlobal SpLim)])
| fromIntegral x_off >= y_lit
= zeroExpr dflags
-- Replace a stack-overflow test that cannot fail with a no-op
-- See Note [Always false stack check]
areaToSp _ _ _ _ other = other
-- Note [Always false stack check]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- We can optimise stack checks of the form
--
-- if ((Sp + x) - y < SpLim) then .. else ..
--
-- where x and y are non-negative integer byte offsets. Since we know that
-- SpLim <= Sp (remember the stack grows downwards), this test must
-- yield False if (x >= y), so we can rewrite the comparison to False.
-- A subsequent sinking pass will later drop the dead code.
-- Optimising this away depends on knowing that SpLim <= Sp, so it is
-- really the job of the stack layout algorithm, hence we do it now.
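-- A concrete (hypothetical) instance: in
--   if ((Sp + 24) - 16 < SpLim) then .. else ..
-- we have x = 24 >= y = 16, so areaToSp rewrites the comparison to zero
-- (always false) and the sinking pass later drops the dead branch.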
optStackCheck :: CmmNode O C -> CmmNode O C
optStackCheck n = -- Note [null stack check]
case n of
CmmCondBranch (CmmLit (CmmInt 0 _)) _true false -> CmmBranch false
other -> other
-- -----------------------------------------------------------------------------
-- | Eliminate stores of the form
--
-- Sp[area+n] = r
--
-- when we know that r is already in the same slot as Sp[area+n]. We
-- could do this in a later optimisation pass, but that would involve
-- a separate analysis and we already have the information to hand
-- here. It helps clean up some extra stack stores in common cases.
--
-- Note that we may have to modify the StackMap as we walk through the
-- code using procMiddle, since an assignment to a variable in the
-- StackMap will invalidate its mapping there.
--
elimStackStores :: StackMap
-> BlockEnv StackMap
-> (Area -> ByteOff)
-> [CmmNode O O]
-> [CmmNode O O]
elimStackStores stackmap stackmaps area_off nodes
= go stackmap nodes
where
go _stackmap [] = []
go stackmap (n:ns)
= case n of
CmmStore (CmmStackSlot area m) (CmmReg (CmmLocal r))
| Just (_,off) <- lookupUFM (sm_regs stackmap) r
, area_off area + m == off
-> -- pprTrace "eliminated a node!" (ppr r) $
go stackmap ns
_otherwise
-> n : go (procMiddle stackmaps n stackmap) ns
-- -----------------------------------------------------------------------------
-- Update info tables to include stack liveness
setInfoTableStackMap :: DynFlags -> BlockEnv StackMap -> CmmDecl -> CmmDecl
setInfoTableStackMap dflags stackmaps (CmmProc top_info@TopInfo{..} l v g)
= CmmProc top_info{ info_tbls = mapMapWithKey fix_info info_tbls } l v g
where
fix_info lbl info_tbl@CmmInfoTable{ cit_rep = StackRep _ } =
info_tbl { cit_rep = StackRep (get_liveness lbl) }
fix_info _ other = other
get_liveness :: BlockId -> Liveness
get_liveness lbl
= case mapLookup lbl stackmaps of
Nothing -> pprPanic "setInfoTableStackMap" (ppr lbl <+> ppr info_tbls)
Just sm -> stackMapToLiveness dflags sm
setInfoTableStackMap _ _ d = d
stackMapToLiveness :: DynFlags -> StackMap -> Liveness
stackMapToLiveness dflags StackMap{..} =
reverse $ Array.elems $
accumArray (\_ x -> x) True (toWords dflags sm_ret_off + 1,
toWords dflags (sm_sp - sm_args)) live_words
where
live_words = [ (toWords dflags off, False)
| (r,off) <- eltsUFM sm_regs, isGcPtrType (localRegType r) ]
-- -----------------------------------------------------------------------------
-- Lowering safe foreign calls
{-
Note [Lower safe foreign calls]
We start with
Sp[young(L1)] = L1
,-----------------------
| r1 = foo(x,y,z) returns to L1
'-----------------------
L1:
R1 = r1 -- copyIn, inserted by mkSafeCall
...
the stack layout algorithm will arrange to save and reload everything
live across the call. Our job now is to expand the call so we get
Sp[young(L1)] = L1
,-----------------------
| SAVE_THREAD_STATE()
| token = suspendThread(BaseReg, interruptible)
| r = foo(x,y,z)
| BaseReg = resumeThread(token)
| LOAD_THREAD_STATE()
| R1 = r -- copyOut
| jump Sp[0]
'-----------------------
L1:
r = R1 -- copyIn, inserted by mkSafeCall
...
Note the copyOut, which saves the results in the places that L1 is
-- expecting them (see Note [safe foreign call convention]). Note also
-- that the safe foreign call is replaced by an unsafe one in the Cmm graph.
-}
lowerSafeForeignCall :: DynFlags -> CmmBlock -> UniqSM CmmBlock
lowerSafeForeignCall dflags block
| (entry, middle, CmmForeignCall { .. }) <- blockSplit block
= do
-- Both 'id' and 'new_base' are KindNonPtr because they're
-- RTS-only objects and are not subject to garbage collection
id <- newTemp (bWord dflags)
new_base <- newTemp (cmmRegType dflags (CmmGlobal BaseReg))
let (caller_save, caller_load) = callerSaveVolatileRegs dflags
load_tso <- newTemp (gcWord dflags)
load_stack <- newTemp (gcWord dflags)
let suspend = saveThreadState dflags <*>
caller_save <*>
mkMiddle (callSuspendThread dflags id intrbl)
midCall = mkUnsafeCall tgt res args
resume = mkMiddle (callResumeThread new_base id) <*>
-- Assign the result to BaseReg: we
-- might now have a different Capability!
mkAssign (CmmGlobal BaseReg) (CmmReg (CmmLocal new_base)) <*>
caller_load <*>
loadThreadState dflags load_tso load_stack
(_, regs, copyout) =
copyOutOflow dflags NativeReturn Jump (Young succ)
(map (CmmReg . CmmLocal) res)
ret_off []
-- NB. after resumeThread returns, the top-of-stack probably contains
-- the stack frame for succ, but it might not: if the current thread
-- received an exception during the call, then the stack might be
-- different. Hence we continue by jumping to the top stack frame,
-- not by jumping to succ.
jump = CmmCall { cml_target = entryCode dflags $
CmmLoad (CmmReg spReg) (bWord dflags)
, cml_cont = Just succ
, cml_args_regs = regs
, cml_args = widthInBytes (wordWidth dflags)
, cml_ret_args = ret_args
, cml_ret_off = ret_off }
graph' <- lgraphOfAGraph $ suspend <*>
midCall <*>
resume <*>
copyout <*>
mkLast jump
case toBlockList graph' of
[one] -> let (_, middle', last) = blockSplit one
in return (blockJoin entry (middle `blockAppend` middle') last)
_ -> panic "lowerSafeForeignCall0"
-- Block doesn't end in a safe foreign call:
| otherwise = return block
foreignLbl :: FastString -> CmmExpr
foreignLbl name = CmmLit (CmmLabel (mkForeignLabel name Nothing ForeignLabelInExternalPackage IsFunction))
newTemp :: CmmType -> UniqSM LocalReg
newTemp rep = getUniqueM >>= \u -> return (LocalReg u rep)
callSuspendThread :: DynFlags -> LocalReg -> Bool -> CmmNode O O
callSuspendThread dflags id intrbl =
CmmUnsafeForeignCall
(ForeignTarget (foreignLbl (fsLit "suspendThread"))
(ForeignConvention CCallConv [AddrHint, NoHint] [AddrHint] CmmMayReturn))
[id] [CmmReg (CmmGlobal BaseReg), mkIntExpr dflags (fromEnum intrbl)]
callResumeThread :: LocalReg -> LocalReg -> CmmNode O O
callResumeThread new_base id =
CmmUnsafeForeignCall
(ForeignTarget (foreignLbl (fsLit "resumeThread"))
(ForeignConvention CCallConv [AddrHint] [AddrHint] CmmMayReturn))
[new_base] [CmmReg (CmmLocal id)]
-- -----------------------------------------------------------------------------
plusW :: DynFlags -> ByteOff -> WordOff -> ByteOff
plusW dflags b w = b + w * wORD_SIZE dflags
data StackSlot = Occupied | Empty
-- Occupied: a return address or part of an update frame
instance Outputable StackSlot where
ppr Occupied = ptext (sLit "XXX")
ppr Empty = ptext (sLit "---")
dropEmpty :: WordOff -> [StackSlot] -> Maybe [StackSlot]
dropEmpty 0 ss = Just ss
dropEmpty n (Empty : ss) = dropEmpty (n-1) ss
dropEmpty _ _ = Nothing
isEmpty :: StackSlot -> Bool
isEmpty Empty = True
isEmpty _ = False
localRegBytes :: DynFlags -> LocalReg -> ByteOff
localRegBytes dflags r
= roundUpToWords dflags (widthInBytes (typeWidth (localRegType r)))
localRegWords :: DynFlags -> LocalReg -> WordOff
localRegWords dflags = toWords dflags . localRegBytes dflags
toWords :: DynFlags -> ByteOff -> WordOff
toWords dflags x = x `quot` wORD_SIZE dflags
insertReloads :: StackMap -> [CmmNode O O]
insertReloads stackmap =
[ CmmAssign (CmmLocal r) (CmmLoad (CmmStackSlot Old sp)
(localRegType r))
| (r,sp) <- stackSlotRegs stackmap
]
stackSlotRegs :: StackMap -> [(LocalReg, StackLoc)]
stackSlotRegs sm = eltsUFM (sm_regs sm)
| lukexi/ghc-7.8-arm64 | compiler/cmm/CmmLayoutStack.hs | bsd-3-clause | 42,050 | 1 | 25 | 12,506 | 7,182 | 3,847 | 3,335 | 497 | 6 |