author     simonpj@microsoft.com <unknown>    2007-12-20 11:13:00 +0000
committer  simonpj@microsoft.com <unknown>    2007-12-20 11:13:00 +0000
commit     67cb409159fa9136dff942b8baaec25909416022 (patch)
tree       2de192f967b2d012b7bc1e8e0b72fd97a8f55a30
parent     fe784e7dfffa8b876ed738306a82bf4bdcfd8be7 (diff)
download   haskell-67cb409159fa9136dff942b8baaec25909416022.tar.gz
Implement generalised list comprehensions
This patch implements generalised list comprehensions, as described in
the paper "Comprehensive comprehensions" (Peyton Jones & Wadler, Haskell
Workshop 2007).  If you don't use the new comprehensions, nothing should
change.  The syntax is not exactly as in the paper; see the user manual
entry for details.

You need an accompanying patch to the base library for this stuff to work.

The patch is the work of Max Bolingbroke [batterseapower@hotmail.com],
with some advice from Simon PJ.

The related GHC Wiki page is
http://hackage.haskell.org/trac/ghc/wiki/SQLLikeComprehensions
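As a taste of the feature, here is an illustrative sketch only: the concrete keyword order accepted by this patch differs slightly from both the paper and later GHC releases (see the user manual entry), and the helper functions the, sortWith and groupWith are assumed to be the ones that the accompanying base-library patch exports from GHC.Exts.

\begin{code}
{-# LANGUAGE TransformListComp #-}
module Example where    -- hypothetical example module, not part of the patch

import GHC.Exts (the, sortWith, groupWith)

employees :: [(String, String, Int)]    -- (name, department, salary)
employees = [ ("Simon", "MS", 80), ("Erik", "MS", 100), ("Phil", "Ed", 40) ]

-- Total salary per department, smallest total first, written with the new
-- "then" (transform) and "then group" qualifiers.
salariesByDept :: [(String, Int)]
salariesByDept =
  [ (the dept, sum salary)
  | (_name, dept, salary) <- employees
  , then group by dept using groupWith   -- dept and salary become per-group lists here
  , then sortWith by (sum salary)        -- order the groups by their total salary
  ]                                      -- == [("Ed",40),("MS",180)]
\end{code}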
-rw-r--r--  compiler/deSugar/Coverage.lhs      |  49
-rw-r--r--  compiler/deSugar/DsArrows.lhs      |  82
-rw-r--r--  compiler/deSugar/DsBinds.lhs       |   2
-rw-r--r--  compiler/deSugar/DsListComp.lhs    | 447
-rw-r--r--  compiler/deSugar/DsUtils.lhs       | 216
-rw-r--r--  compiler/hsSyn/HsExpr.lhs          |  32
-rw-r--r--  compiler/hsSyn/HsUtils.lhs         |   9
-rw-r--r--  compiler/main/Constants.lhs        |   4
-rw-r--r--  compiler/main/DynFlags.hs          |   4
-rw-r--r--  compiler/parser/Lexer.x            |  14
-rw-r--r--  compiler/parser/Parser.y.pp        |  88
-rw-r--r--  compiler/prelude/PrelNames.lhs     |  18
-rw-r--r--  compiler/rename/RnEnv.lhs          |   8
-rw-r--r--  compiler/rename/RnExpr.lhs         | 246
-rw-r--r--  compiler/typecheck/TcExpr.lhs      |   2
-rw-r--r--  compiler/typecheck/TcHsSyn.lhs     |  34
-rw-r--r--  compiler/typecheck/TcMatches.lhs   |  69
-rw-r--r--  compiler/utils/Panic.lhs           |   2
-rw-r--r--  docs/users_guide/flags.xml         |   6
-rw-r--r--  docs/users_guide/glasgow_exts.xml  | 160
20 files changed, 1111 insertions, 381 deletions
diff --git a/compiler/deSugar/Coverage.lhs b/compiler/deSugar/Coverage.lhs
index 7b58a95e08..d8de3285ba 100644
--- a/compiler/deSugar/Coverage.lhs
+++ b/compiler/deSugar/Coverage.lhs
@@ -65,7 +65,7 @@ addCoverageTicksToBinds
:: DynFlags
-> Module
-> ModLocation -- of the current module
- -> [TyCon] -- type constructor in this module
+ -> [TyCon] -- type constructor in this module
-> LHsBinds Id
-> IO (LHsBinds Id, HpcInfo, ModBreaks)
@@ -442,23 +442,34 @@ addTickStmt isGuard (BindStmt pat e bind fail) = do
(addTickSyntaxExpr hpcSrcSpan fail)
addTickStmt isGuard (ExprStmt e bind' ty) = do
liftM3 ExprStmt
- (addTick e)
+ (addTick isGuard e)
(addTickSyntaxExpr hpcSrcSpan bind')
(return ty)
- where
- addTick e | Just fn <- isGuard = addBinTickLHsExpr fn e
- | otherwise = addTickLHsExprAlways e
-
addTickStmt isGuard (LetStmt binds) = do
liftM LetStmt
(addTickHsLocalBinds binds)
addTickStmt isGuard (ParStmt pairs) = do
- liftM ParStmt (mapM process pairs)
- where
- process (stmts,ids) =
- liftM2 (,)
- (addTickLStmts isGuard stmts)
- (return ids)
+ liftM ParStmt
+ (mapM (addTickStmtAndBinders isGuard) pairs)
+addTickStmt isGuard (TransformStmt (stmts, ids) usingExpr maybeByExpr) = do
+ liftM3 TransformStmt
+ (addTickStmtAndBinders isGuard (stmts, ids))
+ (addTickLHsExprAlways usingExpr)
+ (addTickMaybeByLHsExpr maybeByExpr)
+addTickStmt isGuard (GroupStmt (stmts, binderMap) groupByClause) = do
+ liftM2 GroupStmt
+ (addTickStmtAndBinders isGuard (stmts, binderMap))
+ (case groupByClause of
+ GroupByNothing usingExpr -> addTickLHsExprAlways usingExpr >>= (return . GroupByNothing)
+ GroupBySomething eitherUsingExpr byExpr -> do
+ eitherUsingExpr' <- mapEitherM addTickLHsExprAlways (addTickSyntaxExpr hpcSrcSpan) eitherUsingExpr
+ byExpr' <- addTickLHsExprAlways byExpr
+ return $ GroupBySomething eitherUsingExpr' byExpr')
+ where
+ mapEitherM f g x = do
+ case x of
+ Left a -> f a >>= (return . Left)
+ Right b -> g b >>= (return . Right)
addTickStmt isGuard (RecStmt stmts ids1 ids2 tys dictbinds) = do
liftM5 RecStmt
(addTickLStmts isGuard stmts)
@@ -467,6 +478,20 @@ addTickStmt isGuard (RecStmt stmts ids1 ids2 tys dictbinds) = do
(return tys)
(addTickDictBinds dictbinds)
+addTick isGuard e | Just fn <- isGuard = addBinTickLHsExpr fn e
+ | otherwise = addTickLHsExprAlways e
+
+addTickStmtAndBinders isGuard (stmts, ids) =
+ liftM2 (,)
+ (addTickLStmts isGuard stmts)
+ (return ids)
+
+addTickMaybeByLHsExpr :: Maybe (LHsExpr Id) -> TM (Maybe (LHsExpr Id))
+addTickMaybeByLHsExpr maybeByExpr =
+ case maybeByExpr of
+ Nothing -> return Nothing
+ Just byExpr -> addTickLHsExprAlways byExpr >>= (return . Just)
+
addTickHsLocalBinds :: HsLocalBinds Id -> TM (HsLocalBinds Id)
addTickHsLocalBinds (HsValBinds binds) =
liftM HsValBinds
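The mapEitherM helper introduced above is just the monadic bi-map over Either; an equivalent standalone definition, for reference (mapEitherM' is a hypothetical name, not part of the patch):

\begin{code}
import Control.Monad (liftM)

-- Apply f to a Left and g to a Right, inside any monad; this is what the local
-- mapEitherM in addTickStmt does for the using/by parts of GroupBySomething.
mapEitherM' :: Monad m => (a -> m c) -> (b -> m d) -> Either a b -> m (Either c d)
mapEitherM' f g = either (liftM Left . f) (liftM Right . g)
\end{code}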
diff --git a/compiler/deSugar/DsArrows.lhs b/compiler/deSugar/DsArrows.lhs
index 7500111f4c..d828976f11 100644
--- a/compiler/deSugar/DsArrows.lhs
+++ b/compiler/deSugar/DsArrows.lhs
@@ -164,7 +164,7 @@ with s1 being the "top", the first one to be matched with a lambda.
\begin{code}
envStackType :: [Id] -> [Type] -> Type
-envStackType ids stack_tys = foldl mkCorePairTy (mkTupleType ids) stack_tys
+envStackType ids stack_tys = foldl mkCorePairTy (mkBigCoreVarTupTy ids) stack_tys
----------------------------------------------
-- buildEnvStack
@@ -173,7 +173,7 @@ envStackType ids stack_tys = foldl mkCorePairTy (mkTupleType ids) stack_tys
buildEnvStack :: [Id] -> [Id] -> CoreExpr
buildEnvStack env_ids stack_ids
- = foldl mkCorePairExpr (mkTupleExpr env_ids) (map Var stack_ids)
+ = foldl mkCorePairExpr (mkBigCoreVarTup env_ids) (map Var stack_ids)
----------------------------------------------
-- matchEnvStack
@@ -193,7 +193,7 @@ matchEnvStack :: [Id] -- x1..xn
-> DsM CoreExpr
matchEnvStack env_ids stack_ids body
= newUniqueSupply `thenDs` \ uniqs ->
- newSysLocalDs (mkTupleType env_ids) `thenDs` \ tup_var ->
+ newSysLocalDs (mkBigCoreVarTupTy env_ids) `thenDs` \ tup_var ->
matchVarStack tup_var stack_ids
(coreCaseTuple uniqs tup_var env_ids body)
@@ -257,11 +257,11 @@ dsProcExpr pat (L _ (HsCmdTop cmd [] cmd_ty ids))
dsfixCmd meth_ids locals [] cmd_ty cmd
`thenDs` \ (core_cmd, free_vars, env_ids) ->
let
- env_ty = mkTupleType env_ids
+ env_ty = mkBigCoreVarTupTy env_ids
in
mkFailExpr ProcExpr env_ty `thenDs` \ fail_expr ->
selectSimpleMatchVarL pat `thenDs` \ var ->
- matchSimply (Var var) ProcExpr pat (mkTupleExpr env_ids) fail_expr
+ matchSimply (Var var) ProcExpr pat (mkBigCoreVarTup env_ids) fail_expr
`thenDs` \ match_code ->
let
pat_ty = hsLPatType pat
@@ -303,7 +303,7 @@ dsCmd ids local_vars env_ids stack res_ty
= let
(a_arg_ty, _res_ty') = tcSplitAppTy arrow_ty
(_a_ty, arg_ty) = tcSplitAppTy a_arg_ty
- env_ty = mkTupleType env_ids
+ env_ty = mkBigCoreVarTupTy env_ids
in
dsLExpr arrow `thenDs` \ core_arrow ->
dsLExpr arg `thenDs` \ core_arg ->
@@ -331,7 +331,7 @@ dsCmd ids local_vars env_ids stack res_ty
= let
(a_arg_ty, _res_ty') = tcSplitAppTy arrow_ty
(_a_ty, arg_ty) = tcSplitAppTy a_arg_ty
- env_ty = mkTupleType env_ids
+ env_ty = mkBigCoreVarTupTy env_ids
in
dsLExpr arrow `thenDs` \ core_arrow ->
dsLExpr arg `thenDs` \ core_arg ->
@@ -587,7 +587,7 @@ dsCmd ids local_vars env_ids [] res_ty (HsDo _ctxt stmts body _)
dsCmd _ids local_vars env_ids _stack _res_ty (HsArrForm op _ args)
= let
- env_ty = mkTupleType env_ids
+ env_ty = mkBigCoreVarTupTy env_ids
in
dsLExpr op `thenDs` \ core_op ->
mapAndUnzipDs (dsTrimCmdArg local_vars env_ids) args
@@ -683,8 +683,8 @@ dsCmdDo ids local_vars env_ids res_ty (stmt:stmts) body
dsCmdLStmt ids local_vars env_ids env_ids' stmt
`thenDs` \ (core_stmt, fv_stmt) ->
returnDs (do_compose ids
- (mkTupleType env_ids)
- (mkTupleType env_ids')
+ (mkBigCoreVarTupTy env_ids)
+ (mkBigCoreVarTupTy env_ids')
res_ty
core_stmt
core_stmts,
@@ -721,12 +721,12 @@ dsCmdStmt ids local_vars env_ids out_ids (ExprStmt cmd _ c_ty)
= dsfixCmd ids local_vars [] c_ty cmd
`thenDs` \ (core_cmd, fv_cmd, env_ids1) ->
matchEnvStack env_ids []
- (mkCorePairExpr (mkTupleExpr env_ids1) (mkTupleExpr out_ids))
+ (mkCorePairExpr (mkBigCoreVarTup env_ids1) (mkBigCoreVarTup out_ids))
`thenDs` \ core_mux ->
let
- in_ty = mkTupleType env_ids
- in_ty1 = mkTupleType env_ids1
- out_ty = mkTupleType out_ids
+ in_ty = mkBigCoreVarTupTy env_ids
+ in_ty1 = mkBigCoreVarTupTy env_ids1
+ out_ty = mkBigCoreVarTupTy out_ids
before_c_ty = mkCorePairTy in_ty1 out_ty
after_c_ty = mkCorePairTy c_ty out_ty
in
@@ -756,14 +756,14 @@ dsCmdStmt ids local_vars env_ids out_ids (BindStmt pat cmd _ _)
pat_ty = hsLPatType pat
pat_vars = mkVarSet (collectPatBinders pat)
env_ids2 = varSetElems (mkVarSet out_ids `minusVarSet` pat_vars)
- env_ty2 = mkTupleType env_ids2
+ env_ty2 = mkBigCoreVarTupTy env_ids2
in
-- multiplexing function
-- \ (xs) -> ((xs1),(xs2))
matchEnvStack env_ids []
- (mkCorePairExpr (mkTupleExpr env_ids1) (mkTupleExpr env_ids2))
+ (mkCorePairExpr (mkBigCoreVarTup env_ids1) (mkBigCoreVarTup env_ids2))
`thenDs` \ core_mux ->
-- projection function
@@ -773,8 +773,8 @@ dsCmdStmt ids local_vars env_ids out_ids (BindStmt pat cmd _ _)
newUniqueSupply `thenDs` \ uniqs ->
let
after_c_ty = mkCorePairTy pat_ty env_ty2
- out_ty = mkTupleType out_ids
- body_expr = coreCaseTuple uniqs env_id env_ids2 (mkTupleExpr out_ids)
+ out_ty = mkBigCoreVarTupTy out_ids
+ body_expr = coreCaseTuple uniqs env_id env_ids2 (mkBigCoreVarTup out_ids)
in
mkFailExpr (StmtCtxt DoExpr) out_ty `thenDs` \ fail_expr ->
selectSimpleMatchVarL pat `thenDs` \ pat_id ->
@@ -787,9 +787,9 @@ dsCmdStmt ids local_vars env_ids out_ids (BindStmt pat cmd _ _)
-- put it all together
let
- in_ty = mkTupleType env_ids
- in_ty1 = mkTupleType env_ids1
- in_ty2 = mkTupleType env_ids2
+ in_ty = mkBigCoreVarTupTy env_ids
+ in_ty1 = mkBigCoreVarTupTy env_ids1
+ in_ty2 = mkBigCoreVarTupTy env_ids2
before_c_ty = mkCorePairTy in_ty1 in_ty2
in
returnDs (do_map_arrow ids in_ty before_c_ty out_ty core_mux $
@@ -806,12 +806,12 @@ dsCmdStmt ids local_vars env_ids out_ids (BindStmt pat cmd _ _)
dsCmdStmt ids local_vars env_ids out_ids (LetStmt binds)
-- build a new environment using the let bindings
- = dsLocalBinds binds (mkTupleExpr out_ids) `thenDs` \ core_binds ->
+ = dsLocalBinds binds (mkBigCoreVarTup out_ids) `thenDs` \ core_binds ->
-- match the old environment against the input
matchEnvStack env_ids [] core_binds `thenDs` \ core_map ->
returnDs (do_arr ids
- (mkTupleType env_ids)
- (mkTupleType out_ids)
+ (mkBigCoreVarTupTy env_ids)
+ (mkBigCoreVarTupTy out_ids)
core_map,
exprFreeVars core_binds `intersectVarSet` local_vars)
@@ -833,7 +833,7 @@ dsCmdStmt ids local_vars env_ids out_ids (RecStmt stmts later_ids rec_ids rhss b
= let -- ToDo: ****** binds not desugared; ROSS PLEASE FIX ********
env2_id_set = mkVarSet out_ids `minusVarSet` mkVarSet later_ids
env2_ids = varSetElems env2_id_set
- env2_ty = mkTupleType env2_ids
+ env2_ty = mkBigCoreVarTupTy env2_ids
in
-- post_loop_fn = \((later_ids),(env2_ids)) -> (out_ids)
@@ -841,9 +841,9 @@ dsCmdStmt ids local_vars env_ids out_ids (RecStmt stmts later_ids rec_ids rhss b
newUniqueSupply `thenDs` \ uniqs ->
newSysLocalDs env2_ty `thenDs` \ env2_id ->
let
- later_ty = mkTupleType later_ids
+ later_ty = mkBigCoreVarTupTy later_ids
post_pair_ty = mkCorePairTy later_ty env2_ty
- post_loop_body = coreCaseTuple uniqs env2_id env2_ids (mkTupleExpr out_ids)
+ post_loop_body = coreCaseTuple uniqs env2_id env2_ids (mkBigCoreVarTup out_ids)
in
matchEnvStack later_ids [env2_id] post_loop_body
`thenDs` \ post_loop_fn ->
@@ -856,10 +856,10 @@ dsCmdStmt ids local_vars env_ids out_ids (RecStmt stmts later_ids rec_ids rhss b
-- pre_loop_fn = \(env_ids) -> ((env1_ids),(env2_ids))
let
- env1_ty = mkTupleType env1_ids
+ env1_ty = mkBigCoreVarTupTy env1_ids
pre_pair_ty = mkCorePairTy env1_ty env2_ty
- pre_loop_body = mkCorePairExpr (mkTupleExpr env1_ids)
- (mkTupleExpr env2_ids)
+ pre_loop_body = mkCorePairExpr (mkBigCoreVarTup env1_ids)
+ (mkBigCoreVarTup env2_ids)
in
matchEnvStack env_ids [] pre_loop_body
@@ -868,8 +868,8 @@ dsCmdStmt ids local_vars env_ids out_ids (RecStmt stmts later_ids rec_ids rhss b
-- arr pre_loop_fn >>> first (loop (...)) >>> arr post_loop_fn
let
- env_ty = mkTupleType env_ids
- out_ty = mkTupleType out_ids
+ env_ty = mkBigCoreVarTupTy env_ids
+ out_ty = mkBigCoreVarTupTy out_ids
core_body = do_map_arrow ids env_ty pre_pair_ty out_ty
pre_loop_fn
(do_compose ids pre_pair_ty post_pair_ty out_ty
@@ -888,7 +888,7 @@ dsRecCmd ids local_vars stmts later_ids rec_ids rhss
= let
rec_id_set = mkVarSet rec_ids
out_ids = varSetElems (mkVarSet later_ids `unionVarSet` rec_id_set)
- out_ty = mkTupleType out_ids
+ out_ty = mkBigCoreVarTupTy out_ids
local_vars' = local_vars `unionVarSet` rec_id_set
in
@@ -896,10 +896,10 @@ dsRecCmd ids local_vars stmts later_ids rec_ids rhss
mappM dsExpr rhss `thenDs` \ core_rhss ->
let
- later_tuple = mkTupleExpr later_ids
- later_ty = mkTupleType later_ids
+ later_tuple = mkBigCoreVarTup later_ids
+ later_ty = mkBigCoreVarTupTy later_ids
rec_tuple = mkBigCoreTup core_rhss
- rec_ty = mkTupleType rec_ids
+ rec_ty = mkBigCoreVarTupTy rec_ids
out_pair = mkCorePairExpr later_tuple rec_tuple
out_pair_ty = mkCorePairTy later_ty rec_ty
in
@@ -917,7 +917,7 @@ dsRecCmd ids local_vars stmts later_ids rec_ids rhss
let
env1_id_set = fv_stmts `minusVarSet` rec_id_set
env1_ids = varSetElems env1_id_set
- env1_ty = mkTupleType env1_ids
+ env1_ty = mkBigCoreVarTupTy env1_ids
in_pair_ty = mkCorePairTy env1_ty rec_ty
core_body = mkBigCoreTup (map selectVar env_ids)
where
@@ -932,7 +932,7 @@ dsRecCmd ids local_vars stmts later_ids rec_ids rhss
-- loop (arr squash_pair_fn >>> ss >>> arr mk_pair_fn)
let
- env_ty = mkTupleType env_ids
+ env_ty = mkBigCoreVarTupTy env_ids
core_loop = do_loop ids env1_ty later_ty rec_ty
(do_map_arrow ids in_pair_ty env_ty out_pair_ty
squash_pair_fn
@@ -984,9 +984,9 @@ dsCmdStmts ids local_vars env_ids out_ids (stmt:stmts)
dsCmdLStmt ids local_vars env_ids env_ids' stmt
`thenDs` \ (core_stmt, fv_stmt) ->
returnDs (do_compose ids
- (mkTupleType env_ids)
- (mkTupleType env_ids')
- (mkTupleType out_ids)
+ (mkBigCoreVarTupTy env_ids)
+ (mkBigCoreVarTupTy env_ids')
+ (mkBigCoreVarTupTy out_ids)
core_stmt
core_stmts,
fv_stmt)
diff --git a/compiler/deSugar/DsBinds.lhs b/compiler/deSugar/DsBinds.lhs
index f03877360e..5540dd806b 100644
--- a/compiler/deSugar/DsBinds.lhs
+++ b/compiler/deSugar/DsBinds.lhs
@@ -175,7 +175,7 @@ dsHsBind auto_scc rest (AbsBinds all_tyvars dicts exports binds)
-- Rec because of mixed-up dictionary bindings
core_bind = Rec (map do_one core_prs)
- tup_expr = mkTupleExpr locals
+ tup_expr = mkBigCoreVarTup locals
tup_ty = exprType tup_expr
poly_tup_expr = mkLams all_tyvars $ mkLams dicts $
Let core_bind tup_expr
diff --git a/compiler/deSugar/DsListComp.lhs b/compiler/deSugar/DsListComp.lhs
index 68c5249c33..f4ab7b3d9c 100644
--- a/compiler/deSugar/DsListComp.lhs
+++ b/compiler/deSugar/DsListComp.lhs
@@ -38,6 +38,9 @@ import PrelNames
import PrelInfo
import SrcLoc
import Panic
+import Outputable
+
+import Control.Monad ( liftM2 )
\end{code}
List comprehensions may be desugared in one of two ways: ``ordinary''
@@ -51,35 +54,127 @@ dsListComp :: [LStmt Id]
-> LHsExpr Id
-> Type -- Type of list elements
-> DsM CoreExpr
-dsListComp lquals body elt_ty
- = getDOptsDs `thenDs` \dflags ->
- let
- quals = map unLoc lquals
- in
+dsListComp lquals body elt_ty = do
+ dflags <- getDOptsDs
+ let quals = map unLoc lquals
+
if not (dopt Opt_RewriteRules dflags) || dopt Opt_IgnoreInterfacePragmas dflags
- -- Either rules are switched off, or we are ignoring what there are;
- -- Either way foldr/build won't happen, so use the more efficient
- -- Wadler-style desugaring
- || isParallelComp quals
- -- Foldr-style desugaring can't handle
- -- parallel list comprehensions
- then deListComp quals body (mkNilExpr elt_ty)
-
- else -- Foldr/build should be enabled, so desugar
- -- into foldrs and builds
- newTyVarsDs [alphaTyVar] `thenDs` \ [n_tyvar] ->
- let
- n_ty = mkTyVarTy n_tyvar
- c_ty = mkFunTys [elt_ty, n_ty] n_ty
- in
- newSysLocalsDs [c_ty,n_ty] `thenDs` \ [c, n] ->
- dfListComp c n quals body `thenDs` \ result ->
- dsLookupGlobalId buildName `thenDs` \ build_id ->
- returnDs (Var build_id `App` Type elt_ty
- `App` mkLams [n_tyvar, c, n] result)
-
- where isParallelComp (ParStmt bndrstmtss : _) = True
- isParallelComp _ = False
+ -- Either rules are switched off, or we are ignoring what there are;
+ -- Either way foldr/build won't happen, so use the more efficient
+ -- Wadler-style desugaring
+ || isParallelComp quals
+ -- Foldr-style desugaring can't handle parallel list comprehensions
+ then deListComp quals body (mkNilExpr elt_ty)
+ else do -- Foldr/build should be enabled, so desugar
+ -- into foldrs and builds
+ [n_tyvar] <- newTyVarsDs [alphaTyVar]
+
+ let n_ty = mkTyVarTy n_tyvar
+ c_ty = mkFunTys [elt_ty, n_ty] n_ty
+ [c, n] <- newSysLocalsDs [c_ty, n_ty]
+
+ result <- dfListComp c n quals body
+ build_id <- dsLookupGlobalId buildName
+ returnDs (Var build_id `App` Type elt_ty `App` mkLams [n_tyvar, c, n] result)
+
+ where
+ -- We must test for ParStmt anywhere, not just at the head, because an extension
+ -- to list comprehensions would be to add brackets to specify the associativity
+ -- of qualifier lists. This is really easy to do by adding extra ParStmts into the
+ -- mix of possibly a single element in length, so we do this to leave the possibility open
+ isParallelComp = any isParallelStmt
+
+ isParallelStmt (ParStmt _) = True
+ isParallelStmt _ = False
+
+
+-- Desugar an inner list comprehension, together with the binders of that comprehension
+-- that are needed in the outer comprehension, into an expression and the type of the
+-- elements that it outputs (tuples of those binders)
+dsInnerListComp :: ([LStmt Id], [Id]) -> DsM (CoreExpr, Type)
+dsInnerListComp (stmts, bndrs) = do
+ expr <- dsListComp stmts (mkBigLHsVarTup bndrs) bndrs_tuple_type
+ return (expr, bndrs_tuple_type)
+ where
+ bndrs_types = map idType bndrs
+ bndrs_tuple_type = mkBigCoreTupTy bndrs_types
+
+
+-- This function factors out commonality between the desugaring strategies for TransformStmt.
+-- Given such a statement it gives you back an expression representing how to compute the transformed
+-- list and the tuple that you need to bind from that list in order to proceed with your desugaring
+dsTransformStmt :: Stmt Id -> DsM (CoreExpr, LPat Id)
+dsTransformStmt (TransformStmt (stmts, binders) usingExpr maybeByExpr) = do
+ (expr, binders_tuple_type) <- dsInnerListComp (stmts, binders)
+ usingExpr' <- dsLExpr usingExpr
+
+ using_args <-
+ case maybeByExpr of
+ Nothing -> return [expr]
+ Just byExpr -> do
+ byExpr' <- dsLExpr byExpr
+
+ us <- newUniqueSupply
+ [tuple_binder] <- newSysLocalsDs [binders_tuple_type]
+ let byExprWrapper = mkTupleCase us binders byExpr' tuple_binder (Var tuple_binder)
+
+ return [Lam tuple_binder byExprWrapper, expr]
+
+ let inner_list_expr = mkApps usingExpr' ((Type binders_tuple_type) : using_args)
+
+ let pat = mkBigLHsVarPatTup binders
+ return (inner_list_expr, pat)
+
+-- This function factors out commonality between the desugaring strategies for GroupStmt.
+-- Given such a statement it gives you back an expression representing how to compute the transformed
+-- list and the tuple that you need to bind from that list in order to proceed with your desugaring
+dsGroupStmt :: Stmt Id -> DsM (CoreExpr, LPat Id)
+dsGroupStmt (GroupStmt (stmts, binderMap) groupByClause) = do
+ let (fromBinders, toBinders) = unzip binderMap
+
+ fromBindersTypes = map idType fromBinders
+ toBindersTypes = map idType toBinders
+
+ toBindersTupleType = mkBigCoreTupTy toBindersTypes
+
+ -- Desugar an inner comprehension which outputs a list of tuples of the "from" binders
+ (expr, fromBindersTupleType) <- dsInnerListComp (stmts, fromBinders)
+
+ -- Work out what arguments should be supplied to that expression: i.e. is an extraction
+ -- function required? If so, create that desugared function and add to arguments
+ (usingExpr', usingArgs) <-
+ case groupByClause of
+ GroupByNothing usingExpr -> liftM2 (,) (dsLExpr usingExpr) (return [expr])
+ GroupBySomething usingExpr byExpr -> do
+ usingExpr' <- dsLExpr (either id noLoc usingExpr)
+
+ byExpr' <- dsLExpr byExpr
+
+ us <- newUniqueSupply
+ [fromBindersTuple] <- newSysLocalsDs [fromBindersTupleType]
+ let byExprWrapper = mkTupleCase us fromBinders byExpr' fromBindersTuple (Var fromBindersTuple)
+
+ return (usingExpr', [Lam fromBindersTuple byExprWrapper, expr])
+
+ -- Create an unzip function for the appropriate arity and element types and find "map"
+ (unzip_fn, unzip_rhs) <- mkUnzipBind fromBindersTypes
+ map_id <- dsLookupGlobalId mapName
+
+ -- Generate the expressions to build the grouped list
+ let -- First we apply the grouping function to the inner list
+ inner_list_expr = mkApps usingExpr' ((Type fromBindersTupleType) : usingArgs)
+ -- Then we map our "unzip" across it to turn the lists of tuples into tuples of lists
+ -- We make sure we instantiate the type variable "a" to be a list of "from" tuples and
+ -- the "b" to be a tuple of "to" lists!
+ unzipped_inner_list_expr = mkApps (Var map_id)
+ [Type (mkListTy fromBindersTupleType), Type toBindersTupleType, Var unzip_fn, inner_list_expr]
+ -- Then finally we bind the unzip function around that expression
+ bound_unzipped_inner_list_expr = Let (Rec [(unzip_fn, unzip_rhs)]) unzipped_inner_list_expr
+
+ -- Build a pattern that ensures the consumer binds into the NEW binders, which hold lists rather than single values
+ let pat = mkBigLHsVarPatTup toBinders
+ return (bound_unzipped_inner_list_expr, pat)
+
\end{code}
%************************************************************************
@@ -147,11 +242,15 @@ The introduced tuples are Boxed, but only because I couldn't get it to work
with the Unboxed variety.
\begin{code}
+
deListComp :: [Stmt Id] -> LHsExpr Id -> CoreExpr -> DsM CoreExpr
deListComp (ParStmt stmtss_w_bndrs : quals) body list
- = mappM do_list_comp stmtss_w_bndrs `thenDs` \ exps ->
- mkZipBind qual_tys `thenDs` \ (zip_fn, zip_rhs) ->
+ = do
+ exps_and_qual_tys <- mappM dsInnerListComp stmtss_w_bndrs
+ let (exps, qual_tys) = unzip exps_and_qual_tys
+
+ (zip_fn, zip_rhs) <- mkZipBind qual_tys
-- Deal with [e | pat <- zip l1 .. ln] in example above
deBindComp pat (Let (Rec [(zip_fn, zip_rhs)]) (mkApps (Var zip_fn) exps))
@@ -161,17 +260,8 @@ deListComp (ParStmt stmtss_w_bndrs : quals) body list
bndrs_s = map snd stmtss_w_bndrs
-- pat is the pattern ((x1,..,xn), (y1,..,ym)) in the example above
- pat = mkTuplePat pats
- pats = map mk_hs_tuple_pat bndrs_s
-
- -- Types of (x1,..,xn), (y1,..,yn) etc
- qual_tys = map mk_bndrs_tys bndrs_s
-
- do_list_comp (stmts, bndrs)
- = dsListComp stmts (mk_hs_tuple_expr bndrs)
- (mk_bndrs_tys bndrs)
-
- mk_bndrs_tys bndrs = mkCoreTupTy (map idType bndrs)
+ pat = mkBigLHsPatTup pats
+ pats = map mkBigLHsVarPatTup bndrs_s
-- Last: the one to return
deListComp [] body list -- Figure 7.4, SLPJ, p 135, rule C above
@@ -189,6 +279,14 @@ deListComp (LetStmt binds : quals) body list
= deListComp quals body list `thenDs` \ core_rest ->
dsLocalBinds binds core_rest
+deListComp (stmt@(TransformStmt _ _ _) : quals) body list = do
+ (inner_list_expr, pat) <- dsTransformStmt stmt
+ deBindComp pat inner_list_expr quals body list
+
+deListComp (stmt@(GroupStmt _ _) : quals) body list = do
+ (inner_list_expr, pat) <- dsGroupStmt stmt
+ deBindComp pat inner_list_expr quals body list
+
deListComp (BindStmt pat list1 _ _ : quals) body core_list2 -- rule A' above
= dsLExpr list1 `thenDs` \ core_list1 ->
deBindComp pat core_list1 quals body core_list2
@@ -196,81 +294,36 @@ deListComp (BindStmt pat list1 _ _ : quals) body core_list2 -- rule A' above
\begin{code}
-deBindComp pat core_list1 quals body core_list2
- = let
- u3_ty@u1_ty = exprType core_list1 -- two names, same thing
+deBindComp pat core_list1 quals body core_list2 = do
+ let
+ u3_ty@u1_ty = exprType core_list1 -- two names, same thing
- -- u1_ty is a [alpha] type, and u2_ty = alpha
- u2_ty = hsLPatType pat
+ -- u1_ty is a [alpha] type, and u2_ty = alpha
+ u2_ty = hsLPatType pat
- res_ty = exprType core_list2
- h_ty = u1_ty `mkFunTy` res_ty
- in
- newSysLocalsDs [h_ty, u1_ty, u2_ty, u3_ty] `thenDs` \ [h, u1, u2, u3] ->
+ res_ty = exprType core_list2
+ h_ty = u1_ty `mkFunTy` res_ty
+
+ [h, u1, u2, u3] <- newSysLocalsDs [h_ty, u1_ty, u2_ty, u3_ty]
-- the "fail" value ...
let
- core_fail = App (Var h) (Var u3)
- letrec_body = App (Var h) core_list1
- in
- deListComp quals body core_fail `thenDs` \ rest_expr ->
- matchSimply (Var u2) (StmtCtxt ListComp) pat
- rest_expr core_fail `thenDs` \ core_match ->
+ core_fail = App (Var h) (Var u3)
+ letrec_body = App (Var h) core_list1
+
+ rest_expr <- deListComp quals body core_fail
+ core_match <- matchSimply (Var u2) (StmtCtxt ListComp) pat rest_expr core_fail
+
let
- rhs = Lam u1 $
+ rhs = Lam u1 $
Case (Var u1) u1 res_ty
[(DataAlt nilDataCon, [], core_list2),
(DataAlt consDataCon, [u2, u3], core_match)]
-- Increasing order of tag
- in
- returnDs (Let (Rec [(h, rhs)]) letrec_body)
-\end{code}
-
-
-\begin{code}
-mkZipBind :: [Type] -> DsM (Id, CoreExpr)
--- mkZipBind [t1, t2]
--- = (zip, \as1:[t1] as2:[t2]
--- -> case as1 of
--- [] -> []
--- (a1:as'1) -> case as2 of
--- [] -> []
--- (a2:as'2) -> (a2,a2) : zip as'1 as'2)]
-
-mkZipBind elt_tys
- = mappM newSysLocalDs list_tys `thenDs` \ ass ->
- mappM newSysLocalDs elt_tys `thenDs` \ as' ->
- mappM newSysLocalDs list_tys `thenDs` \ as's ->
- newSysLocalDs zip_fn_ty `thenDs` \ zip_fn ->
- let
- inner_rhs = mkConsExpr ret_elt_ty
- (mkCoreTup (map Var as'))
- (mkVarApps (Var zip_fn) as's)
- zip_body = foldr mk_case inner_rhs (zip3 ass as' as's)
- in
- returnDs (zip_fn, mkLams ass zip_body)
- where
- list_tys = map mkListTy elt_tys
- ret_elt_ty = mkCoreTupTy elt_tys
- list_ret_ty = mkListTy ret_elt_ty
- zip_fn_ty = mkFunTys list_tys list_ret_ty
-
- mk_case (as, a', as') rest
- = Case (Var as) as list_ret_ty
- [(DataAlt nilDataCon, [], mkNilExpr ret_elt_ty),
- (DataAlt consDataCon, [a', as'], rest)]
- -- Increasing order of tag
--- Helper functions that makes an HsTuple only for non-1-sized tuples
-mk_hs_tuple_expr :: [Id] -> LHsExpr Id
-mk_hs_tuple_expr [] = nlHsVar unitDataConId
-mk_hs_tuple_expr [id] = nlHsVar id
-mk_hs_tuple_expr ids = noLoc $ ExplicitTuple [ nlHsVar i | i <- ids ] Boxed
-
-mk_hs_tuple_pat :: [Id] -> LPat Id
-mk_hs_tuple_pat bs = mkTuplePat (map nlVarPat bs)
+
+ return (Let (Rec [(h, rhs)]) letrec_body)
\end{code}
-
%************************************************************************
%* *
\subsection[DsListComp-foldr-build]{Foldr/Build desugaring of list comprehensions}
@@ -291,10 +344,10 @@ TE[ e | p <- l , q ] c n = let
\end{verbatim}
\begin{code}
-dfListComp :: Id -> Id -- 'c' and 'n'
- -> [Stmt Id] -- the rest of the qual's
- -> LHsExpr Id
- -> DsM CoreExpr
+dfListComp :: Id -> Id -- 'c' and 'n'
+ -> [Stmt Id] -- the rest of the qual's
+ -> LHsExpr Id
+ -> DsM CoreExpr
-- Last: the one to return
dfListComp c_id n_id [] body
@@ -312,34 +365,144 @@ dfListComp c_id n_id (LetStmt binds : quals) body
= dfListComp c_id n_id quals body `thenDs` \ core_rest ->
dsLocalBinds binds core_rest
-dfListComp c_id n_id (BindStmt pat list1 _ _ : quals) body
+dfListComp c_id n_id (stmt@(TransformStmt _ _ _) : quals) body = do
+ (inner_list_expr, pat) <- dsTransformStmt stmt
+ -- Anyway, we bind the newly transformed list via the generic binding function
+ dfBindComp c_id n_id (pat, inner_list_expr) quals body
+
+dfListComp c_id n_id (stmt@(GroupStmt _ _) : quals) body = do
+ (inner_list_expr, pat) <- dsGroupStmt stmt
+ -- Anyway, we bind the newly grouped list via the generic binding function
+ dfBindComp c_id n_id (pat, inner_list_expr) quals body
+
+dfListComp c_id n_id (BindStmt pat list1 _ _ : quals) body = do
-- evaluate the two lists
- = dsLExpr list1 `thenDs` \ core_list1 ->
-
+ core_list1 <- dsLExpr list1
+
+ -- Do the rest of the work in the generic binding builder
+ dfBindComp c_id n_id (pat, core_list1) quals body
+
+dfBindComp :: Id -> Id -- 'c' and 'n'
+ -> (LPat Id, CoreExpr)
+ -> [Stmt Id] -- the rest of the qual's
+ -> LHsExpr Id
+ -> DsM CoreExpr
+dfBindComp c_id n_id (pat, core_list1) quals body = do
-- find the required type
let x_ty = hsLPatType pat
- b_ty = idType n_id
- in
+ b_ty = idType n_id
-- create some new local id's
- newSysLocalsDs [b_ty,x_ty] `thenDs` \ [b,x] ->
+ [b, x] <- newSysLocalsDs [b_ty, x_ty]
-- build rest of the comprehension
- dfListComp c_id b quals body `thenDs` \ core_rest ->
+ core_rest <- dfListComp c_id b quals body
-- build the pattern match
- matchSimply (Var x) (StmtCtxt ListComp)
- pat core_rest (Var b) `thenDs` \ core_expr ->
+ core_expr <- matchSimply (Var x) (StmtCtxt ListComp)
+ pat core_rest (Var b)
-- now build the outermost foldr, and return
- dsLookupGlobalId foldrName `thenDs` \ foldr_id ->
- returnDs (
- Var foldr_id `App` Type x_ty
- `App` Type b_ty
- `App` mkLams [x, b] core_expr
- `App` Var n_id
- `App` core_list1
- )
+ foldr_id <- dsLookupGlobalId foldrName
+ return (Var foldr_id `App` Type x_ty
+ `App` Type b_ty
+ `App` mkLams [x, b] core_expr
+ `App` Var n_id
+ `App` core_list1)
+
+\end{code}
+
+%************************************************************************
+%* *
+\subsection[DsFunGeneration]{Generation of zip/unzip functions for use in desugaring}
+%* *
+%************************************************************************
+
+\begin{code}
+
+mkZipBind :: [Type] -> DsM (Id, CoreExpr)
+-- mkZipBind [t1, t2]
+-- = (zip, \as1:[t1] as2:[t2]
+-- -> case as1 of
+-- [] -> []
+-- (a1:as'1) -> case as2 of
+-- [] -> []
+-- (a2:as'2) -> (a1, a2) : zip as'1 as'2)]
+
+mkZipBind elt_tys = do
+ ass <- mappM newSysLocalDs elt_list_tys
+ as' <- mappM newSysLocalDs elt_tys
+ as's <- mappM newSysLocalDs elt_list_tys
+
+ zip_fn <- newSysLocalDs zip_fn_ty
+
+ let inner_rhs = mkConsExpr elt_tuple_ty
+ (mkBigCoreVarTup as')
+ (mkVarApps (Var zip_fn) as's)
+ zip_body = foldr mk_case inner_rhs (zip3 ass as' as's)
+
+ return (zip_fn, mkLams ass zip_body)
+ where
+ elt_list_tys = map mkListTy elt_tys
+ elt_tuple_ty = mkBigCoreTupTy elt_tys
+ elt_tuple_list_ty = mkListTy elt_tuple_ty
+
+ zip_fn_ty = mkFunTys elt_list_tys elt_tuple_list_ty
+
+ mk_case (as, a', as') rest
+ = Case (Var as) as elt_tuple_list_ty
+ [(DataAlt nilDataCon, [], mkNilExpr elt_tuple_ty),
+ (DataAlt consDataCon, [a', as'], rest)]
+ -- Increasing order of tag
+
+
+mkUnzipBind :: [Type] -> DsM (Id, CoreExpr)
+-- mkUnzipBind [t1, t2]
+-- = (unzip, \ys :: [(t1, t2)] -> foldr (\ax :: (t1, t2) axs :: ([t1], [t2])
+-- -> case ax of
+-- (x1, x2) -> case axs of
+-- (xs1, xs2) -> (x1 : xs1, x2 : xs2))
+-- ([], [])
+-- ys)
+--
+-- We use foldr here in all cases, even if rules are turned off, because we may as well!
+mkUnzipBind elt_tys = do
+ ax <- newSysLocalDs elt_tuple_ty
+ axs <- newSysLocalDs elt_list_tuple_ty
+ ys <- newSysLocalDs elt_tuple_list_ty
+ xs <- mappM newSysLocalDs elt_tys
+ xss <- mappM newSysLocalDs elt_list_tys
+
+ unzip_fn <- newSysLocalDs unzip_fn_ty
+
+ foldr_id <- dsLookupGlobalId foldrName
+ [us1, us2] <- sequence [newUniqueSupply, newUniqueSupply]
+
+ let nil_tuple = mkBigCoreTup (map mkNilExpr elt_tys)
+
+ concat_expressions = map mkConcatExpression (zip3 elt_tys (map Var xs) (map Var xss))
+ tupled_concat_expression = mkBigCoreTup concat_expressions
+
+ folder_body_inner_case = mkTupleCase us1 xss tupled_concat_expression axs (Var axs)
+ folder_body_outer_case = mkTupleCase us2 xs folder_body_inner_case ax (Var ax)
+ folder_body = mkLams [ax, axs] folder_body_outer_case
+
+ unzip_body = mkApps (Var foldr_id) [Type elt_tuple_ty, Type elt_list_tuple_ty, folder_body, nil_tuple, Var ys]
+ unzip_body_saturated = mkLams [ys] unzip_body
+
+ return (unzip_fn, unzip_body_saturated)
+ where
+ elt_tuple_ty = mkBigCoreTupTy elt_tys
+ elt_tuple_list_ty = mkListTy elt_tuple_ty
+ elt_list_tys = map mkListTy elt_tys
+ elt_list_tuple_ty = mkBigCoreTupTy elt_list_tys
+
+ unzip_fn_ty = elt_tuple_list_ty `mkFunTy` elt_list_tuple_ty
+
+ mkConcatExpression (list_element_ty, head, tail) = mkConsExpr list_element_ty head tail
+
+
+
\end{code}
%************************************************************************
@@ -354,10 +517,10 @@ dfListComp c_id n_id (BindStmt pat list1 _ _ : quals) body
--
-- [:e | qss:] = <<[:e | qss:]>> () [:():]
--
-dsPArrComp :: [Stmt Id]
- -> LHsExpr Id
- -> Type -- Don't use; called with `undefined' below
- -> DsM CoreExpr
+dsPArrComp :: [Stmt Id]
+ -> LHsExpr Id
+ -> Type -- Don't use; called with `undefined' below
+ -> DsM CoreExpr
dsPArrComp [ParStmt qss] body _ = -- parallel comprehension
dePArrParComp qss body
dsPArrComp qs body _ = -- no ParStmt in `qs'
@@ -365,7 +528,7 @@ dsPArrComp qs body _ = -- no ParStmt in `qs'
let unitArray = mkApps (Var sglP) [Type unitTy,
mkCoreTup []]
in
- dePArrComp qs body (mkTuplePat []) unitArray
+ dePArrComp qs body (mkLHsPatTup []) unitArray
@@ -426,7 +589,7 @@ dePArrComp (BindStmt p e _ _ : qs) body pa cea =
mkLambda ety'cea pa cef `thenDs` \(clam,
_ ) ->
let ety'cef = ety'ce -- filter doesn't change the element type
- pa' = mkTuplePat [pa, p]
+ pa' = mkLHsPatTup [pa, p]
in
dePArrComp qs body pa' (mkApps (Var crossMapP)
[Type ety'cea, Type ety'cef, cea, clam])
@@ -452,7 +615,7 @@ dePArrComp (LetStmt ds : qs) body pa cea =
in
mkErrorAppDs pAT_ERROR_ID errTy errMsg `thenDs` \cerr ->
matchSimply (Var v) (StmtCtxt PArrComp) pa projBody cerr`thenDs` \ccase ->
- let pa' = mkTuplePat [pa, mkTuplePat (map nlVarPat xs)]
+ let pa' = mkLHsPatTup [pa, mkLHsPatTup (map nlVarPat xs)]
proj = mkLams [v] ccase
in
dePArrComp qs body pa' (mkApps (Var mapP)
@@ -480,17 +643,17 @@ dePArrParComp qss body =
-- empty parallel statement lists have no source representation
panic "DsListComp.dePArrComp: Empty parallel list comprehension"
deParStmt ((qs, xs):qss) = -- first statement
- let res_expr = mkExplicitTuple (map nlHsVar xs)
+ let res_expr = mkLHsVarTup xs
in
dsPArrComp (map unLoc qs) res_expr undefined `thenDs` \cqs ->
- parStmts qss (mkTuplePat (map nlVarPat xs)) cqs
+ parStmts qss (mkLHsVarPatTup xs) cqs
---
parStmts [] pa cea = return (pa, cea)
parStmts ((qs, xs):qss) pa cea = -- subsequent statements (zip'ed)
dsLookupGlobalId zipPName `thenDs` \zipP ->
- let pa' = mkTuplePat [pa, mkTuplePat (map nlVarPat xs)]
+ let pa' = mkLHsPatTup [pa, mkLHsVarPatTup xs]
ty'cea = parrElemType cea
- res_expr = mkExplicitTuple (map nlHsVar xs)
+ res_expr = mkLHsVarTup xs
in
dsPArrComp (map unLoc qs) res_expr undefined `thenDs` \cqs ->
let ty'cqs = parrElemType cqs
@@ -532,16 +695,4 @@ parrElemType e =
Just (tycon, [ty]) | tycon == parrTyCon -> ty
_ -> panic
"DsListComp.parrElemType: not a parallel array type"
-
--- Smart constructor for source tuple patterns
---
-mkTuplePat :: [LPat Id] -> LPat Id
-mkTuplePat [lpat] = lpat
-mkTuplePat lpats = noLoc $ mkVanillaTuplePat lpats Boxed
-
--- Smart constructor for source tuple expressions
---
-mkExplicitTuple :: [LHsExpr id] -> LHsExpr id
-mkExplicitTuple [lexp] = lexp
-mkExplicitTuple lexps = noLoc $ ExplicitTuple lexps Boxed
\end{code}
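For orientation, here is a rough source-level reading of what dsTransformStmt and dsGroupStmt arrange. This is a sketch only: the real code emits Core directly, builds the binder tuple with the big-tuple combinators from DsUtils, and uses the arity-matched unzip generated by mkUnzipBind (written unzipN below).

\begin{code}
--   [ body | quals, then f by e, rest ]
--     ~   [ body | bndrs <- f (\bndrs -> e) [ bndrs | quals ], rest ]
--
--   [ body | quals, then group by e using f, rest ]
--     ~   [ body | bndrs' <- map unzipN (f (\bndrs -> e) [ bndrs | quals ]), rest ]
--
-- where bndrs is the tuple of binders bound by quals, and in the group case each
-- of those binders is rebound (bndrs') as a list of its per-group values.

-- A concrete hand-desugared instance (plain Haskell, no extension needed) of what
-- "[ x * 2 | x <- [3,1,2], then take 2 ]" means under this patch:
transformExample :: [Int]
transformExample = [ x * 2 | x <- take 2 [ x | x <- [3,1,2] ] ]   -- == [6,2]
\end{code}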
diff --git a/compiler/deSugar/DsUtils.lhs b/compiler/deSugar/DsUtils.lhs
index 9d787add26..27e0be4d38 100644
--- a/compiler/deSugar/DsUtils.lhs
+++ b/compiler/deSugar/DsUtils.lhs
@@ -8,12 +8,6 @@ Utilities for desugaring
This module exports some utility functions of no great interest.
\begin{code}
-{-# OPTIONS -w #-}
--- The above warning supression flag is a temporary kludge.
--- While working on this module you are encouraged to remove it and fix
--- any warnings in the module. See
--- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#Warnings
--- for details
module DsUtils (
EquationInfo(..),
@@ -34,9 +28,19 @@ module DsUtils (
mkIntExpr, mkCharExpr,
mkStringExpr, mkStringExprFS, mkIntegerExpr,
- mkSelectorBinds, mkTupleExpr, mkTupleSelector,
- mkTupleType, mkTupleCase, mkBigCoreTup,
- mkCoreTup, mkCoreTupTy, seqVar,
+ seqVar,
+
+ -- Core tuples
+ mkCoreVarTup, mkCoreTup, mkCoreVarTupTy, mkCoreTupTy,
+ mkBigCoreVarTup, mkBigCoreTup, mkBigCoreVarTupTy, mkBigCoreTupTy,
+
+ -- LHs tuples
+ mkLHsVarTup, mkLHsTup, mkLHsVarPatTup, mkLHsPatTup,
+ mkBigLHsVarTup, mkBigLHsTup, mkBigLHsVarPatTup, mkBigLHsPatTup,
+
+ -- Tuple bindings
+ mkSelectorBinds, mkTupleSelector,
+ mkSmallTupleCase, mkTupleCase,
dsSyntaxTable, lookupEvidence,
@@ -151,17 +155,18 @@ mkDsApps :: CoreExpr -> [CoreExpr] -> CoreExpr
mkDsApps fun args
= go fun (exprType fun) args
where
- go fun fun_ty [] = fun
+ go fun _ [] = fun
go fun fun_ty (Type ty : args) = go (App fun (Type ty)) (applyTy fun_ty ty) args
go fun fun_ty (arg : args) = go (mk_val_app fun arg arg_ty res_ty) res_ty args
where
(arg_ty, res_ty) = splitFunTy fun_ty
-----------
-mk_val_app fun arg arg_ty res_ty -- See Note [CoreSyn let/app invariant]
+mk_val_app :: CoreExpr -> CoreExpr -> Type -> Type -> CoreExpr
+mk_val_app fun arg arg_ty _ -- See Note [CoreSyn let/app invariant]
| not (isUnLiftedType arg_ty) || exprOkForSpeculation arg
= App fun arg -- The vastly common case
-mk_val_app (Var f `App` Type ty1 `App` Type ty2 `App` arg1) arg2 _ res_ty
+mk_val_app (Var f `App` Type ty1 `App` Type _ `App` arg1) arg2 _ res_ty
| f == seqId -- Note [Desugaring seq]
= Case arg1 (mkWildId ty1) res_ty [(DEFAULT,[],arg2)]
@@ -227,11 +232,12 @@ selectSimpleMatchVarL pat = selectMatchVar (unLoc pat)
selectMatchVars :: [Pat Id] -> DsM [Id]
selectMatchVars ps = mapM selectMatchVar ps
+selectMatchVar :: Pat Id -> DsM Id
selectMatchVar (BangPat pat) = selectMatchVar (unLoc pat)
selectMatchVar (LazyPat pat) = selectMatchVar (unLoc pat)
selectMatchVar (ParPat pat) = selectMatchVar (unLoc pat)
selectMatchVar (VarPat var) = return var
-selectMatchVar (AsPat var pat) = return (unLoc var)
+selectMatchVar (AsPat var _) = return (unLoc var)
selectMatchVar other_pat = newSysLocalDs (hsPatType other_pat)
-- OK, better make up one...
\end{code}
@@ -267,10 +273,10 @@ alwaysFailMatchResult :: MatchResult
alwaysFailMatchResult = MatchResult CanFail (\fail -> returnDs fail)
cantFailMatchResult :: CoreExpr -> MatchResult
-cantFailMatchResult expr = MatchResult CantFail (\ ignore -> returnDs expr)
+cantFailMatchResult expr = MatchResult CantFail (\_ -> returnDs expr)
extractMatchResult :: MatchResult -> CoreExpr -> DsM CoreExpr
-extractMatchResult (MatchResult CantFail match_fn) fail_expr
+extractMatchResult (MatchResult CantFail match_fn) _
= match_fn (error "It can't fail!")
extractMatchResult (MatchResult CanFail match_fn) fail_expr
@@ -289,7 +295,7 @@ combineMatchResults (MatchResult CanFail body_fn1)
body_fn1 duplicatable_expr `thenDs` \ body1 ->
returnDs (Let fail_bind body1)
-combineMatchResults match_result1@(MatchResult CantFail body_fn1) match_result2
+combineMatchResults match_result1@(MatchResult CantFail _) _
= match_result1
adjustMatchResult :: DsWrapper -> MatchResult -> MatchResult
@@ -330,7 +336,7 @@ mkEvalMatchResult var ty
= adjustMatchResult (\e -> Case (Var var) var ty [(DEFAULT, [], e)])
mkGuardedMatchResult :: CoreExpr -> MatchResult -> MatchResult
-mkGuardedMatchResult pred_expr (MatchResult can_it_fail body_fn)
+mkGuardedMatchResult pred_expr (MatchResult _ body_fn)
= MatchResult CanFail (\fail -> body_fn fail `thenDs` \ body ->
returnDs (mkIfThenElse pred_expr body fail))
@@ -430,8 +436,8 @@ mkCoAlgCaseMatchResult var ty match_alts
case (isPArrFakeCon dcon, isPArrFakeAlts alts) of
(True , True ) -> True
(False, False) -> False
- _ ->
- panic "DsUtils: You may not mix `[:...:]' with `PArr' patterns"
+ _ -> panic "DsUtils: you may not mix `[:...:]' with `PArr' patterns"
+ isPArrFakeAlts [] = panic "DsUtils: unexpectedly found an empty list of PArr fake alternatives"
--
mk_parrCase fail =
dsLookupGlobalId lengthPName `thenDs` \lengthP ->
@@ -540,6 +546,7 @@ mkIntegerExpr i
in
returnDs (horner tARGET_MAX_INT i)
+mkSmallIntegerLit :: DataCon -> Integer -> CoreExpr
mkSmallIntegerLit small_integer_data_con i = mkConApp small_integer_data_con [mkIntLit i]
mkStringExpr str = mkStringExprFS (mkFastString str)
@@ -643,7 +650,7 @@ mkSelectorBinds pat val_expr
returnDs ( (tuple_var, tuple_expr) : map mk_tup_bind binders )
where
binders = collectPatBinders pat
- local_tuple = mkTupleExpr binders
+ local_tuple = mkBigCoreVarTup binders
tuple_ty = exprType local_tuple
mk_bind scrut_var err_var bndr_var
@@ -662,44 +669,28 @@ mkSelectorBinds pat val_expr
is_simple_pat (TuplePat ps Boxed _) = all is_triv_lpat ps
is_simple_pat (ConPatOut{ pat_args = ps }) = all is_triv_lpat (hsConPatArgs ps)
is_simple_pat (VarPat _) = True
- is_simple_pat (ParPat p) = is_simple_lpat p
- is_simple_pat other = False
+ is_simple_pat (ParPat p) = is_simple_lpat p
+ is_simple_pat _ = False
is_triv_lpat p = is_triv_pat (unLoc p)
- is_triv_pat (VarPat v) = True
+ is_triv_pat (VarPat _) = True
is_triv_pat (WildPat _) = True
is_triv_pat (ParPat p) = is_triv_lpat p
- is_triv_pat other = False
+ is_triv_pat _ = False
\end{code}
%************************************************************************
%* *
- Tuples
+ Big Tuples
%* *
%************************************************************************
-@mkTupleExpr@ builds a tuple; the inverse to @mkTupleSelector@.
-
-* If it has only one element, it is the identity function.
-
-* If there are more elements than a big tuple can have, it nests
- the tuples.
-
Nesting policy. Better a 2-tuple of 10-tuples (3 objects) than
a 10-tuple of 2-tuples (11 objects). So we want the leaves to be big.
\begin{code}
-mkTupleExpr :: [Id] -> CoreExpr
-mkTupleExpr ids = mkBigCoreTup (map Var ids)
-
--- corresponding type
-mkTupleType :: [Id] -> Type
-mkTupleType ids = mkBigTuple mkCoreTupTy (map idType ids)
-
-mkBigCoreTup :: [CoreExpr] -> CoreExpr
-mkBigCoreTup = mkBigTuple mkCoreTup
mkBigTuple :: ([a] -> a) -> [a] -> a
mkBigTuple small_tuple as = mk_big_tuple (chunkify as)
@@ -713,11 +704,99 @@ chunkify :: [a] -> [[a]]
-- But there may be more than mAX_TUPLE_SIZE sub-lists
chunkify xs
| n_xs <= mAX_TUPLE_SIZE = {- pprTrace "Small" (ppr n_xs) -} [xs]
- | otherwise = {- pprTrace "Big" (ppr n_xs) -} (split xs)
+ | otherwise = {- pprTrace "Big" (ppr n_xs) -} (split xs)
where
n_xs = length xs
split [] = []
split xs = take mAX_TUPLE_SIZE xs : split (drop mAX_TUPLE_SIZE xs)
+
+\end{code}
+
+Creating tuples and their types for Core expressions
+
+@mkBigCoreVarTup@ builds a tuple; the inverse to @mkTupleSelector@.
+
+* If it has only one element, it is the identity function.
+
+* If there are more elements than a big tuple can have, it nests
+ the tuples.
+
+\begin{code}
+
+-- Small tuples: build exactly the specified tuple
+mkCoreVarTup :: [Id] -> CoreExpr
+mkCoreVarTup ids = mkCoreTup (map Var ids)
+
+mkCoreVarTupTy :: [Id] -> Type
+mkCoreVarTupTy ids = mkCoreTupTy (map idType ids)
+
+
+mkCoreTup :: [CoreExpr] -> CoreExpr
+mkCoreTup [] = Var unitDataConId
+mkCoreTup [c] = c
+mkCoreTup cs = mkConApp (tupleCon Boxed (length cs))
+ (map (Type . exprType) cs ++ cs)
+
+mkCoreTupTy :: [Type] -> Type
+mkCoreTupTy [ty] = ty
+mkCoreTupTy tys = mkTupleTy Boxed (length tys) tys
+
+
+
+-- Big tuples
+mkBigCoreVarTup :: [Id] -> CoreExpr
+mkBigCoreVarTup ids = mkBigCoreTup (map Var ids)
+
+mkBigCoreVarTupTy :: [Id] -> Type
+mkBigCoreVarTupTy ids = mkBigCoreTupTy (map idType ids)
+
+
+mkBigCoreTup :: [CoreExpr] -> CoreExpr
+mkBigCoreTup = mkBigTuple mkCoreTup
+
+mkBigCoreTupTy :: [Type] -> Type
+mkBigCoreTupTy = mkBigTuple mkCoreTupTy
+
+\end{code}
+
+Creating tuples and their types for full Haskell expressions
+
+\begin{code}
+
+-- Smart constructors for source tuple expressions
+mkLHsVarTup :: [Id] -> LHsExpr Id
+mkLHsVarTup ids = mkLHsTup (map nlHsVar ids)
+
+mkLHsTup :: [LHsExpr Id] -> LHsExpr Id
+mkLHsTup [] = nlHsVar unitDataConId
+mkLHsTup [lexp] = lexp
+mkLHsTup lexps = noLoc $ ExplicitTuple lexps Boxed
+
+
+-- Smart constructors for source tuple patterns
+mkLHsVarPatTup :: [Id] -> LPat Id
+mkLHsVarPatTup bs = mkLHsPatTup (map nlVarPat bs)
+
+mkLHsPatTup :: [LPat Id] -> LPat Id
+mkLHsPatTup [lpat] = lpat
+mkLHsPatTup lpats = noLoc $ mkVanillaTuplePat lpats Boxed -- Handles the case where lpats = [] gracefully
+
+
+-- The Big equivalents for the source tuple expressions
+mkBigLHsVarTup :: [Id] -> LHsExpr Id
+mkBigLHsVarTup ids = mkBigLHsTup (map nlHsVar ids)
+
+mkBigLHsTup :: [LHsExpr Id] -> LHsExpr Id
+mkBigLHsTup = mkBigTuple mkLHsTup
+
+
+-- The Big equivalents for the source tuple patterns
+mkBigLHsVarPatTup :: [Id] -> LPat Id
+mkBigLHsVarPatTup bs = mkBigLHsPatTup (map nlVarPat bs)
+
+mkBigLHsPatTup :: [LPat Id] -> LPat Id
+mkBigLHsPatTup = mkBigTuple mkLHsPatTup
+
\end{code}
@@ -790,20 +869,21 @@ mkTupleCase
mkTupleCase uniqs vars body scrut_var scrut
= mk_tuple_case uniqs (chunkify vars) body
where
- mk_tuple_case us [vars] body
+ -- This is the case where we don't need any nesting
+ mk_tuple_case _ [vars] body
= mkSmallTupleCase vars body scrut_var scrut
+
+ -- This is the case where we must nest tuples at least once
mk_tuple_case us vars_s body
- = let
- (us', vars', body') = foldr one_tuple_case (us, [], body) vars_s
- in
- mk_tuple_case us' (chunkify vars') body'
+ = let (us', vars', body') = foldr one_tuple_case (us, [], body) vars_s
+ in mk_tuple_case us' (chunkify vars') body'
+
one_tuple_case chunk_vars (us, vs, body)
- = let
- (us1, us2) = splitUniqSupply us
- scrut_var = mkSysLocal FSLIT("ds") (uniqFromSupply us1)
- (mkCoreTupTy (map idType chunk_vars))
- body' = mkSmallTupleCase chunk_vars body scrut_var (Var scrut_var)
- in (us2, scrut_var:vs, body')
+ = let (us1, us2) = splitUniqSupply us
+ scrut_var = mkSysLocal FSLIT("ds") (uniqFromSupply us1)
+ (mkCoreTupTy (map idType chunk_vars))
+ body' = mkSmallTupleCase chunk_vars body scrut_var (Var scrut_var)
+ in (us2, scrut_var:vs, body')
\end{code}
The same, but with a tuple small enough not to need nesting.
@@ -841,33 +921,21 @@ mkConsExpr ty hd tl = mkConApp consDataCon [Type ty, hd, tl]
mkListExpr :: Type -> [CoreExpr] -> CoreExpr
mkListExpr ty xs = foldr (mkConsExpr ty) (mkNilExpr ty) xs
-
-
--- The next three functions make tuple types, constructors and selectors,
--- with the rule that a 1-tuple is represented by the thing itselg
-mkCoreTupTy :: [Type] -> Type
-mkCoreTupTy [ty] = ty
-mkCoreTupTy tys = mkTupleTy Boxed (length tys) tys
-
-mkCoreTup :: [CoreExpr] -> CoreExpr
--- Builds exactly the specified tuple.
--- No fancy business for big tuples
-mkCoreTup [] = Var unitDataConId
-mkCoreTup [c] = c
-mkCoreTup cs = mkConApp (tupleCon Boxed (length cs))
- (map (Type . exprType) cs ++ cs)
mkCoreSel :: [Id] -- The tuple args
- -> Id -- The selected one
- -> Id -- A variable of the same type as the scrutinee
+ -> Id -- The selected one
+ -> Id -- A variable of the same type as the scrutinee
-> CoreExpr -- Scrutinee
-> CoreExpr
--- mkCoreSel [x,y,z] x v e
--- ===> case e of v { (x,y,z) -> x
-mkCoreSel [var] should_be_the_same_var scrut_var scrut
+
+-- mkCoreSel [x] x v e
+-- ===> e
+mkCoreSel [var] should_be_the_same_var _ scrut
= ASSERT(var == should_be_the_same_var)
scrut
+-- mkCoreSel [x,y,z] x v e
+-- ===> case e of v { (x,y,z) -> x
mkCoreSel vars the_var scrut_var scrut
= ASSERT( notNull vars )
Case scrut scrut_var (idType the_var)
@@ -977,9 +1045,7 @@ mkTickBox ix vars e = do
mkBinaryTickBox :: Int -> Int -> CoreExpr -> DsM CoreExpr
mkBinaryTickBox ixT ixF e = do
- mod <- getModuleDs
uq <- newUnique
- mod <- getModuleDs
let bndr1 = mkSysLocal FSLIT("t1") uq boolTy
falseBox <- mkTickBox ixF [] $ Var falseDataConId
trueBox <- mkTickBox ixT [] $ Var trueDataConId
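The nesting policy above ("better a 2-tuple of 10-tuples than a 10-tuple of 2-tuples") hinges on chunkify; here is a minimal standalone sketch of the same idea, with the 62-element limit from Constants.lhs passed in as a parameter rather than imported:

\begin{code}
-- Split a list into chunks of at most maxSize elements; mkBigTuple then builds one
-- small tuple per chunk and recurses on the (much shorter) list of chunks, so the
-- leaves stay big and the nesting stays shallow.
chunkifySketch :: Int -> [a] -> [[a]]
chunkifySketch maxSize xs
  | length xs <= maxSize = [xs]
  | otherwise            = go xs
  where
    go [] = []
    go ys = take maxSize ys : go (drop maxSize ys)

-- e.g. chunkifySketch 62 [1 .. 100 :: Int] gives a 62-chunk and a 38-chunk, which
-- mkBigCoreTup would then turn into a pair of a 62-tuple and a 38-tuple.
\end{code}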
diff --git a/compiler/hsSyn/HsExpr.lhs b/compiler/hsSyn/HsExpr.lhs
index c2e4c8adbd..b3e78ac47d 100644
--- a/compiler/hsSyn/HsExpr.lhs
+++ b/compiler/hsSyn/HsExpr.lhs
@@ -755,6 +755,12 @@ type LStmtLR idL idR = Located (StmtLR idL idR)
type Stmt id = StmtLR id id
+data GroupByClause id = GroupByNothing (LHsExpr id) -- Using expression, i.e. "then group using f" ==> GroupByNothing f
+ | GroupBySomething (Either (LHsExpr id) (SyntaxExpr id))
+ (LHsExpr id)
+ -- "then group using f by e" ==> GroupBySomething (Left f) e
+ -- "then group by e" ==> GroupBySomething (Right _) e: in this case the expression is filled in by the renamer
+
-- The SyntaxExprs in here are used *only* for do-notation, which
-- has rebindable syntax. Otherwise they are unused.
data StmtLR idL idR
@@ -772,8 +778,17 @@ data StmtLR idL idR
| LetStmt (HsLocalBindsLR idL idR)
-- ParStmts only occur in a list comprehension
- | ParStmt [([LStmt idL], [idR])] -- After renaming, the ids are the binders
- -- bound by the stmts and used subsequently
+ | ParStmt [([LStmt idL], [idR])]
+ -- After renaming, the ids are the binders bound by the stmts and used after them
+
+ | TransformStmt ([LStmt idL], [idR]) (LHsExpr idR) (Maybe (LHsExpr idR))
+ -- After renaming, the IDs are the binders occurring within this transform statement that are used after it
+ -- "qs, then f by e" ==> TransformStmt (qs, binders) f (Just e)
+ -- "qs, then f" ==> TransformStmt (qs, binders) f Nothing
+
+ | GroupStmt ([LStmt idL], [(idR, idR)]) (GroupByClause idR)
+ -- After renaming, the IDs are the binders occurring within this transform statement that are used after it
+ -- which are paired with the names which they group over in statements
-- Recursive statement (see Note [RecStmt] below)
| RecStmt [LStmtLR idL idR]
@@ -853,8 +868,18 @@ pprStmt (BindStmt pat expr _ _) = hsep [ppr pat, ptext SLIT("<-"), ppr expr]
pprStmt (LetStmt binds) = hsep [ptext SLIT("let"), pprBinds binds]
pprStmt (ExprStmt expr _ _) = ppr expr
pprStmt (ParStmt stmtss) = hsep (map (\stmts -> ptext SLIT("| ") <> ppr stmts) stmtss)
+pprStmt (TransformStmt (stmts, bndrs) usingExpr maybeByExpr) = (hsep [stmtsDoc, ptext SLIT("then"), ppr usingExpr, byExprDoc])
+ where stmtsDoc = interpp'SP stmts
+ byExprDoc = maybe empty (\byExpr -> hsep [ptext SLIT("by"), ppr byExpr]) maybeByExpr
+pprStmt (GroupStmt (stmts, bndrs) groupByClause) = (hsep [stmtsDoc, ptext SLIT("then group"), pprGroupByClause groupByClause])
+ where stmtsDoc = interpp'SP stmts
pprStmt (RecStmt segment _ _ _ _) = ptext SLIT("rec") <+> braces (vcat (map ppr segment))
+pprGroupByClause :: (OutputableBndr id) => GroupByClause id -> SDoc
+pprGroupByClause (GroupByNothing usingExpr) = hsep [ptext SLIT("using"), ppr usingExpr]
+pprGroupByClause (GroupBySomething eitherUsingExpr byExpr) = hsep [ptext SLIT("by"), ppr byExpr, usingExprDoc]
+ where usingExprDoc = either (\usingExpr -> hsep [ptext SLIT("using"), ppr usingExpr]) (const empty) eitherUsingExpr
+
pprDo :: OutputableBndr id => HsStmtContext any -> [LStmt id] -> LHsExpr id -> SDoc
pprDo DoExpr stmts body = ptext SLIT("do") <+> pprDeeperList vcat (map ppr stmts ++ [ppr body])
pprDo (MDoExpr _) stmts body = ptext SLIT("mdo") <+> pprDeeperList vcat (map ppr stmts ++ [ppr body])
@@ -968,6 +993,7 @@ data HsStmtContext id
| PArrComp -- Parallel array comprehension
| PatGuard (HsMatchContext id) -- Pattern guard for specified thing
| ParStmtCtxt (HsStmtContext id) -- A branch of a parallel stmt
+ | TransformStmtCtxt (HsStmtContext id) -- A branch of a transform stmt
\end{code}
\begin{code}
@@ -1002,6 +1028,7 @@ pprMatchContext ProcExpr = ptext SLIT("an arrow abstraction")
pprMatchContext (StmtCtxt ctxt) = ptext SLIT("a pattern binding in") $$ pprStmtContext ctxt
pprStmtContext (ParStmtCtxt c) = sep [ptext SLIT("a parallel branch of"), pprStmtContext c]
+pprStmtContext (TransformStmtCtxt c) = sep [ptext SLIT("a transformed branch of"), pprStmtContext c]
pprStmtContext (PatGuard ctxt) = ptext SLIT("a pattern guard for") $$ pprMatchContext ctxt
pprStmtContext DoExpr = ptext SLIT("a 'do' expression")
pprStmtContext (MDoExpr _) = ptext SLIT("an 'mdo' expression")
@@ -1031,6 +1058,7 @@ matchContextErrString RecUpd = "record update"
matchContextErrString LambdaExpr = "lambda"
matchContextErrString ProcExpr = "proc"
matchContextErrString (StmtCtxt (ParStmtCtxt c)) = matchContextErrString (StmtCtxt c)
+matchContextErrString (StmtCtxt (TransformStmtCtxt c)) = matchContextErrString (StmtCtxt c)
matchContextErrString (StmtCtxt (PatGuard _)) = "pattern guard"
matchContextErrString (StmtCtxt DoExpr) = "'do' expression"
matchContextErrString (StmtCtxt (MDoExpr _)) = "'mdo' expression"
diff --git a/compiler/hsSyn/HsUtils.lhs b/compiler/hsSyn/HsUtils.lhs
index 3eaae63e64..5d106f191d 100644
--- a/compiler/hsSyn/HsUtils.lhs
+++ b/compiler/hsSyn/HsUtils.lhs
@@ -139,6 +139,13 @@ mkHsDo ctxt stmts body = HsDo ctxt stmts body placeHolderType
mkNPat lit neg = NPat lit neg noSyntaxExpr
mkNPlusKPat id lit = NPlusKPat id lit noSyntaxExpr noSyntaxExpr
+mkTransformStmt stmts usingExpr = TransformStmt (stmts, []) usingExpr Nothing
+mkTransformByStmt stmts usingExpr byExpr = TransformStmt (stmts, []) usingExpr (Just byExpr)
+
+mkGroupUsingStmt stmts usingExpr = GroupStmt (stmts, []) (GroupByNothing usingExpr)
+mkGroupByStmt stmts byExpr = GroupStmt (stmts, []) (GroupBySomething (Right noSyntaxExpr) byExpr)
+mkGroupByUsingStmt stmts byExpr usingExpr = GroupStmt (stmts, []) (GroupBySomething (Left usingExpr) byExpr)
+
mkExprStmt expr = ExprStmt expr noSyntaxExpr placeHolderType
mkBindStmt pat expr = BindStmt pat expr noSyntaxExpr noSyntaxExpr
mkRecStmt stmts = RecStmt stmts [] [] [] emptyLHsBinds
@@ -351,6 +358,8 @@ collectStmtBinders (LetStmt binds) = collectLocalBinders binds
collectStmtBinders (ExprStmt _ _ _) = []
collectStmtBinders (ParStmt xs) = collectLStmtsBinders
$ concatMap fst xs
+collectStmtBinders (TransformStmt (stmts, _) _ _) = collectLStmtsBinders stmts
+collectStmtBinders (GroupStmt (stmts, _) _) = collectLStmtsBinders stmts
collectStmtBinders (RecStmt ss _ _ _ _) = collectLStmtsBinders ss
\end{code}
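For reference, the surface forms these smart constructors are meant for (the binder lists start empty and are filled in by the renamer; the quoted qualifier spellings are indicative rather than the exact grammar):

\begin{code}
-- "stmts, then f"                   ~>  mkTransformStmt    stmts f
-- "stmts, then f by e"              ~>  mkTransformByStmt  stmts f e
-- "stmts, then group using f"       ~>  mkGroupUsingStmt   stmts f
-- "stmts, then group by e"          ~>  mkGroupByStmt      stmts e
-- "stmts, then group by e using f"  ~>  mkGroupByUsingStmt stmts e f
\end{code}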
diff --git a/compiler/main/Constants.lhs b/compiler/main/Constants.lhs
index 8dc94d19db..5db909d631 100644
--- a/compiler/main/Constants.lhs
+++ b/compiler/main/Constants.lhs
@@ -32,8 +32,8 @@ import Data.Bits (shiftL)
All pretty arbitrary:
\begin{code}
-mAX_TUPLE_SIZE = (62 :: Int) -- Should really match the number
- -- of decls in Data.Tuple
+mAX_TUPLE_SIZE = (62 :: Int) -- Should really match the number
+ -- of decls in Data.Tuple
mAX_CONTEXT_REDUCTION_DEPTH = (20 :: Int)
\end{code}
diff --git a/compiler/main/DynFlags.hs b/compiler/main/DynFlags.hs
index 44e2aea8a3..cae2afb5a4 100644
--- a/compiler/main/DynFlags.hs
+++ b/compiler/main/DynFlags.hs
@@ -225,6 +225,7 @@ data DynFlag
| Opt_KindSignatures
| Opt_PatternSignatures
| Opt_ParallelListComp
+ | Opt_TransformListComp
| Opt_GeneralizedNewtypeDeriving
| Opt_RecursiveDo
| Opt_PatternGuards
@@ -1284,9 +1285,10 @@ xFlags = [
( "PatternSignatures", Opt_PatternSignatures ),
( "EmptyDataDecls", Opt_EmptyDataDecls ),
( "ParallelListComp", Opt_ParallelListComp ),
+ ( "TransformListComp", Opt_TransformListComp ),
( "ForeignFunctionInterface", Opt_ForeignFunctionInterface ),
( "UnliftedFFITypes", Opt_UnliftedFFITypes ),
- ( "LiberalTypeSynonyms", Opt_LiberalTypeSynonyms ),
+ ( "LiberalTypeSynonyms", Opt_LiberalTypeSynonyms ),
( "Rank2Types", Opt_Rank2Types ),
( "RankNTypes", Opt_RankNTypes ),
( "TypeOperators", Opt_TypeOperators ),
diff --git a/compiler/parser/Lexer.x b/compiler/parser/Lexer.x
index 2f6b7329a9..2f3ef4b41b 100644
--- a/compiler/parser/Lexer.x
+++ b/compiler/parser/Lexer.x
@@ -450,6 +450,9 @@ data Token
| ITdotnet
| ITmdo
| ITfamily
+ | ITgroup
+ | ITby
+ | ITusing
-- Pragmas
| ITinline_prag Bool -- True <=> INLINE, False <=> NOINLINE
@@ -583,6 +586,9 @@ isSpecial ITccallconv = True
isSpecial ITstdcallconv = True
isSpecial ITmdo = True
isSpecial ITfamily = True
+isSpecial ITgroup = True
+isSpecial ITby = True
+isSpecial ITusing = True
isSpecial _ = False
-- the bitmap provided as the third component indicates whether the
@@ -621,9 +627,12 @@ reservedWordsFM = listToUFM $
( "where", ITwhere, 0 ),
( "_scc_", ITscc, 0 ), -- ToDo: remove
- ( "forall", ITforall, bit explicitForallBit),
+ ( "forall", ITforall, bit explicitForallBit),
( "mdo", ITmdo, bit recursiveDoBit),
( "family", ITfamily, bit tyFamBit),
+ ( "group", ITgroup, bit transformComprehensionsBit),
+ ( "by", ITby, bit transformComprehensionsBit),
+ ( "using", ITusing, bit transformComprehensionsBit),
( "foreign", ITforeign, bit ffiBit),
( "export", ITexport, bit ffiBit),
@@ -1510,6 +1519,7 @@ recursiveDoBit = 13 -- mdo
unicodeSyntaxBit = 14 -- the forall symbol, arrow symbols, etc
unboxedTuplesBit = 15 -- (# and #)
standaloneDerivingBit = 16 -- standalone instance deriving declarations
+transformComprehensionsBit = 17
genericsEnabled, ffiEnabled, parrEnabled :: Int -> Bool
always _ = True
@@ -1529,6 +1539,7 @@ recursiveDoEnabled flags = testBit flags recursiveDoBit
unicodeSyntaxEnabled flags = testBit flags unicodeSyntaxBit
unboxedTuplesEnabled flags = testBit flags unboxedTuplesBit
standaloneDerivingEnabled flags = testBit flags standaloneDerivingBit
+transformComprehensionsEnabled flags = testBit flags transformComprehensionsBit
-- PState for parsing options pragmas
--
@@ -1590,6 +1601,7 @@ mkPState buf loc flags =
.|. unicodeSyntaxBit `setBitIf` dopt Opt_UnicodeSyntax flags
.|. unboxedTuplesBit `setBitIf` dopt Opt_UnboxedTuples flags
.|. standaloneDerivingBit `setBitIf` dopt Opt_StandaloneDeriving flags
+ .|. transformComprehensionsBit `setBitIf` dopt Opt_TransformListComp flags
--
setBitIf :: Int -> Bool -> Int
b `setBitIf` cond | cond = bit b
diff --git a/compiler/parser/Parser.y.pp b/compiler/parser/Parser.y.pp
index 8256b4d535..6de95f8278 100644
--- a/compiler/parser/Parser.y.pp
+++ b/compiler/parser/Parser.y.pp
@@ -243,6 +243,9 @@ incorrect.
'dotnet' { L _ ITdotnet }
'proc' { L _ ITproc } -- for arrow notation extension
'rec' { L _ ITrec } -- for arrow notation extension
+ 'group' { L _ ITgroup } -- for list transform extension
+ 'by' { L _ ITby } -- for list transform extension
+ 'using' { L _ ITusing } -- for list transform extension
'{-# INLINE' { L _ (ITinline_prag _) }
'{-# SPECIALISE' { L _ ITspec_prag }
@@ -1229,7 +1232,7 @@ gdrhs :: { Located [LGRHS RdrName] }
| gdrh { L1 [$1] }
gdrh :: { LGRHS RdrName }
- : '|' quals '=' exp { sL (comb2 $1 $>) $ GRHS (reverse (unLoc $2)) $4 }
+ : '|' guardquals '=' exp { sL (comb2 $1 $>) $ GRHS (unLoc $2) $4 }
sigdecl :: { Located (OrdList (LHsDecl RdrName)) }
: infixexp '::' sigtypedoc
@@ -1423,7 +1426,7 @@ list :: { LHsExpr RdrName }
| texp ',' exp '..' { LL $ ArithSeq noPostTcExpr (FromThen $1 $3) }
| texp '..' exp { LL $ ArithSeq noPostTcExpr (FromTo $1 $3) }
| texp ',' exp '..' exp { LL $ ArithSeq noPostTcExpr (FromThenTo $1 $3 $5) }
- | texp pquals { sL (comb2 $1 $>) $ mkHsDo ListComp (reverse (unLoc $2)) $1 }
+ | texp '|' flattenedpquals { sL (comb2 $1 $>) $ mkHsDo ListComp (unLoc $3) $1 }
lexps :: { Located [LHsExpr RdrName] }
: lexps ',' texp { LL ($3 : unLoc $1) }
@@ -1432,23 +1435,50 @@ lexps :: { Located [LHsExpr RdrName] }
-----------------------------------------------------------------------------
-- List Comprehensions
-pquals :: { Located [LStmt RdrName] } -- Either a singleton ParStmt,
- -- or a reversed list of Stmts
- : pquals1 { case unLoc $1 of
- [qs] -> L1 qs
- qss -> L1 [L1 (ParStmt stmtss)]
- where
- stmtss = [ (reverse qs, undefined)
- | qs <- qss ]
- }
-
+flattenedpquals :: { Located [LStmt RdrName] }
+ : pquals { case (unLoc $1) of
+ ParStmt [(qs, _)] -> L1 qs
+ -- We just had one thing in our "parallel" list so
+ -- we simply return that thing directly
+
+ _ -> L1 [$1]
+ -- We found more than one parallel list, so
+ -- we leave them wrapped up in a ParStmt
+ }
+
+pquals :: { LStmt RdrName }
+ : pquals1 { L1 (ParStmt [(qs, undefined) | qs <- (reverse (unLoc $1))]) }
+
pquals1 :: { Located [[LStmt RdrName]] }
- : pquals1 '|' quals { LL (unLoc $3 : unLoc $1) }
- | '|' quals { L (getLoc $2) [unLoc $2] }
+ : pquals1 '|' squals { LL (unLoc $3 : unLoc $1) }
+ | squals { L (getLoc $1) [unLoc $1] }
+
+squals :: { Located [LStmt RdrName] }
+ : squals1 { L (getLoc $1) (reverse (unLoc $1)) }
+
+squals1 :: { Located [LStmt RdrName] }
+ : transformquals1 { LL (unLoc $1) }
+
+transformquals1 :: { Located [LStmt RdrName] }
+ : transformquals1 ',' transformqual { LL $ [LL ((unLoc $3) (unLoc $1))] }
+ | transformquals1 ',' qual { LL ($3 : unLoc $1) }
+-- | transformquals1 ',' '{|' pquals '|}' { LL ($4 : unLoc $1) }
+ | transformqual { LL $ [LL ((unLoc $1) [])] }
+ | qual { L1 [$1] }
+-- | '{|' pquals '|}' { L1 [$2] }
+
-quals :: { Located [LStmt RdrName] }
- : quals ',' qual { LL ($3 : unLoc $1) }
- | qual { L1 [$1] }
+-- It is possible to enable bracketing (associating) of qualifier lists by uncommenting the lines with {| |}
+-- above. Due to a lack of consensus on the syntax, this feature is not enabled until we see user
+-- demand. Note that the {| |} symbols are reused from -XGenerics, so if you want to compile
+-- a program that makes use of this temporary syntax you must also supply that flag to GHC
+
+transformqual :: { Located ([LStmt RdrName] -> Stmt RdrName) }
+ : 'then' exp { LL $ \leftStmts -> (mkTransformStmt (reverse leftStmts) $2) }
+ | 'then' exp 'by' exp { LL $ \leftStmts -> (mkTransformByStmt (reverse leftStmts) $2 $4) }
+ | 'then' 'group' 'by' exp { LL $ \leftStmts -> (mkGroupByStmt (reverse leftStmts) $4) }
+ | 'then' 'group' 'using' exp { LL $ \leftStmts -> (mkGroupUsingStmt (reverse leftStmts) $4) }
+ | 'then' 'group' 'by' exp 'using' exp { LL $ \leftStmts -> (mkGroupByUsingStmt (reverse leftStmts) $4 $6) }
-----------------------------------------------------------------------------
-- Parallel array expressions
@@ -1465,9 +1495,19 @@ parr :: { LHsExpr RdrName }
(reverse (unLoc $1)) }
| texp '..' exp { LL $ PArrSeq noPostTcExpr (FromTo $1 $3) }
| texp ',' exp '..' exp { LL $ PArrSeq noPostTcExpr (FromThenTo $1 $3 $5) }
- | texp pquals { sL (comb2 $1 $>) $ mkHsDo PArrComp (reverse (unLoc $2)) $1 }
+ | texp '|' flattenedpquals { LL $ mkHsDo PArrComp (unLoc $3) $1 }
+
+-- We are reusing `lexps' and `flattenedpquals' from the list case.
+
+-----------------------------------------------------------------------------
+-- Guards
+
+guardquals :: { Located [LStmt RdrName] }
+ : guardquals1 { L (getLoc $1) (reverse (unLoc $1)) }
--- We are reusing `lexps' and `pquals' from the list case.
+guardquals1 :: { Located [LStmt RdrName] }
+ : guardquals1 ',' qual { LL ($3 : unLoc $1) }
+ | qual { L1 [$1] }
-----------------------------------------------------------------------------
-- Case alternatives
@@ -1500,7 +1540,7 @@ gdpats :: { Located [LGRHS RdrName] }
| gdpat { L1 [$1] }
gdpat :: { LGRHS RdrName }
- : '|' quals '->' exp { sL (comb2 $1 $>) $ GRHS (reverse (unLoc $2)) $4 }
+ : '|' guardquals '->' exp { sL (comb2 $1 $>) $ GRHS (unLoc $2) $4 }
-- 'pat' recognises a pattern, including one with a bang at the top
-- e.g. "!x" or "!(x,y)" or "C a b" etc
@@ -1546,13 +1586,13 @@ maybe_stmt :: { Maybe (LStmt RdrName) }
| {- nothing -} { Nothing }
stmt :: { LStmt RdrName }
- : qual { $1 }
+ : qual { $1 }
| 'rec' stmtlist { LL $ mkRecStmt (unLoc $2) }
qual :: { LStmt RdrName }
- : pat '<-' exp { LL $ mkBindStmt $1 $3 }
- | exp { L1 $ mkExprStmt $1 }
- | 'let' binds { LL $ LetStmt (unLoc $2) }
+ : pat '<-' exp { LL $ mkBindStmt $1 $3 }
+ | exp { L1 $ mkExprStmt $1 }
+ | 'let' binds { LL $ LetStmt (unLoc $2) }
-----------------------------------------------------------------------------
-- Record Field Update/Construction
diff --git a/compiler/prelude/PrelNames.lhs b/compiler/prelude/PrelNames.lhs
index baf3b50ecc..bffd07c7d4 100644
--- a/compiler/prelude/PrelNames.lhs
+++ b/compiler/prelude/PrelNames.lhs
@@ -175,12 +175,15 @@ basicKnownKeyNames
-- Stable pointers
newStablePtrName,
+ -- GHC Extensions
+ groupWithName,
+
-- Strings and lists
unpackCStringName, unpackCStringAppendName,
unpackCStringFoldrName, unpackCStringUtf8Name,
-- List operations
- concatName, filterName,
+ concatName, filterName, mapName,
zipName, foldrName, buildName, augmentName, appendName,
-- Parallel array operations
@@ -262,15 +265,15 @@ tYPEABLE = mkBaseModule FSLIT("Data.Typeable")
gENERICS = mkBaseModule FSLIT("Data.Generics.Basics")
dOTNET = mkBaseModule FSLIT("GHC.Dotnet")
rEAD_PREC = mkBaseModule FSLIT("Text.ParserCombinators.ReadPrec")
-lEX = mkBaseModule FSLIT("Text.Read.Lex")
+lEX = mkBaseModule FSLIT("Text.Read.Lex")
gHC_INT = mkBaseModule FSLIT("GHC.Int")
gHC_WORD = mkBaseModule FSLIT("GHC.Word")
mONAD = mkBaseModule FSLIT("Control.Monad")
mONAD_FIX = mkBaseModule FSLIT("Control.Monad.Fix")
aRROW = mkBaseModule FSLIT("Control.Arrow")
-gHC_DESUGAR = mkBaseModule FSLIT("GHC.Desugar")
+gHC_DESUGAR = mkBaseModule FSLIT("GHC.Desugar")
rANDOM = mkBaseModule FSLIT("System.Random")
-gLA_EXTS = mkBaseModule FSLIT("GHC.Exts")
+gHC_EXTS = mkBaseModule FSLIT("GHC.Exts")
mAIN = mkMainModule_ mAIN_NAME
rOOT_MAIN = mkMainModule FSLIT(":Main") -- Root module for initialisation
@@ -496,12 +499,16 @@ bindMName = methName gHC_BASE FSLIT(">>=") bindMClassOpKey
returnMName = methName gHC_BASE FSLIT("return") returnMClassOpKey
failMName = methName gHC_BASE FSLIT("fail") failMClassOpKey
+-- Functions for GHC extensions
+groupWithName = varQual gHC_EXTS FSLIT("groupWith") groupWithIdKey
+
-- Random PrelBase functions
fromStringName = methName dATA_STRING FSLIT("fromString") fromStringClassOpKey
otherwiseIdName = varQual gHC_BASE FSLIT("otherwise") otherwiseIdKey
foldrName = varQual gHC_BASE FSLIT("foldr") foldrIdKey
buildName = varQual gHC_BASE FSLIT("build") buildIdKey
augmentName = varQual gHC_BASE FSLIT("augment") augmentIdKey
+mapName = varQual gHC_BASE FSLIT("map") mapIdKey
appendName = varQual gHC_BASE FSLIT("++") appendIdKey
andName = varQual gHC_BASE FSLIT("&&") andIdKey
orName = varQual gHC_BASE FSLIT("||") orIdKey
@@ -975,6 +982,9 @@ breakpointAutoJumpIdKey = mkPreludeMiscIdUnique 67
inlineIdKey = mkPreludeMiscIdUnique 68
+mapIdKey = mkPreludeMiscIdUnique 69
+groupWithIdKey = mkPreludeMiscIdUnique 70
+
-- Parallel array functions
singletonPIdKey = mkPreludeMiscIdUnique 79
nullPIdKey = mkPreludeMiscIdUnique 80
diff --git a/compiler/rename/RnEnv.lhs b/compiler/rename/RnEnv.lhs
index c5b1a8c5cf..508bea6459 100644
--- a/compiler/rename/RnEnv.lhs
+++ b/compiler/rename/RnEnv.lhs
@@ -745,7 +745,7 @@ newLocalsRn rdr_names_w_loc
mkInternalName uniq (rdrNameOcc rdr_name) loc
bindLocatedLocalsRn :: SDoc -- Documentation string for error message
- -> [Located RdrName]
+ -> [Located RdrName]
-> ([Name] -> RnM a)
-> RnM a
bindLocatedLocalsRn doc_str rdr_names_w_loc enclosed_scope
@@ -756,10 +756,8 @@ bindLocatedLocalsRn doc_str rdr_names_w_loc enclosed_scope
checkShadowing doc_str rdr_names_w_loc `thenM_`
-- Make fresh Names and extend the environment
- newLocalsRn rdr_names_w_loc `thenM` \ names ->
- getLocalRdrEnv `thenM` \ local_env ->
- setLocalRdrEnv (extendLocalRdrEnv local_env names)
- (enclosed_scope names)
+ newLocalsRn rdr_names_w_loc `thenM` \names ->
+ bindLocalNames names (enclosed_scope names)
bindLocalNames :: [Name] -> RnM a -> RnM a
bindLocalNames names enclosed_scope
diff --git a/compiler/rename/RnExpr.lhs b/compiler/rename/RnExpr.lhs
index ba6b0e0a29..a496c66daa 100644
--- a/compiler/rename/RnExpr.lhs
+++ b/compiler/rename/RnExpr.lhs
@@ -36,14 +36,15 @@ import RnTypes ( rnHsTypeFVs,
import RnPat (rnOverLit, rnPatsAndThen_LocalRightwards, rnBindPat,
localRecNameMaker, rnLit,
rnHsRecFields_Con, rnHsRecFields_Update, checkTupSize)
+import RdrName ( mkRdrUnqual )
import DynFlags ( DynFlag(..) )
import BasicTypes ( FixityDirection(..) )
import SrcLoc ( SrcSpan )
import PrelNames ( thFAKE, hasKey, assertIdKey, assertErrorName,
loopAName, choiceAName, appAName, arrAName, composeAName, firstAName,
- negateName, thenMName, bindMName, failMName )
+ negateName, thenMName, bindMName, failMName, groupWithName )
-import Name ( Name, nameOccName, nameIsLocalOrFrom )
+import Name ( Name, nameOccName, nameModule, nameIsLocalOrFrom )
import NameSet
import UniqFM
import RdrName ( RdrName, extendLocalRdrEnv, lookupLocalRdrEnv, hideSomeUnquals )
@@ -55,7 +56,7 @@ import Util ( isSingleton )
import ListSetOps ( removeDups )
import Maybes ( expectJust )
import Outputable
-import SrcLoc ( Located(..), unLoc, getLoc )
+import SrcLoc ( Located(..), unLoc, getLoc, noLoc )
import FastString
import List ( unzip4 )
@@ -477,7 +478,9 @@ methodNamesStmt (RecStmt stmts _ _ _ _)
= methodNamesStmts stmts `addOneFV` loopAName
methodNamesStmt (LetStmt b) = emptyFVs
methodNamesStmt (ParStmt ss) = emptyFVs
- -- ParStmt can't occur in commands, but it's not convenient to error
+methodNamesStmt (TransformStmt _ _ _) = emptyFVs
+methodNamesStmt (GroupStmt _ _) = emptyFVs
+ -- ParStmt, TransformStmt and GroupStmt can't occur in commands, but it's not convenient to error
-- here so we just do what's convenient
\end{code}
@@ -588,13 +591,12 @@ rnNormalStmts :: HsStmtContext Name -> [LStmt RdrName]
-- Implements nested scopes
rnNormalStmts ctxt [] thing_inside
- = do { (thing, fvs) <- thing_inside
+ = do { (thing, fvs) <- thing_inside
; return (([],thing), fvs) }
rnNormalStmts ctxt (L loc stmt : stmts) thing_inside
- = do { ((stmt', (stmts', thing)), fvs)
- <- rnStmt ctxt stmt $
- rnNormalStmts ctxt stmts thing_inside
+ = do { ((stmt', (stmts', thing)), fvs) <- rnStmt ctxt stmt $
+ rnNormalStmts ctxt stmts thing_inside
; return (((L loc stmt' : stmts'), thing), fvs) }
@@ -621,12 +623,11 @@ rnStmt ctxt (BindStmt pat expr _ _) thing_inside
-- fv_expr shouldn't really be filtered by the rnPatsAndThen
-- but it does not matter because the names are unique
-rnStmt ctxt (LetStmt binds) thing_inside
- = do { checkErr (ok ctxt binds)
- (badIpBinds (ptext SLIT("a parallel list comprehension:")) binds)
- ; rnLocalBindsAndThen binds $ \ binds' -> do
- { (thing, fvs) <- thing_inside
- ; return ((LetStmt binds', thing), fvs) }}
+rnStmt ctxt (LetStmt binds) thing_inside = do
+ checkErr (ok ctxt binds) (badIpBinds (ptext SLIT("a parallel list comprehension:")) binds)
+ rnLocalBindsAndThen binds $ \binds' -> do
+ (thing, fvs) <- thing_inside
+ return ((LetStmt binds', thing), fvs)
where
-- We do not allow implicit-parameter bindings in a parallel
-- list comprehension. I'm not sure what it might mean.
@@ -649,58 +650,163 @@ rnStmt ctxt (RecStmt rec_stmts _ _ _ _) thing_inside
where
doc = text "In a recursive do statement"
+rnStmt ctxt (TransformStmt (stmts, _) usingExpr maybeByExpr) thing_inside = do
+ checkIsTransformableListComp ctxt
+
+ (usingExpr', fv_usingExpr) <- rnLExpr usingExpr
+ ((stmts', binders, (maybeByExpr', thing)), fvs) <-
+ rnNormalStmtsAndFindUsedBinders (TransformStmtCtxt ctxt) stmts $ \unshadowed_bndrs -> do
+ (maybeByExpr', fv_maybeByExpr) <- rnMaybeLExpr maybeByExpr
+ (thing, fv_thing) <- thing_inside
+
+ return ((maybeByExpr', thing), fv_maybeByExpr `plusFV` fv_thing)
+
+ return ((TransformStmt (stmts', binders) usingExpr' maybeByExpr', thing), fv_usingExpr `plusFV` fvs)
+ where
+ rnMaybeLExpr Nothing = return (Nothing, emptyFVs)
+ rnMaybeLExpr (Just expr) = do
+ (expr', fv_expr) <- rnLExpr expr
+ return (Just expr', fv_expr)
+
+rnStmt ctxt (GroupStmt (stmts, _) groupByClause) thing_inside = do
+ checkIsTransformableListComp ctxt
+
+ -- We must rename the using expression in the context before the transform is begun
+ groupByClauseAction <-
+ case groupByClause of
+ GroupByNothing usingExpr -> do
+ (usingExpr', fv_usingExpr) <- rnLExpr usingExpr
+ (return . return) (GroupByNothing usingExpr', fv_usingExpr)
+ GroupBySomething eitherUsingExpr byExpr -> do
+ (eitherUsingExpr', fv_eitherUsingExpr) <-
+ case eitherUsingExpr of
+ Right _ -> return (Right $ HsVar groupWithName, unitNameSet groupWithName)
+ Left usingExpr -> do
+ (usingExpr', fv_usingExpr) <- rnLExpr usingExpr
+ return (Left usingExpr', fv_usingExpr)
+
+ return $ do
+ (byExpr', fv_byExpr) <- rnLExpr byExpr
+ return (GroupBySomething eitherUsingExpr' byExpr', fv_eitherUsingExpr `plusFV` fv_byExpr)
+
+ -- We only use rnNormalStmtsAndFindUsedBinders to get unshadowed_bndrs, so
+ -- perhaps we could refactor this to use rnNormalStmts directly?
+ ((stmts', _, (groupByClause', usedBinderMap, thing)), fvs) <-
+ rnNormalStmtsAndFindUsedBinders (TransformStmtCtxt ctxt) stmts $ \unshadowed_bndrs -> do
+ (groupByClause', fv_groupByClause) <- groupByClauseAction
+
+ unshadowed_bndrs' <- mapM newLocalName unshadowed_bndrs
+ let binderMap = zip unshadowed_bndrs unshadowed_bndrs'
+
+ -- Bind the "thing" inside a context where we have REBOUND everything
+ -- bound by the statements before the group. This is necessary since after
+ -- the grouping the same identifiers actually have different meanings,
+ -- i.e. they refer to lists, not singletons!
+ (thing, fv_thing) <- bindLocalNames unshadowed_bndrs' thing_inside
+
+ -- We remove entries from the binder map that are not used in the thing_inside.
+ -- We can then use that usage information to ensure that the free variables do
+ -- not contain the things we just bound, but do contain the things we need to
+ -- make those bindings (i.e. the corresponding non-listy variables)
+
+ -- Note that we also retain those entries which have an old binder in our
+ -- own free variables (the using or by expression). This is because this map
+ -- is reused in the desugarer to create the type to bind from the statements
+ -- that occur before this one. If the binders we need are not in the map, they
+ -- will never get bound into our desugared expression and hence the simplifier
+ -- crashes as we refer to variables that don't exist!
+ let usedBinderMap = filter
+ (\(old_binder, new_binder) ->
+ (new_binder `elemNameSet` fv_thing) ||
+ (old_binder `elemNameSet` fv_groupByClause)) binderMap
+ (usedOldBinders, usedNewBinders) = unzip usedBinderMap
+ real_fv_thing = (delListFromNameSet fv_thing usedNewBinders) `plusFV` (mkNameSet usedOldBinders)
+
+ return ((groupByClause', usedBinderMap, thing), fv_groupByClause `plusFV` real_fv_thing)
+
+ traceRn (text "rnStmt: implicitly rebound these used binders:" <+> ppr usedBinderMap)
+ return ((GroupStmt (stmts', usedBinderMap) groupByClause', thing), fvs)
+
rnStmt ctxt (ParStmt segs) thing_inside
= do { parallel_list_comp <- doptM Opt_ParallelListComp
; checkM parallel_list_comp parStmtErr
- ; orig_lcl_env <- getLocalRdrEnv
- ; ((segs',thing), fvs) <- go orig_lcl_env [] segs
+ ; ((segs', thing), fvs) <- rnParallelStmts (ParStmtCtxt ctxt) segs thing_inside
; return ((ParStmt segs', thing), fvs) }
+
+
+rnNormalStmtsAndFindUsedBinders :: HsStmtContext Name
+ -> [LStmt RdrName]
+ -> ([Name] -> RnM (thing, FreeVars))
+ -> RnM (([LStmt Name], [Name], thing), FreeVars)
+rnNormalStmtsAndFindUsedBinders ctxt stmts thing_inside = do
+ ((stmts', (used_bndrs, inner_thing)), fvs) <- rnNormalStmts ctxt stmts $ do
+ -- Find the Names that are bound by stmts that
+ -- by assumption we have just renamed
+ local_env <- getLocalRdrEnv
+ let
+ stmts_binders = collectLStmtsBinders stmts
+ bndrs = map (expectJust "rnStmt"
+ . lookupLocalRdrEnv local_env
+ . unLoc) stmts_binders
+
+ -- If there is shadowing (e.g. x <- e1; x <- e2) we'll look up
+ -- (Unqual x) twice, getting the second binding both times,
+ -- which is the one we want
+ unshadowed_bndrs = nub bndrs
+
+ -- Typecheck the thing inside, passing on all
+ -- the Names bound before it for its information
+ (thing, fvs) <- thing_inside unshadowed_bndrs
+
+ -- Figure out which of the bound names are used
+ -- after the statements we renamed
+ let used_bndrs = filter (`elemNameSet` fvs) bndrs
+ return ((used_bndrs, thing), fvs)
+
+ -- Flatten the tuple returned by the above call a bit!
+ return ((stmts', used_bndrs, inner_thing), fvs)
+
+
+rnParallelStmts ctxt segs thing_inside = do
+ orig_lcl_env <- getLocalRdrEnv
+ go orig_lcl_env [] segs
+ where
+ go orig_lcl_env bndrs [] = do
+ let (bndrs', dups) = removeDups cmpByOcc bndrs
+ inner_env = extendLocalRdrEnv orig_lcl_env bndrs'
+
+ mappM dupErr dups
+ (thing, fvs) <- setLocalRdrEnv inner_env thing_inside
+ return (([], thing), fvs)
+
+ go orig_lcl_env bndrs_so_far ((stmts, _) : segs) = do
+ ((stmts', bndrs, (segs', thing)), fvs) <- rnNormalStmtsAndFindUsedBinders ctxt stmts $ \new_bndrs -> do
+ -- Typecheck the thing inside, passing on all
+ -- the Names bound, but separately; revert the envt
+ setLocalRdrEnv orig_lcl_env $ do
+ go orig_lcl_env (new_bndrs ++ bndrs_so_far) segs
+
+ let seg' = (stmts', bndrs)
+ return (((seg':segs'), thing), delListFromNameSet fvs bndrs)
+
+ cmpByOcc n1 n2 = nameOccName n1 `compare` nameOccName n2
+ dupErr vs = addErr (ptext SLIT("Duplicate binding in parallel list comprehension for:")
+ <+> quotes (ppr (head vs)))
+
+
+checkIsTransformableListComp :: HsStmtContext Name -> RnM ()
+checkIsTransformableListComp ctxt = do
+ -- Ensure we are really within a list comprehension because otherwise the
+ -- desugarer will break when we come to operate on a parallel array
+ checkM (notParallelArray ctxt) transformStmtOutsideListCompErr
+
+ -- Ensure the user has turned the correct flag on
+ transform_list_comp <- doptM Opt_TransformListComp
+ checkM transform_list_comp transformStmtErr
where
--- type ParSeg id = [([LStmt id], [id])]
--- go :: NameSet -> [ParSeg RdrName]
--- -> RnM (([ParSeg Name], thing), FreeVars)
-
- go orig_lcl_env bndrs []
- = do { let { (bndrs', dups) = removeDups cmpByOcc bndrs
- ; inner_env = extendLocalRdrEnv orig_lcl_env bndrs' }
- ; mappM dupErr dups
- ; (thing, fvs) <- setLocalRdrEnv inner_env thing_inside
- ; return (([], thing), fvs) }
-
- go orig_lcl_env bndrs_so_far ((stmts, _) : segs)
- = do { ((stmts', (bndrs, segs', thing)), fvs)
- <- rnNormalStmts par_ctxt stmts $ do
- { -- Find the Names that are bound by stmts
- lcl_env <- getLocalRdrEnv
- ; let { rdr_bndrs = collectLStmtsBinders stmts
- ; bndrs = map ( expectJust "rnStmt"
- . lookupLocalRdrEnv lcl_env
- . unLoc) rdr_bndrs
- ; new_bndrs = nub bndrs ++ bndrs_so_far
- -- The nub is because there might be shadowing
- -- x <- e1; x <- e2
- -- So we'll look up (Unqual x) twice, getting
- -- the second binding both times, which is the
- } -- one we want
-
- -- Typecheck the thing inside, passing on all
- -- the Names bound, but separately; revert the envt
- ; ((segs', thing), fvs) <- setLocalRdrEnv orig_lcl_env $
- go orig_lcl_env new_bndrs segs
-
- -- Figure out which of the bound names are used
- ; let used_bndrs = filter (`elemNameSet` fvs) bndrs
- ; return ((used_bndrs, segs', thing), fvs) }
-
- ; let seg' = (stmts', bndrs)
- ; return (((seg':segs'), thing),
- delListFromNameSet fvs bndrs) }
-
- par_ctxt = ParStmtCtxt ctxt
-
- cmpByOcc n1 n2 = nameOccName n1 `compare` nameOccName n2
- dupErr vs = addErr (ptext SLIT("Duplicate binding in parallel list comprehension for:")
- <+> quotes (ppr (head vs)))
+ notParallelArray PArrComp = False
+ notParallelArray _ = True
+
\end{code}
@@ -833,7 +939,13 @@ rn_rec_stmt_lhs fix_env (L loc (RecStmt stmts _ _ _ _)) -- Flatten Rec inside Re
rn_rec_stmt_lhs _ stmt@(L _ (ParStmt _)) -- Syntactically illegal in mdo
= pprPanic "rn_rec_stmt" (ppr stmt)
-
+
+rn_rec_stmt_lhs _ stmt@(L _ (TransformStmt _ _ _)) -- Syntactically illegal in mdo
+ = pprPanic "rn_rec_stmt" (ppr stmt)
+
+rn_rec_stmt_lhs _ stmt@(L _ (GroupStmt _ _)) -- Syntactically illegal in mdo
+ = pprPanic "rn_rec_stmt" (ppr stmt)
+
rn_rec_stmts_lhs :: UniqFM (Located Fixity) -- mini fixity env for the names we're about to bind
-- these fixities need to be brought into scope with the names
-> [LStmt RdrName]
@@ -890,6 +1002,12 @@ rn_rec_stmt all_bndrs stmt@(L loc (RecStmt stmts _ _ _ _)) _
rn_rec_stmt all_bndrs stmt@(L _ (ParStmt _)) _ -- Syntactically illegal in mdo
= pprPanic "rn_rec_stmt: ParStmt" (ppr stmt)
+rn_rec_stmt all_bndrs stmt@(L _ (TransformStmt _ _ _)) _ -- Syntactically illegal in mdo
+ = pprPanic "rn_rec_stmt: TransformStmt" (ppr stmt)
+
+rn_rec_stmt all_bndrs stmt@(L _ (GroupStmt _ _)) _ -- Syntactically illegal in mdo
+ = pprPanic "rn_rec_stmt: GroupStmt" (ppr stmt)
+
rn_rec_stmts :: [Name] -> [(LStmtLR Name RdrName, FreeVars)] -> RnM [Segment (LStmt Name)]
rn_rec_stmts bndrs stmts = mappM (uncurry (rn_rec_stmt bndrs)) stmts `thenM` \ segs_s ->
returnM (concat segs_s)
@@ -1027,8 +1145,12 @@ patSynErr e = do { addErr (sep [ptext SLIT("Pattern syntax in expression context
nest 4 (ppr e)])
; return (EWildPat, emptyFVs) }
+
parStmtErr = addErr (ptext SLIT("Illegal parallel list comprehension: use -XParallelListComp"))
+transformStmtErr = addErr (ptext SLIT("Illegal transform or grouping list comprehension: use -XTransformListComp"))
+transformStmtOutsideListCompErr = addErr (ptext SLIT("Currently you may only use transform or grouping comprehensions within list comprehensions, not parallel array comprehensions"))
+
badIpBinds what binds
= hang (ptext SLIT("Implicit-parameter bindings illegal in") <+> what)
2 (ppr binds)
diff --git a/compiler/typecheck/TcExpr.lhs b/compiler/typecheck/TcExpr.lhs
index 804fb47890..99d0c5449d 100644
--- a/compiler/typecheck/TcExpr.lhs
+++ b/compiler/typecheck/TcExpr.lhs
@@ -12,7 +12,7 @@
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#Warnings
-- for details
-module TcExpr ( tcPolyExpr, tcPolyExprNC, tcMonoExpr, tcInferRho, tcSyntaxOp) where
+module TcExpr ( tcPolyExpr, tcPolyExprNC, tcMonoExpr, tcInferRho, tcSyntaxOp ) where
#include "HsVersions.h"
diff --git a/compiler/typecheck/TcHsSyn.lhs b/compiler/typecheck/TcHsSyn.lhs
index b9a2188ec5..ff5c942149 100644
--- a/compiler/typecheck/TcHsSyn.lhs
+++ b/compiler/typecheck/TcHsSyn.lhs
@@ -647,6 +647,37 @@ zonkStmt env (ExprStmt expr then_op ty)
zonkTcTypeToType env ty `thenM` \ new_ty ->
returnM (env, ExprStmt new_expr new_then new_ty)
+zonkStmt env (TransformStmt (stmts, binders) usingExpr maybeByExpr)
+ = do { (env', stmts') <- zonkStmts env stmts
+ ; let binders' = zonkIdOccs env' binders
+ ; usingExpr' <- zonkLExpr env' usingExpr
+ ; maybeByExpr' <- zonkMaybeLExpr env' maybeByExpr
+ ; return (env', TransformStmt (stmts', binders') usingExpr' maybeByExpr') }
+
+zonkStmt env (GroupStmt (stmts, binderMap) groupByClause)
+ = do { (env', stmts') <- zonkStmts env stmts
+ ; binderMap' <- mappM (zonkBinderMapEntry env') binderMap
+ ; groupByClause' <-
+ case groupByClause of
+ GroupByNothing usingExpr -> (zonkLExpr env' usingExpr) >>= (return . GroupByNothing)
+ GroupBySomething eitherUsingExpr byExpr -> do
+ eitherUsingExpr' <- mapEitherM (zonkLExpr env') (zonkExpr env') eitherUsingExpr
+ byExpr' <- zonkLExpr env' byExpr
+ return $ GroupBySomething eitherUsingExpr' byExpr'
+
+ ; let env'' = extendZonkEnv env' (map snd binderMap')
+ ; return (env'', GroupStmt (stmts', binderMap') groupByClause') }
+ where
+ mapEitherM f g x = do
+ case x of
+ Left a -> f a >>= (return . Left)
+ Right b -> g b >>= (return . Right)
+
+ zonkBinderMapEntry env (oldBinder, newBinder) = do
+ let oldBinder' = zonkIdOcc env oldBinder
+ newBinder' <- zonkIdBndr env newBinder
+ return (oldBinder', newBinder')
+
zonkStmt env (LetStmt binds)
= zonkLocalBinds env binds `thenM` \ (env1, new_binds) ->
returnM (env1, LetStmt new_binds)
@@ -658,6 +689,9 @@ zonkStmt env (BindStmt pat expr bind_op fail_op)
; new_fail <- zonkExpr env fail_op
; return (env1, BindStmt new_pat new_expr new_bind new_fail) }
+zonkMaybeLExpr env Nothing = return Nothing
+zonkMaybeLExpr env (Just e) = (zonkLExpr env e) >>= (return . Just)
+
-------------------------------------------------------------------------
zonkRecFields :: ZonkEnv -> HsRecordBinds TcId -> TcM (HsRecordBinds TcId)
diff --git a/compiler/typecheck/TcMatches.lhs b/compiler/typecheck/TcMatches.lhs
index da1d0e017d..e07e6dad76 100644
--- a/compiler/typecheck/TcMatches.lhs
+++ b/compiler/typecheck/TcMatches.lhs
@@ -39,8 +39,12 @@ import TysWiredIn
import PrelNames
import Id
import TyCon
+import TysPrim
import Outputable
+import Util
import SrcLoc
+
+import Control.Monad( liftM )
\end{code}
%************************************************************************
@@ -391,9 +395,72 @@ tcLcStmt m_tc ctxt (ParStmt bndr_stmts_s) elt_ty thing_inside
; return (ids, pairs', thing) }
; return ( (stmts', ids) : pairs', thing ) }
+tcLcStmt m_tc ctxt (TransformStmt (stmts, binders) usingExpr maybeByExpr) elt_ty thing_inside = do
+ (stmts', (binders', usingExpr', maybeByExpr', thing)) <-
+ tcStmts (TransformStmtCtxt ctxt) (tcLcStmt m_tc) stmts elt_ty $ \elt_ty' -> do
+ let alphaListTy = mkTyConApp m_tc [alphaTy]
+
+ (usingExpr', maybeByExpr') <-
+ case maybeByExpr of
+ Nothing -> do
+ -- We must validate that usingExpr :: forall a. [a] -> [a]
+ usingExpr' <- tcPolyExpr usingExpr (mkForAllTy alphaTyVar (alphaListTy `mkFunTy` alphaListTy))
+ return (usingExpr', Nothing)
+ Just byExpr -> do
+ -- We must infer a type such that e :: t and then check that usingExpr :: forall a. (a -> t) -> [a] -> [a]
+ (byExpr', tTy) <- tcInferRho byExpr
+ usingExpr' <- tcPolyExpr usingExpr (mkForAllTy alphaTyVar ((alphaTy `mkFunTy` tTy) `mkFunTy` (alphaListTy `mkFunTy` alphaListTy)))
+ return (usingExpr', Just byExpr')
+
+ binders' <- tcLookupLocalIds binders
+ thing <- thing_inside elt_ty'
+
+ return (binders', usingExpr', maybeByExpr', thing)
+
+ return (TransformStmt (stmts', binders') usingExpr' maybeByExpr', thing)
+
+tcLcStmt m_tc ctxt (GroupStmt (stmts, bindersMap) groupByClause) elt_ty thing_inside = do
+ (stmts', (bindersMap', groupByClause', thing)) <-
+ tcStmts (TransformStmtCtxt ctxt) (tcLcStmt m_tc) stmts elt_ty $ \elt_ty' -> do
+ let alphaListTy = mkTyConApp m_tc [alphaTy]
+ alphaListListTy = mkTyConApp m_tc [alphaListTy]
+
+ groupByClause' <-
+ case groupByClause of
+ GroupByNothing usingExpr ->
+ -- We must validate that usingExpr :: forall a. [a] -> [[a]]
+ tcPolyExpr usingExpr (mkForAllTy alphaTyVar (alphaListTy `mkFunTy` alphaListListTy)) >>= (return . GroupByNothing)
+ GroupBySomething eitherUsingExpr byExpr -> do
+ -- We must infer a type such that byExpr :: t
+ (byExpr', tTy) <- tcInferRho byExpr
+
+ -- If it exists, we then check that usingExpr :: forall a. (a -> t) -> [a] -> [[a]]
+ let expectedUsingType = mkForAllTy alphaTyVar ((alphaTy `mkFunTy` tTy) `mkFunTy` (alphaListTy `mkFunTy` alphaListListTy))
+ eitherUsingExpr' <-
+ case eitherUsingExpr of
+ Left usingExpr -> (tcPolyExpr usingExpr expectedUsingType) >>= (return . Left)
+ Right usingExpr -> (tcPolyExpr (noLoc usingExpr) expectedUsingType) >>= (return . Right . unLoc)
+ return $ GroupBySomething eitherUsingExpr' byExpr'
+
+ -- Find the IDs and types of all old binders
+ let (oldBinders, newBinders) = unzip bindersMap
+ oldBinders' <- tcLookupLocalIds oldBinders
+
+ -- Ensure that every old binder of type b is linked up with its new binder which should have type [b]
+ let newBinders' = zipWith associateNewBinder oldBinders' newBinders
+
+ -- Type check the thing in the environment with these new binders and return the result
+ thing <- tcExtendIdEnv newBinders' (thing_inside elt_ty')
+ return (zipEqual "tcLcStmt: Old and new binder lists were not of the same length" oldBinders' newBinders', groupByClause', thing)
+
+ return (GroupStmt (stmts', bindersMap') groupByClause', thing)
+ where
+ associateNewBinder :: TcId -> Name -> TcId
+ associateNewBinder oldBinder newBinder = mkLocalId newBinder (mkTyConApp m_tc [idType oldBinder])
+
tcLcStmt m_tc ctxt stmt elt_ty thing_inside
= pprPanic "tcLcStmt: unexpected Stmt" (ppr stmt)
-
+
--------------------------------
-- Do-notation
-- The main excitement here is dealing with rebindable syntax
diff --git a/compiler/utils/Panic.lhs b/compiler/utils/Panic.lhs
index baf1bf3e28..95878c4f52 100644
--- a/compiler/utils/Panic.lhs
+++ b/compiler/utils/Panic.lhs
@@ -135,7 +135,7 @@ Panics and asserts.
\begin{code}
panic, pgmError :: String -> a
-panic x = Exception.throwDyn (Panic x)
+panic x = trace ("Panic (" ++ x ++ ")") (Exception.throwDyn (Panic x))
pgmError x = Exception.throwDyn (ProgramError x)
-- #-versions because panic can't return an unboxed int, and that's
diff --git a/docs/users_guide/flags.xml b/docs/users_guide/flags.xml
index 1da736b5ed..a00a4f1ab8 100644
--- a/docs/users_guide/flags.xml
+++ b/docs/users_guide/flags.xml
@@ -827,6 +827,12 @@
<entry><option>-XNoParallelListComp</option></entry>
</row>
<row>
+ <entry><option>-XTransformListComp</option></entry>
+ <entry>Enable <link linkend="generalised-list-comprehensions">transform list comprehensions</link>.</entry>
+ <entry>dynamic</entry>
+ <entry><option>-XNoTransformListComp</option></entry>
+ </row>
+ <row>
<entry><option>-XUnliftedFFITypes</option></entry>
<entry>Enable unlifted FFI types.</entry>
<entry>dynamic</entry>
diff --git a/docs/users_guide/glasgow_exts.xml b/docs/users_guide/glasgow_exts.xml
index a1cf5c5f6a..de69b60063 100644
--- a/docs/users_guide/glasgow_exts.xml
+++ b/docs/users_guide/glasgow_exts.xml
@@ -1058,6 +1058,166 @@ This name is not supported by GHC.
branches.</para>
</sect2>
+
+ <!-- ===================== TRANSFORM LIST COMPREHENSIONS =================== -->
+
+ <sect2 id="generalised-list-comprehensions">
+ <title>Generalised (SQL-Like) List Comprehensions</title>
+ <indexterm><primary>list comprehensions</primary><secondary>generalised</secondary>
+ </indexterm>
+ <indexterm><primary>extended list comprehensions</primary>
+ </indexterm>
+ <indexterm><primary>group</primary></indexterm>
+ <indexterm><primary>sql</primary></indexterm>
+
+
+ <para>Generalised list comprehensions are a further enhancement to the
+ list comprehension syntactic sugar to allow operations such as sorting
+ and grouping which are familiar from SQL. They are fully described in the
+ paper <ulink url="http://research.microsoft.com/~simonpj/papers/list-comp">
+ Comprehensive comprehensions: comprehensions with "order by" and "group by"</ulink>,
+ except that the syntax we use differs slightly from the paper.</para>
+<para>Here is an example:
+<programlisting>
+employees = [ ("Simon", "MS", 80)
+            , ("Erik", "MS", 100)
+            , ("Phil", "Ed", 40)
+            , ("Gordon", "Ed", 45)
+            , ("Paul", "Yale", 60) ]
+
+output = [ (the dept, sum salary)
+         | (name, dept, salary) &lt;- employees
+         , then group by dept
+         , then sortWith by (sum salary)
+         , then take 5 ]
+</programlisting>
+In this example, the list <literal>output</literal> would take on
+ the value:
+
+<programlisting>
+[("Yale", 60), ("Ed", 85), ("MS", 180)]
+</programlisting>
+</para>
+<para>There are three new keywords: <literal>group</literal>, <literal>by</literal>, and <literal>using</literal>.
+(The function <literal>sortWith</literal> is not a keyword; it is an ordinary
+function that is exported by <literal>GHC.Exts</literal>.)</para>
+
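+<para>To use these qualifiers you must enable the extension, for example with
+<option>-XTransformListComp</option> or a <literal>LANGUAGE</literal> pragma, and the helper
+functions used in the examples come from <literal>GHC.Exts</literal> (with the accompanying
+base-library patch). A minimal module header for experimenting might look like this
+(module name arbitrary):</para>
+
+<programlisting>
+{-# LANGUAGE TransformListComp #-}
+module Example where
+
+import GHC.Exts (the, groupWith, sortWith)
+</programlisting>
+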
+<para>There are five new forms of comprehension qualifier,
+all introduced by the (existing) keyword <literal>then</literal>:
+ <itemizedlist>
+ <listitem>
+
+<programlisting>
+then f
+</programlisting>
+
+ This statement requires that <literal>f</literal> have the type <literal>
+ forall a. [a] -> [a]</literal>. You can see an example of its use in the
+ motivating example, as this form is used to apply <literal>take 5</literal>.
+
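+ <para>As a further minimal sketch (any function of that type will do), the
+ comprehension below applies <literal>take 2</literal> to the qualifiers before it,
+ so only the first two values of <literal>x</literal> survive:</para>
+
+<programlisting>
+output = [ x*x
+         | x &lt;- [1..10]
+         , then take 2 ]
+</programlisting>
+
+ <para>Here <literal>output</literal> evaluates to <literal>[1,4]</literal>.</para>
+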
+ </listitem>
+
+
+ <listitem>
+<para>
+<programlisting>
+then f by e
+</programlisting>
+
+ This form is similar to the previous one, but the expression e is turned into a function
+ which will be passed as the first argument to f. As a consequence, f must have
+ the type <literal>forall a. (a -> t) -> [a] -> [a]</literal>. As you can see
+ from the type, this function lets f &quot;project out&quot; some information
+ from the elements of the list it is transforming.</para>
+
+ <para>An example is shown in the opening example, where <literal>sortWith</literal>
+ is supplied with a function that lets it find out the <literal>sum salary</literal>
+ for any item in the list comprehension it transforms.</para>
+
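+ <para>As another minimal sketch, assuming <literal>sortWith</literal> from
+ <literal>GHC.Exts</literal> and a hypothetical list <literal>people</literal> of
+ name/age pairs, the comprehension below lists the names youngest first:</para>
+
+<programlisting>
+youngestFirst = [ name
+                | (name, age) &lt;- people
+                , then sortWith by age ]
+</programlisting>
+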
+ </listitem>
+
+
+ <listitem>
+
+<programlisting>
+then group by e using f
+</programlisting>
+
+ <para>This is the most general of the grouping-type statements. In this form,
+ f is required to have type <literal>forall a. (a -> t) -> [a] -> [[a]]</literal>.
+ As with the <literal>then f by e</literal> case above, the first argument
+ is a function supplied to f by the compiler which lets it compute e on every
+ element of the list being transformed. However, unlike the non-grouping case,
+ f additionally partitions the list into a number of sublists: this means that
+ at every point after this statement, binders occurring before it in the comprehension
+ refer to <emphasis>lists</emphasis> of possible values, not single values. To help understand
+ this, let's look at an example:</para>
+
+<programlisting>
+-- This works similarly to groupWith in GHC.Exts, but doesn't sort its input first
+groupRuns :: Eq b => (a -> b) -> [a] -> [[a]]
+groupRuns f = groupBy (\x y -> f x == f y)
+
+output = [ (the x, y)
+         | x &lt;- ([1..3] ++ [1..2])
+         , y &lt;- [4..6]
+         , then group by x using groupRuns ]
+</programlisting>
+
+ <para>This results in the variable <literal>output</literal> taking on the value below:</para>
+
+<programlisting>
+[(1, [4, 5, 6]), (2, [4, 5, 6]), (3, [4, 5, 6]), (1, [4, 5, 6]), (2, [4, 5, 6])]
+</programlisting>
+
+ <para>Note that we have used the <literal>the</literal> function to change the type
+ of x from a list to its original numeric type. The variable y, in contrast, is left
+ unchanged from the list form introduced by the grouping.</para>
+
+ </listitem>
+
+ <listitem>
+
+<programlisting>
+then group by e
+</programlisting>
+
+ <para>This form of grouping is essentially the same as the one described above. However,
+ since no function to use for the grouping has been supplied, it will fall back on the
+ <literal>groupWith</literal> function defined in
+ <ulink url="../libraries/base/GHC-Exts.html"><literal>GHC.Exts</literal></ulink>. This
+ is the form of the group statement that we made use of in the opening example.</para>
+
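+ <para>For example, using <literal>the</literal> from <literal>GHC.Exts</literal>
+ together with the implicit <literal>groupWith</literal>, one might collect names
+ by department like this (the literal list here is just an illustrative stand-in):</para>
+
+<programlisting>
+namesByDept = [ (the dept, name)
+              | (name, dept) &lt;- [("Simon","MS"), ("Erik","MS"), ("Phil","Ed")]
+              , then group by dept ]
+</programlisting>
+
+ <para>Since <literal>groupWith</literal> sorts by its key, <literal>namesByDept</literal>
+ evaluates to <literal>[("Ed",["Phil"]), ("MS",["Simon","Erik"])]</literal>.</para>
+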
+ </listitem>
+
+
+ <listitem>
+
+<programlisting>
+then group using f
+</programlisting>
+
+ <para>With this form of the group statement, f is simply required to have the type
+ <literal>forall a. [a] -> [[a]]</literal>, which is used to group the
+ comprehension so far directly. An example of this form is as follows:</para>
+
+<programlisting>
+output = [ x
+         | y &lt;- [1..5]
+         , x &lt;- "hello"
+         , then group using inits ]
+</programlisting>
+
+ <para>This will yield a list containing every prefix of the string formed by writing "hello" five times:</para>
+
+<programlisting>
+["","h","he","hel","hell","hello","helloh","hellohe","hellohel","hellohell","hellohello","hellohelloh",...]
+</programlisting>
+
+ </listitem>
+</itemizedlist>
+</para>
+ </sect2>
<!-- ===================== REBINDABLE SYNTAX =================== -->