author      Richard Eisenberg <eir@cis.upenn.edu>  2014-12-07 14:29:25 -0500
committer   Richard Eisenberg <eir@cis.upenn.edu>  2014-12-07 14:29:25 -0500
commit      85db007fed4b8a3396d4713ad08e75bc95e1405c (patch)
tree        c3369e6a1d5d7474ff41abddeabafe949036856a
parent      1b8a6d7eaa3cfe91e3864a7a6ef38209734b7d58 (diff)
parent      bafba119387cdba1a84a45b6a4fe616792c94271 (diff)
download    haskell-85db007fed4b8a3396d4713ad08e75bc95e1405c.tar.gz
Merge commit 'bafba11' into wip/rae-new-coercible
Conflicts:
    compiler/basicTypes/DataCon.hs
    compiler/utils/Util.hs
-rw-r--r--compiler/basicTypes/BasicTypes.hs (renamed from compiler/basicTypes/BasicTypes.lhs)295
-rw-r--r--compiler/basicTypes/ConLike.hs (renamed from compiler/basicTypes/ConLike.lhs)36
-rw-r--r--compiler/basicTypes/DataCon.hs (renamed from compiler/basicTypes/DataCon.lhs)104
-rw-r--r--compiler/basicTypes/DataCon.hs-boot (renamed from compiler/basicTypes/DataCon.lhs-boot)2
-rw-r--r--compiler/basicTypes/Demand.hs (renamed from compiler/basicTypes/Demand.lhs)326
-rw-r--r--compiler/basicTypes/Id.hs (renamed from compiler/basicTypes/Id.lhs)118
-rw-r--r--compiler/basicTypes/IdInfo.hs (renamed from compiler/basicTypes/IdInfo.lhs)134
-rw-r--r--compiler/basicTypes/IdInfo.hs-boot (renamed from compiler/basicTypes/IdInfo.lhs-boot)2
-rw-r--r--compiler/basicTypes/Literal.hs (renamed from compiler/basicTypes/Literal.lhs)74
-rw-r--r--compiler/basicTypes/MkId.hs (renamed from compiler/basicTypes/MkId.lhs)128
-rw-r--r--compiler/basicTypes/MkId.hs-boot (renamed from compiler/basicTypes/MkId.lhs-boot)4
-rw-r--r--compiler/basicTypes/Module.hs (renamed from compiler/basicTypes/Module.lhs)88
-rw-r--r--compiler/basicTypes/Module.hs-boot (renamed from compiler/basicTypes/Module.lhs-boot)2
-rw-r--r--compiler/basicTypes/Name.hs (renamed from compiler/basicTypes/Name.lhs)128
-rw-r--r--compiler/basicTypes/Name.hs-boot (renamed from compiler/basicTypes/Name.lhs-boot)2
-rw-r--r--compiler/basicTypes/NameEnv.hs (renamed from compiler/basicTypes/NameEnv.lhs)36
-rw-r--r--compiler/basicTypes/NameSet.hs (renamed from compiler/basicTypes/NameSet.lhs)48
-rw-r--r--compiler/basicTypes/OccName.hs (renamed from compiler/basicTypes/OccName.lhs)158
-rw-r--r--compiler/basicTypes/OccName.hs-boot (renamed from compiler/basicTypes/OccName.lhs-boot)2
-rw-r--r--compiler/basicTypes/PatSyn.hs (renamed from compiler/basicTypes/PatSyn.lhs)55
-rw-r--r--compiler/basicTypes/PatSyn.hs-boot (renamed from compiler/basicTypes/PatSyn.lhs-boot)2
-rw-r--r--compiler/basicTypes/RdrName.hs (renamed from compiler/basicTypes/RdrName.lhs)112
-rw-r--r--compiler/basicTypes/SrcLoc.hs (renamed from compiler/basicTypes/SrcLoc.lhs)134
-rw-r--r--compiler/basicTypes/UniqSupply.hs (renamed from compiler/basicTypes/UniqSupply.lhs)48
-rw-r--r--compiler/basicTypes/Unique.hs (renamed from compiler/basicTypes/Unique.lhs)87
-rw-r--r--compiler/basicTypes/Var.hs (renamed from compiler/basicTypes/Var.lhs)85
-rw-r--r--compiler/basicTypes/VarEnv.hs (renamed from compiler/basicTypes/VarEnv.lhs)68
-rw-r--r--compiler/basicTypes/VarSet.hs (renamed from compiler/basicTypes/VarSet.lhs)27
-rw-r--r--compiler/coreSyn/CoreArity.hs (renamed from compiler/coreSyn/CoreArity.lhs)57
-rw-r--r--compiler/coreSyn/CoreFVs.hs (renamed from compiler/coreSyn/CoreFVs.lhs)94
-rw-r--r--compiler/coreSyn/CoreLint.hs (renamed from compiler/coreSyn/CoreLint.lhs)169
-rw-r--r--compiler/coreSyn/CorePrep.hs (renamed from compiler/coreSyn/CorePrep.lhs)91
-rw-r--r--compiler/coreSyn/CoreSubst.hs (renamed from compiler/coreSyn/CoreSubst.lhs)126
-rw-r--r--compiler/coreSyn/CoreSyn.hs (renamed from compiler/coreSyn/CoreSyn.lhs)189
-rw-r--r--compiler/coreSyn/CoreTidy.hs (renamed from compiler/coreSyn/CoreTidy.lhs)42
-rw-r--r--compiler/coreSyn/CoreUnfold.hs (renamed from compiler/coreSyn/CoreUnfold.lhs)100
-rw-r--r--compiler/coreSyn/CoreUtils.hs (renamed from compiler/coreSyn/CoreUtils.lhs)276
-rw-r--r--compiler/coreSyn/MkCore.hs (renamed from compiler/coreSyn/MkCore.lhs)134
-rw-r--r--compiler/coreSyn/PprCore.hs (renamed from compiler/coreSyn/PprCore.lhs)77
-rw-r--r--compiler/coreSyn/TrieMap.hs (renamed from compiler/coreSyn/TrieMap.lhs)115
-rw-r--r--compiler/hsSyn/Convert.hs (renamed from compiler/hsSyn/Convert.lhs)15
-rw-r--r--compiler/hsSyn/HsBinds.hs (renamed from compiler/hsSyn/HsBinds.lhs)76
-rw-r--r--compiler/hsSyn/HsDecls.hs (renamed from compiler/hsSyn/HsDecls.lhs)200
-rw-r--r--compiler/hsSyn/HsExpr.hs (renamed from compiler/hsSyn/HsExpr.lhs)160
-rw-r--r--compiler/hsSyn/HsExpr.hs-boot (renamed from compiler/hsSyn/HsExpr.lhs-boot)10
-rw-r--r--compiler/hsSyn/HsImpExp.hs (renamed from compiler/hsSyn/HsImpExp.lhs)43
-rw-r--r--compiler/hsSyn/HsLit.hs (renamed from compiler/hsSyn/HsLit.lhs)34
-rw-r--r--compiler/hsSyn/HsPat.hs (renamed from compiler/hsSyn/HsPat.lhs)73
-rw-r--r--compiler/hsSyn/HsPat.hs-boot (renamed from compiler/hsSyn/HsPat.lhs-boot)2
-rw-r--r--compiler/hsSyn/HsSyn.hs (renamed from compiler/hsSyn/HsSyn.lhs)16
-rw-r--r--compiler/hsSyn/HsTypes.hs (renamed from compiler/hsSyn/HsTypes.lhs)144
-rw-r--r--compiler/hsSyn/HsUtils.hs (renamed from compiler/hsSyn/HsUtils.lhs)97
-rw-r--r--compiler/iface/BuildTyCl.hs (renamed from compiler/iface/BuildTyCl.lhs)19
-rw-r--r--compiler/iface/IfaceEnv.hs (renamed from compiler/iface/IfaceEnv.lhs)60
-rw-r--r--compiler/iface/IfaceSyn.hs (renamed from compiler/iface/IfaceSyn.lhs)169
-rw-r--r--compiler/iface/IfaceType.hs (renamed from compiler/iface/IfaceType.lhs)102
-rw-r--r--compiler/iface/LoadIface.hs (renamed from compiler/iface/LoadIface.lhs)262
-rw-r--r--compiler/iface/MkIface.hs (renamed from compiler/iface/MkIface.lhs)90
-rw-r--r--compiler/iface/TcIface.hs (renamed from compiler/iface/TcIface.lhs)214
-rw-r--r--compiler/iface/TcIface.hs-boot (renamed from compiler/iface/TcIface.lhs-boot)3
-rw-r--r--compiler/main/CodeOutput.hs (renamed from compiler/main/CodeOutput.lhs)89
-rw-r--r--compiler/main/Constants.hs (renamed from compiler/main/Constants.lhs)9
-rw-r--r--compiler/main/DriverPipeline.hs49
-rw-r--r--compiler/main/ErrUtils.hs (renamed from compiler/main/ErrUtils.lhs)14
-rw-r--r--compiler/main/ErrUtils.hs-boot (renamed from compiler/main/ErrUtils.lhs-boot)3
-rw-r--r--compiler/main/Finder.hs (renamed from compiler/main/Finder.lhs)11
-rw-r--r--compiler/main/Hooks.hs (renamed from compiler/main/Hooks.lhs)19
-rw-r--r--compiler/main/Hooks.hs-boot (renamed from compiler/main/Hooks.lhs-boot)4
-rw-r--r--compiler/main/HscMain.hs5
-rw-r--r--compiler/main/HscTypes.hs (renamed from compiler/main/HscTypes.lhs)282
-rw-r--r--compiler/main/Packages.hs (renamed from compiler/main/Packages.lhs)8
-rw-r--r--compiler/main/Packages.hs-boot (renamed from compiler/main/Packages.lhs-boot)2
-rw-r--r--compiler/main/SysTools.hs (renamed from compiler/main/SysTools.lhs)60
-rw-r--r--compiler/main/TidyPgm.hs (renamed from compiler/main/TidyPgm.lhs)148
-rw-r--r--compiler/prelude/ForeignCall.hs (renamed from compiler/prelude/ForeignCall.lhs)74
-rw-r--r--compiler/prelude/PrelInfo.hs (renamed from compiler/prelude/PrelInfo.lhs)74
-rw-r--r--compiler/prelude/PrelNames.hs (renamed from compiler/prelude/PrelNames.lhs)182
-rw-r--r--compiler/prelude/PrelNames.hs-boot (renamed from compiler/prelude/PrelNames.lhs-boot)3
-rw-r--r--compiler/prelude/PrelRules.hs (renamed from compiler/prelude/PrelRules.lhs)90
-rw-r--r--compiler/prelude/PrimOp.hs (renamed from compiler/prelude/PrimOp.lhs)173
-rw-r--r--compiler/prelude/PrimOp.hs-boot (renamed from compiler/prelude/PrimOp.lhs-boot)4
-rw-r--r--compiler/prelude/TysPrim.hs (renamed from compiler/prelude/TysPrim.lhs)225
-rw-r--r--compiler/prelude/TysWiredIn.hs (renamed from compiler/prelude/TysWiredIn.lhs)150
-rw-r--r--compiler/prelude/TysWiredIn.hs-boot (renamed from compiler/prelude/TysWiredIn.lhs-boot)2
-rw-r--r--compiler/rename/RnBinds.hs (renamed from compiler/rename/RnBinds.lhs)126
-rw-r--r--compiler/rename/RnEnv.hs (renamed from compiler/rename/RnEnv.lhs)177
-rw-r--r--compiler/rename/RnExpr.hs (renamed from compiler/rename/RnExpr.lhs)121
-rw-r--r--compiler/rename/RnExpr.hs-boot (renamed from compiler/rename/RnExpr.lhs-boot)7
-rw-r--r--compiler/rename/RnNames.hs (renamed from compiler/rename/RnNames.lhs)135
-rw-r--r--compiler/rename/RnPat.hs (renamed from compiler/rename/RnPat.lhs)197
-rw-r--r--compiler/rename/RnSource.hs (renamed from compiler/rename/RnSource.lhs)223
-rw-r--r--compiler/rename/RnSplice.hs (renamed from compiler/rename/RnSplice.lhs)45
-rw-r--r--compiler/rename/RnSplice.hs-boot (renamed from compiler/rename/RnSplice.lhs-boot)2
-rw-r--r--compiler/rename/RnTypes.hs (renamed from compiler/rename/RnTypes.lhs)84
-rw-r--r--compiler/simplCore/CSE.hs (renamed from compiler/simplCore/CSE.lhs)35
-rw-r--r--compiler/simplCore/CoreMonad.hs (renamed from compiler/simplCore/CoreMonad.lhs)186
-rw-r--r--compiler/simplCore/FloatIn.hs (renamed from compiler/simplCore/FloatIn.lhs)75
-rw-r--r--compiler/simplCore/FloatOut.hs (renamed from compiler/simplCore/FloatOut.lhs)69
-rw-r--r--compiler/simplCore/LiberateCase.hs (renamed from compiler/simplCore/LiberateCase.lhs)71
-rw-r--r--compiler/simplCore/OccurAnal.hs (renamed from compiler/simplCore/OccurAnal.lhs)142
-rw-r--r--compiler/simplCore/SAT.hs (renamed from compiler/simplCore/SAT.lhs)43
-rw-r--r--compiler/simplCore/SetLevels.hs (renamed from compiler/simplCore/SetLevels.lhs)109
-rw-r--r--compiler/simplCore/SimplCore.hs (renamed from compiler/simplCore/SimplCore.lhs)101
-rw-r--r--compiler/simplCore/SimplEnv.hs (renamed from compiler/simplCore/SimplEnv.lhs)112
-rw-r--r--compiler/simplCore/SimplMonad.hs (renamed from compiler/simplCore/SimplMonad.lhs)51
-rw-r--r--compiler/simplCore/SimplUtils.hs (renamed from compiler/simplCore/SimplUtils.lhs)164
-rw-r--r--compiler/simplCore/Simplify.hs (renamed from compiler/simplCore/Simplify.lhs)195
-rw-r--r--compiler/simplStg/SimplStg.hs (renamed from compiler/simplStg/SimplStg.lhs)11
-rw-r--r--compiler/simplStg/StgStats.hs (renamed from compiler/simplStg/StgStats.lhs)50
-rw-r--r--compiler/simplStg/UnariseStg.hs (renamed from compiler/simplStg/UnariseStg.lhs)55
-rw-r--r--compiler/specialise/Rules.hs (renamed from compiler/specialise/Rules.lhs)99
-rw-r--r--compiler/specialise/SpecConstr.hs (renamed from compiler/specialise/SpecConstr.lhs)116
-rw-r--r--compiler/specialise/Specialise.hs (renamed from compiler/specialise/Specialise.lhs)129
-rw-r--r--compiler/stgSyn/CoreToStg.hs (renamed from compiler/stgSyn/CoreToStg.lhs)2
-rw-r--r--compiler/stgSyn/StgLint.hs (renamed from compiler/stgSyn/StgLint.lhs)77
-rw-r--r--compiler/stgSyn/StgSyn.hs (renamed from compiler/stgSyn/StgSyn.lhs)228
-rw-r--r--compiler/typecheck/Inst.lhs15
-rw-r--r--compiler/typecheck/TcEnv.lhs21
-rw-r--r--compiler/typecheck/TcGenDeriv.lhs83
-rw-r--r--compiler/typecheck/TcInteract.lhs4
-rw-r--r--compiler/typecheck/TcRnDriver.lhs3
-rw-r--r--compiler/typecheck/TcRnTypes.lhs1
-rw-r--r--compiler/types/InstEnv.hs413
-rw-r--r--compiler/utils/Bag.hs (renamed from compiler/utils/Bag.lhs)17
-rw-r--r--compiler/utils/Digraph.hs (renamed from compiler/utils/Digraph.lhs)186
-rw-r--r--compiler/utils/FastBool.hs (renamed from compiler/utils/FastBool.lhs)10
-rw-r--r--compiler/utils/FastFunctions.hs (renamed from compiler/utils/FastFunctions.lhs)9
-rw-r--r--compiler/utils/FastMutInt.hs (renamed from compiler/utils/FastMutInt.lhs)5
-rw-r--r--compiler/utils/FastString.hs (renamed from compiler/utils/FastString.lhs)7
-rw-r--r--compiler/utils/FastTypes.hs (renamed from compiler/utils/FastTypes.lhs)10
-rw-r--r--compiler/utils/FiniteMap.hs (renamed from compiler/utils/FiniteMap.lhs)5
-rw-r--r--compiler/utils/ListSetOps.hs (renamed from compiler/utils/ListSetOps.lhs)67
-rw-r--r--compiler/utils/Maybes.hs (renamed from compiler/utils/Maybes.lhs)49
-rw-r--r--compiler/utils/OrdList.hs (renamed from compiler/utils/OrdList.lhs)11
-rw-r--r--compiler/utils/Outputable.hs (renamed from compiler/utils/Outputable.lhs)130
-rw-r--r--compiler/utils/Outputable.hs-boot (renamed from compiler/utils/Outputable.lhs-boot)4
-rw-r--r--compiler/utils/Pair.hs (renamed from compiler/utils/Pair.lhs)5
-rw-r--r--compiler/utils/Panic.hs (renamed from compiler/utils/Panic.lhs)12
-rw-r--r--compiler/utils/Pretty.hs (renamed from compiler/utils/Pretty.lhs)207
-rw-r--r--compiler/utils/StringBuffer.hs (renamed from compiler/utils/StringBuffer.lhs)12
-rw-r--r--compiler/utils/UniqFM.hs (renamed from compiler/utils/UniqFM.lhs)61
-rw-r--r--compiler/utils/UniqSet.hs (renamed from compiler/utils/UniqSet.lhs)38
-rw-r--r--compiler/utils/Util.hs (renamed from compiler/utils/Util.lhs)205
-rw-r--r--docs/users_guide/separate_compilation.xml6
m---------libraries/Cabal0
-rw-r--r--libraries/base/Data/Fixed.hs4
-rw-r--r--libraries/base/GHC/Natural.hs6
-rw-r--r--libraries/base/changelog.md2
-rw-r--r--libraries/base/tests/data-fixed-show-read.hs7
-rw-r--r--libraries/base/tests/data-fixed-show-read.stdout2
-rw-r--r--libraries/ghc-prim/GHC/Types.hs12
m---------libraries/parallel0
-rw-r--r--packages2
-rw-r--r--testsuite/tests/deriving/should_compile/T4896.hs19
-rw-r--r--testsuite/tests/deriving/should_compile/T7947.hs16
-rw-r--r--testsuite/tests/deriving/should_compile/T7947a.hs3
-rw-r--r--testsuite/tests/deriving/should_compile/T7947b.hs3
-rw-r--r--testsuite/tests/deriving/should_compile/all.T3
-rw-r--r--testsuite/tests/ghc-api/annotations/Makefile9
-rw-r--r--testsuite/tests/perf/compiler/all.T22
-rw-r--r--testsuite/tests/perf/haddock/all.T13
-rw-r--r--testsuite/tests/perf/should_run/all.T9
-rw-r--r--testsuite/tests/perf/space_leaks/all.T9
-rw-r--r--testsuite/tests/typecheck/should_fail/T4921.hs12
-rw-r--r--testsuite/tests/typecheck/should_fail/T4921.stderr19
-rw-r--r--testsuite/tests/typecheck/should_fail/all.T1
166 files changed, 5684 insertions, 6566 deletions
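
Nearly all of the churn above comes from the merged branch's conversion of GHC's
LaTeX-style literate Haskell sources (.lhs) into plain Haskell (.hs): the prose that
used to sit between \end{code} and \begin{code} brackets becomes an ordinary
{- ... -} block comment, and the code itself is carried over largely unchanged
(some hunks also drop trailing whitespace). A minimal before/after sketch of the
pattern, using a hypothetical module that is not part of the patch:

Before (Example.lhs, LaTeX-style literate Haskell):

    %
    % (c) An example copyright header
    %
    \section[Example]{An example section heading}

    \begin{code}
    module Example ( SwapFlag(..), isSwapped ) where

    data SwapFlag = NotSwapped | IsSwapped

    isSwapped :: SwapFlag -> Bool
    isSwapped IsSwapped  = True
    isSwapped NotSwapped = False
    \end{code}

After (Example.hs, plain Haskell):

    {-
    (c) An example copyright header

    \section[Example]{An example section heading}
    -}
    module Example ( SwapFlag(..), isSwapped ) where

    data SwapFlag = NotSwapped | IsSwapped

    isSwapped :: SwapFlag -> Bool
    isSwapped IsSwapped  = True
    isSwapped NotSwapped = False
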
diff --git a/compiler/basicTypes/BasicTypes.lhs b/compiler/basicTypes/BasicTypes.hs
index d8c651964c..d2207d48f4 100644
--- a/compiler/basicTypes/BasicTypes.lhs
+++ b/compiler/basicTypes/BasicTypes.hs
@@ -1,7 +1,7 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1997-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1997-1998
+
\section[BasicTypes]{Miscellanous types}
This module defines a miscellaneously collection of very simple
@@ -12,8 +12,8 @@ types that
\item don't depend on any other complicated types
\item are used in more than one "part" of the compiler
\end{itemize}
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
module BasicTypes(
@@ -94,15 +94,15 @@ import SrcLoc ( Located,unLoc )
import Data.Data hiding (Fixity)
import Data.Function (on)
import GHC.Exts (Any)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Arity]{Arity}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | The number of value arguments that can be applied to a value before it does
-- "real work". So:
-- fib 100 has arity 0
@@ -115,40 +115,40 @@ type Arity = Int
-- \x -> fib x has representation arity 1
-- \(# x, y #) -> fib (x + y) has representation arity 2
type RepArity = Int
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Constructor tags
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Type of the tags associated with each constructor possibility
type ConTag = Int
fIRST_TAG :: ConTag
-- ^ Tags are allocated from here for real constructors
fIRST_TAG = 1
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Alignment]{Alignment}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type Alignment = Int -- align to next N-byte boundary (N must be a power of 2).
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
One-shot information
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | If the 'Id' is a lambda-bound variable then it may have lambda-bound
-- variable info. Sometimes we know whether the lambda binding this variable
-- is a \"one-shot\" lambda; that is, whether it is applied at most once.
@@ -191,16 +191,15 @@ pprOneShotInfo OneShotLam = ptext (sLit "OneShot")
instance Outputable OneShotInfo where
ppr = pprOneShotInfo
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Swap flag
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data SwapFlag
= NotSwapped -- Args are: actual, expected
| IsSwapped -- Args are: expected, actual
@@ -220,32 +219,30 @@ isSwapped NotSwapped = False
unSwap :: SwapFlag -> (a->a->b) -> a -> a -> b
unSwap NotSwapped f a b = f a b
unSwap IsSwapped f a b = f b a
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[FunctionOrData]{FunctionOrData}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data FunctionOrData = IsFunction | IsData
deriving (Eq, Ord, Data, Typeable)
instance Outputable FunctionOrData where
ppr IsFunction = text "(function)"
ppr IsData = text "(data)"
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Version]{Module and identifier version numbers}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type Version = Int
bumpVersion :: Version -> Version
@@ -253,16 +250,15 @@ bumpVersion v = v+1
initialVersion :: Version
initialVersion = 1
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Deprecations
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-
-\begin{code}
-- reason/explanation from a WARNING or DEPRECATED pragma
data WarningTxt = WarningTxt [Located FastString]
| DeprecatedTxt [Located FastString]
@@ -272,25 +268,25 @@ instance Outputable WarningTxt where
ppr (WarningTxt ws) = doubleQuotes (vcat (map (ftext . unLoc) ws))
ppr (DeprecatedTxt ds) = text "Deprecated:" <+>
doubleQuotes (vcat (map (ftext . unLoc) ds))
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Rules
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type RuleName = FastString
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Fixity]{Fixity info}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
------------------------
data Fixity = Fixity Int FixityDirection
deriving (Data, Typeable)
@@ -322,8 +318,8 @@ negateFixity, funTyFixity :: Fixity
-- Wired-in fixities
negateFixity = Fixity 6 InfixL -- Fixity of unary negate
funTyFixity = Fixity 0 InfixR -- Fixity of '->'
-\end{code}
+{-
Consider
\begin{verbatim}
@@ -331,8 +327,8 @@ Consider
\end{verbatim}
@(compareFixity op1 op2)@ tells which way to arrange appication, or
whether there's an error.
+-}
-\begin{code}
compareFixity :: Fixity -> Fixity
-> (Bool, -- Error please
Bool) -- Associate to the right: a op1 (b op2 c)
@@ -348,16 +344,15 @@ compareFixity (Fixity prec1 dir1) (Fixity prec2 dir2)
right = (False, True)
left = (False, False)
error_please = (True, False)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Top-level/local]{Top-level/not-top level flag}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data TopLevelFlag
= TopLevel
| NotTopLevel
@@ -373,16 +368,15 @@ isTopLevel NotTopLevel = False
instance Outputable TopLevelFlag where
ppr TopLevel = ptext (sLit "<TopLevel>")
ppr NotTopLevel = ptext (sLit "<NotTopLevel>")
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Boxity flag
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data Boxity
= Boxed
| Unboxed
@@ -391,16 +385,15 @@ data Boxity
isBoxed :: Boxity -> Bool
isBoxed Boxed = True
isBoxed Unboxed = False
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Recursive/Non-Recursive flag
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data RecFlag = Recursive
| NonRecursive
deriving( Eq, Data, Typeable )
@@ -420,14 +413,15 @@ boolToRecFlag False = NonRecursive
instance Outputable RecFlag where
ppr Recursive = ptext (sLit "Recursive")
ppr NonRecursive = ptext (sLit "NonRecursive")
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Code origin
-%* *
-%************************************************************************
-\begin{code}
+* *
+************************************************************************
+-}
+
data Origin = FromSource
| Generated
deriving( Eq, Data, Typeable )
@@ -439,15 +433,15 @@ isGenerated FromSource = False
instance Outputable Origin where
ppr FromSource = ptext (sLit "FromSource")
ppr Generated = ptext (sLit "Generated")
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Instance overlap flag
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | The semantics allowed for overlapping instances for a particular
-- instance. See Note [Safe Haskell isSafeOverlap] (in `InstEnv.lhs`) for a
-- explanation of the `isSafeOverlap` field.
@@ -541,15 +535,15 @@ instance Outputable OverlapMode where
pprSafeOverlap :: Bool -> SDoc
pprSafeOverlap True = ptext $ sLit "[safe]"
pprSafeOverlap False = empty
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Tuples
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data TupleSort
= BoxedTuple
| UnboxedTuple
@@ -570,13 +564,13 @@ tupleParens BoxedTuple p = parens p
tupleParens ConstraintTuple p = parens p -- The user can't write fact tuples
-- directly, we overload the (,,) syntax
tupleParens UnboxedTuple p = ptext (sLit "(#") <+> p <+> ptext (sLit "#)")
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Generic]{Generic flag}
-%* *
-%************************************************************************
+* *
+************************************************************************
This is the "Embedding-Projection pair" datatype, it contains
two pieces of code (normally either RenamedExpr's or Id's)
@@ -593,12 +587,12 @@ And we should have
T and Tring are arbitrary, but typically T is the 'main' type while
Tring is the 'representation' type. (This just helps us remember
whether to use 'from' or 'to'.
+-}
-\begin{code}
data EP a = EP { fromEP :: a, -- :: T -> Tring
toEP :: a } -- :: Tring -> T
-\end{code}
+{-
Embedding-projection pairs are used in several places:
First of all, each type constructor has an EP associated with it, the
@@ -609,18 +603,18 @@ tcMethodBinds), we are constructing bimaps by induction on the structure
of the type of the method signature.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Occurrence information}
-%* *
-%************************************************************************
+* *
+************************************************************************
This data type is used exclusively by the simplifier, but it appears in a
SubstResult, which is currently defined in VarEnv, which is pretty near
the base of the module hierarchy. So it seemed simpler to put the
defn of OccInfo here, safely at the bottom
+-}
-\begin{code}
-- | Identifier occurrence information
data OccInfo
= NoOccInfo -- ^ There are many occurrences, or unknown occurrences
@@ -639,8 +633,8 @@ data OccInfo
!RulesOnly
type RulesOnly = Bool
-\end{code}
+{-
Note [LoopBreaker OccInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~~
IAmALoopBreaker True <=> A "weak" or rules-only loop breaker
@@ -650,9 +644,8 @@ Note [LoopBreaker OccInfo]
Do not inline at all
See OccurAnal Note [Weak loop breakers]
+-}
-
-\begin{code}
isNoOcc :: OccInfo -> Bool
isNoOcc NoOccInfo = True
isNoOcc _ = False
@@ -703,9 +696,7 @@ isOneOcc _ = False
zapFragileOcc :: OccInfo -> OccInfo
zapFragileOcc (OneOcc {}) = NoOccInfo
zapFragileOcc occ = occ
-\end{code}
-\begin{code}
instance Outputable OccInfo where
-- only used for debugging; never parsed. KSW 1999-07
ppr NoOccInfo = empty
@@ -720,20 +711,20 @@ instance Outputable OccInfo where
| otherwise = char '*'
pp_args | int_cxt = char '!'
| otherwise = empty
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Default method specfication
-%* *
-%************************************************************************
+* *
+************************************************************************
The DefMethSpec enumeration just indicates what sort of default method
is used for a class. It is generated from source code, and present in
interface files; it is converted to Class.DefMeth before begin put in a
Class object.
+-}
-\begin{code}
data DefMethSpec = NoDM -- No default method
| VanillaDM -- Default method given with polymorphic code
| GenericDM -- Default method given with generic code
@@ -743,15 +734,15 @@ instance Outputable DefMethSpec where
ppr NoDM = empty
ppr VanillaDM = ptext (sLit "{- Has default method -}")
ppr GenericDM = ptext (sLit "{- Has generic default method -}")
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Success flag}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data SuccessFlag = Succeeded | Failed
instance Outputable SuccessFlag where
@@ -768,18 +759,17 @@ succeeded Failed = False
failed Succeeded = False
failed Failed = True
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Activation}
-%* *
-%************************************************************************
+* *
+************************************************************************
When a rule or inlining is active
+-}
-\begin{code}
type PhaseNum = Int -- Compilation phase
-- Phases decrease towards zero
-- Zero is the last phase
@@ -827,8 +817,8 @@ data InlineSpec -- What the user's INLINE pragma looked like
-- where there isn't any real inline pragma at all
deriving( Eq, Data, Typeable, Show )
-- Show needed for Lexer.x
-\end{code}
+{-
Note [InlinePragma]
~~~~~~~~~~~~~~~~~~~
This data type mirrors what you can write in an INLINE or NOINLINE pragma in
@@ -879,8 +869,8 @@ The main effects of CONLIKE are:
- The rule matcher consults this field. See
Note [Expanding variables] in Rules.lhs.
+-}
-\begin{code}
isConLike :: RuleMatchInfo -> Bool
isConLike ConLike = True
isConLike _ = False
@@ -1003,11 +993,7 @@ isAlwaysActive _ = False
isEarlyActive AlwaysActive = True
isEarlyActive (ActiveBefore {}) = True
isEarlyActive _ = False
-\end{code}
-
-
-\begin{code}
-- Used (instead of Rational) to represent exactly the floating point literal that we
-- encountered in the user's source program. This allows us to pretty-print exactly what
-- the user wrote, which is important e.g. for floating point numbers that can't represented
@@ -1037,10 +1023,5 @@ instance Ord FractionalLit where
instance Outputable FractionalLit where
ppr = text . fl_text
-\end{code}
-
-\begin{code}
newtype HValue = HValue Any
-
-\end{code}
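
Aside from the comment-style conversion, the BasicTypes hunks above carry the fixity
machinery: compareFixity takes the fixities of two operators in a partially parsed
expression a op1 b op2 c and reports whether to raise an ambiguity error and whether
to associate to the right. A small hypothetical driver sketching that contract (the
Main module, the chosen fixities, and the assumption that BasicTypes exports these
names when compiled against the ghc package are mine, not the patch's):

    module Main where

    import BasicTypes (Fixity(..), FixityDirection(..), compareFixity)

    main :: IO ()
    main = do
      let plusFix  = Fixity 6 InfixL   -- like Haskell's (+), infixl 6
          timesFix = Fixity 7 InfixL   -- like Haskell's (*), infixl 7
      -- Result is (error please?, associate to the right?).
      print (compareFixity plusFix timesFix)   -- (False,True):  a + (b * c)
      print (compareFixity timesFix plusFix)   -- (False,False): (a * b) + c
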
diff --git a/compiler/basicTypes/ConLike.lhs b/compiler/basicTypes/ConLike.hs
index 3414aa4230..7b8f70d625 100644
--- a/compiler/basicTypes/ConLike.lhs
+++ b/compiler/basicTypes/ConLike.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1998
+
\section[ConLike]{@ConLike@: Constructor-like things}
+-}
-\begin{code}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module ConLike (
@@ -23,29 +23,28 @@ import Name
import Data.Function (on)
import qualified Data.Data as Data
import qualified Data.Typeable
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Constructor-like things}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A constructor-like thing
data ConLike = RealDataCon DataCon
| PatSynCon PatSyn
deriving Data.Typeable.Typeable
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Instances}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Eq ConLike where
(==) = (==) `on` getUnique
(/=) = (/=) `on` getUnique
@@ -80,4 +79,3 @@ instance Data.Data ConLike where
toConstr _ = abstractConstr "ConLike"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "ConLike"
-\end{code}
diff --git a/compiler/basicTypes/DataCon.lhs b/compiler/basicTypes/DataCon.hs
index 3305a90540..4323d6d147 100644
--- a/compiler/basicTypes/DataCon.lhs
+++ b/compiler/basicTypes/DataCon.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1998
+
\section[DataCon]{@DataCon@: Data Constructors}
+-}
-\begin{code}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module DataCon (
@@ -71,9 +71,8 @@ import qualified Data.Typeable
import Data.Maybe
import Data.Char
import Data.Word
-\end{code}
-
+{-
Data constructor representation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following Haskell data type declaration
@@ -238,13 +237,13 @@ Does the C constructor in Core contain the Ord dictionary? Yes, it must:
Note that (Foo a) might not be an instance of Ord.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Data constructors}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A data constructor
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
@@ -460,8 +459,8 @@ data HsBang
-- StrictnessMark is internal only, used to indicate strictness
-- of the DataCon *worker* fields
data StrictnessMark = MarkedStrict | NotMarkedStrict
-\end{code}
+{-
Note [Data con representation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The dcRepType field contains the type of the representation of a contructor
@@ -502,13 +501,13 @@ For imported data types, the dcArgBangs field is just the same as the
dcr_bangs field; we don't know what the user originally said.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Instances}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Eq DataCon where
a == b = getUnique a == getUnique b
a /= b = getUnique a /= getUnique b
@@ -572,16 +571,15 @@ isBanged _ = True
isMarkedStrict :: StrictnessMark -> Bool
isMarkedStrict NotMarkedStrict = False
isMarkedStrict _ = True -- All others are strict
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Construction}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Build a new data constructor
mkDataCon :: Name
-> Bool -- ^ Is the constructor declared infix?
@@ -659,8 +657,8 @@ mkDataCon name declared_infix
eqSpecPreds :: [(TyVar,Type)] -> ThetaType
eqSpecPreds spec = [ mkEqPred (mkTyVarTy tv) ty | (tv,ty) <- spec ]
-\end{code}
+{-
Note [Unpack equality predicates]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have a GADT with a contructor C :: (a~[b]) => b -> T a
@@ -669,8 +667,8 @@ takes no space at all. This is easily done: just give it
an UNPACK pragma. The rest of the unpack/repack code does the
heavy lifting. This one line makes every GADT take a word less
space for each equality predicate, so it's pretty important!
+-}
-\begin{code}
-- | The 'Name' of the 'DataCon', giving it a unique, rooted identification
dataConName :: DataCon -> Name
dataConName = dcName
@@ -911,9 +909,7 @@ dataConInstOrigArgTys dc@(MkData {dcOrigArgTys = arg_tys,
map (substTyWith tyvars inst_tys) arg_tys
where
tyvars = univ_tvs ++ ex_tvs
-\end{code}
-\begin{code}
-- | Returns the argument types of the wrapper, excluding all dictionary arguments
-- and without substituting for any type variables
dataConOrigArgTys :: DataCon -> [Type]
@@ -929,9 +925,7 @@ dataConRepArgTys (MkData { dcRep = rep
= case rep of
NoDataConRep -> ASSERT( null eq_spec ) theta ++ orig_arg_tys
DCR { dcr_arg_tys = arg_tys } -> arg_tys
-\end{code}
-\begin{code}
-- | The string @package:module.name@ identifying a constructor, which is attached
-- to its info table and used by the GHCi debugger and the heap profiler
dataConIdentity :: DataCon -> [Word8]
@@ -941,9 +935,7 @@ dataConIdentity dc = bytesFS (packageKeyFS (modulePackageKey mod)) ++
fromIntegral (ord '.') : bytesFS (occNameFS (nameOccName name))
where name = dataConName dc
mod = ASSERT( isExternalName name ) nameModule name
-\end{code}
-\begin{code}
isTupleDataCon :: DataCon -> Bool
isTupleDataCon (MkData {dcRepTyCon = tc}) = isTupleTyCon tc
@@ -953,16 +945,12 @@ isUnboxedTupleCon (MkData {dcRepTyCon = tc}) = isUnboxedTupleTyCon tc
-- | Vanilla 'DataCon's are those that are nice boring Haskell 98 constructors
isVanillaDataCon :: DataCon -> Bool
isVanillaDataCon dc = dcVanilla dc
-\end{code}
-\begin{code}
classDataCon :: Class -> DataCon
classDataCon clas = case tyConDataCons (classTyCon clas) of
(dict_constr:no_more) -> ASSERT( null no_more ) dict_constr
[] -> panic "classDataCon"
-\end{code}
-\begin{code}
dataConCannotMatch :: [Type] -> DataCon -> Bool
-- Returns True iff the data con *definitely cannot* match a
-- scrutinee of type (T tys)
@@ -986,18 +974,18 @@ dataConCannotMatch tys con
EqPred NomEq ty1 ty2 -> [(ty1, ty2)]
TuplePred ts -> concatMap predEqs ts
_ -> []
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Building an algebraic data type
-%* *
-%************************************************************************
+* *
+************************************************************************
buildAlgTyCon is here because it is called from TysWiredIn, which in turn
depends on DataCon, but not on BuildTyCl.
+-}
-\begin{code}
buildAlgTyCon :: Name
-> [TyVar] -- ^ Kind variables and type variables
-> [Role]
@@ -1024,28 +1012,27 @@ buildAlgTyCon tc_name ktvs roles cType stupid_theta rhs
mb_promoted_tc
| is_promotable = Just (mkPromotedTyCon tc (promoteKind kind))
| otherwise = Nothing
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Promoting of data types to the kind level
-%* *
-%************************************************************************
+* *
+************************************************************************
These two 'promoted..' functions are here because
* They belong together
* 'promoteDataCon' depends on DataCon stuff
+-}
-\begin{code}
promoteDataCon :: DataCon -> TyCon
promoteDataCon (MkData { dcPromoted = Just tc }) = tc
promoteDataCon dc = pprPanic "promoteDataCon" (ppr dc)
promoteDataCon_maybe :: DataCon -> Maybe TyCon
promoteDataCon_maybe (MkData { dcPromoted = mb_tc }) = mb_tc
-\end{code}
+{-
Note [Promoting a Type to a Kind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppsoe we have a data constructor D
@@ -1062,8 +1049,8 @@ The transformation from type to kind is done by promoteType
* Ensure that all type constructors mentioned (Maybe and T
in the example) are promotable; that is, they have kind
* -> ... -> * -> *
+-}
-\begin{code}
-- | Promotes a type to a kind.
-- Assumes the argument satisfies 'isPromotableType'
promoteType :: Type -> Kind
@@ -1088,15 +1075,15 @@ promoteKind (TyConApp tc [])
| tc `hasKey` liftedTypeKindTyConKey = superKind
promoteKind (FunTy arg res) = FunTy (promoteKind arg) (promoteKind res)
promoteKind k = pprPanic "promoteKind" (ppr k)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Splitting products}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Extract the type constructor, type argument, data constructor and it's
-- /representation/ argument types from a type if it is a product type.
--
@@ -1126,4 +1113,3 @@ splitDataProductType_maybe ty
= Just (tycon, ty_args, con, dataConInstArgTys con ty_args)
| otherwise
= Nothing
-\end{code}
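
The promotion hunks above (promoteDataCon, promoteType, and Note [Promoting a Type
to a Kind]) are the machinery behind -XDataKinds: constructors of a suitably simple
data type can be reused at the type level, classified by the promoted kind. A tiny,
self-contained illustration of what that buys the user (module and names are
hypothetical, not part of the patch):

    {-# LANGUAGE DataKinds, GADTs, KindSignatures #-}
    module PromoteSketch where

    -- Nat is promotable: Zero and Succ can also appear at the type level
    -- (written 'Zero and 'Succ), with kind Nat.
    data Nat = Zero | Succ Nat

    -- A length-indexed vector: the index lives in the promoted kind Nat.
    data Vec (n :: Nat) a where
      VNil  :: Vec 'Zero a
      VCons :: a -> Vec n a -> Vec ('Succ n) a

    -- The type tracks the length of the vector.
    twoInts :: Vec ('Succ ('Succ 'Zero)) Int
    twoInts = VCons 1 (VCons 2 VNil)
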
diff --git a/compiler/basicTypes/DataCon.lhs-boot b/compiler/basicTypes/DataCon.hs-boot
index 08920ccf64..5370a87d32 100644
--- a/compiler/basicTypes/DataCon.lhs-boot
+++ b/compiler/basicTypes/DataCon.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module DataCon where
import Name( Name, NamedThing )
import {-# SOURCE #-} TyCon( TyCon )
@@ -17,4 +16,3 @@ instance Uniquable DataCon
instance NamedThing DataCon
instance Outputable DataCon
instance OutputableBndr DataCon
-\end{code}
diff --git a/compiler/basicTypes/Demand.lhs b/compiler/basicTypes/Demand.hs
index f553fc2ae5..ecf22bc51f 100644
--- a/compiler/basicTypes/Demand.lhs
+++ b/compiler/basicTypes/Demand.hs
@@ -1,22 +1,22 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[Demand]{@Demand@: A decoupled implementation of a demand domain}
+-}
-\begin{code}
{-# LANGUAGE CPP, FlexibleInstances, TypeSynonymInstances #-}
module Demand (
- StrDmd, UseDmd(..), Count(..),
+ StrDmd, UseDmd(..), Count(..),
countOnce, countMany, -- cardinality
- Demand, CleanDemand,
+ Demand, CleanDemand,
mkProdDmd, mkOnceUsedDmd, mkManyUsedDmd, mkHeadStrict, oneifyDmd,
- getUsage, toCleanDmd,
+ getUsage, toCleanDmd,
absDmd, topDmd, botDmd, seqDmd,
- lubDmd, bothDmd, apply1Dmd, apply2Dmd,
- isTopDmd, isBotDmd, isAbsDmd, isSeqDmd,
+ lubDmd, bothDmd, apply1Dmd, apply2Dmd,
+ isTopDmd, isBotDmd, isAbsDmd, isSeqDmd,
peelUseCall, cleanUseDmd_maybe, strictenDmd, bothCleanDmd,
DmdType(..), dmdTypeDepth, lubDmdType, bothDmdType,
@@ -30,14 +30,14 @@ module Demand (
DmdResult, CPRResult,
isBotRes, isTopRes,
topRes, botRes, cprProdRes, vanillaCprProdRes, cprSumRes,
- appIsBottom, isBottomingSig, pprIfaceStrictSig,
+ appIsBottom, isBottomingSig, pprIfaceStrictSig,
trimCPRInfo, returnsCPR_maybe,
StrictSig(..), mkStrictSig, mkClosedStrictSig, nopSig, botSig, cprProdSig,
isNopSig, splitStrictSig, increaseStrictSigArity,
- seqDemand, seqDemandList, seqDmdType, seqStrictSig,
+ seqDemand, seqDemandList, seqDmdType, seqStrictSig,
- evalDmd, cleanEvalDmd, cleanEvalProdDmd, isStrictDmd,
+ evalDmd, cleanEvalDmd, cleanEvalProdDmd, isStrictDmd,
splitDmdTy, splitFVs,
deferAfterIO,
postProcessUnsat, postProcessDmdTypeM,
@@ -70,13 +70,13 @@ import Type ( Type, isUnLiftedType )
import TyCon ( isNewTyCon, isClassTyCon )
import DataCon ( splitDataProductType_maybe )
import FastString
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Strictness domain}
-%* *
-%************************************************************************
+* *
+************************************************************************
Lazy
|
@@ -85,12 +85,11 @@ import FastString
SCall SProd
\ /
HyperStr
-
-\begin{code}
+-}
-- Vanilla strictness domain
data StrDmd
- = HyperStr -- Hyper-strict
+ = HyperStr -- Hyper-strict
-- Bottom of the lattice
-- Note [HyperStr and Use demands]
@@ -168,7 +167,7 @@ lubStr HeadStr _ = HeadStr
bothMaybeStr :: MaybeStr -> MaybeStr -> MaybeStr
bothMaybeStr Lazy s = s
-bothMaybeStr s Lazy = s
+bothMaybeStr s Lazy = s
bothMaybeStr (Str s1) (Str s2) = Str (s1 `bothStr` s2)
bothStr :: StrDmd -> StrDmd -> StrDmd
@@ -181,7 +180,7 @@ bothStr (SCall _) (SProd _) = HyperStr -- Weird
bothStr (SProd _) HyperStr = HyperStr
bothStr (SProd s1) HeadStr = SProd s1
-bothStr (SProd s1) (SProd s2)
+bothStr (SProd s1) (SProd s2)
| length s1 == length s2 = mkSProd (zipWith bothMaybeStr s1 s2)
| otherwise = HyperStr -- Weird
bothStr (SProd _) (SCall _) = HyperStr
@@ -189,7 +188,7 @@ bothStr (SProd _) (SCall _) = HyperStr
-- utility functions to deal with memory leaks
seqStrDmd :: StrDmd -> ()
seqStrDmd (SProd ds) = seqStrDmdList ds
-seqStrDmd (SCall s) = s `seq` ()
+seqStrDmd (SCall s) = s `seq` ()
seqStrDmd _ = ()
seqStrDmdList :: [MaybeStr] -> ()
@@ -208,13 +207,13 @@ splitStrProdDmd n (SProd ds) = ASSERT( ds `lengthIs` n) Just ds
splitStrProdDmd _ (SCall {}) = Nothing
-- This can happen when the programmer uses unsafeCoerce,
-- and we don't then want to crash the compiler (Trac #9208)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Absence domain}
-%* *
-%************************************************************************
+* *
+************************************************************************
Used
/ \
@@ -223,21 +222,20 @@ splitStrProdDmd _ (SCall {}) = Nothing
UHead
|
Abs
-
-\begin{code}
+-}
-- Domain for genuine usage
data UseDmd
= UCall Count UseDmd -- Call demand for absence
-- Used only for values of function type
- | UProd [MaybeUsed] -- Product
+ | UProd [MaybeUsed] -- Product
-- Used only for values of product type
-- See Note [Don't optimise UProd(Used) to Used]
-- [Invariant] Not all components are Abs
-- (in that case, use UHead)
- | UHead -- May be used; but its sub-components are
+ | UHead -- May be used; but its sub-components are
-- definitely *not* used. Roughly U(AAA)
-- Eg the usage of x in x `seq` e
-- A polymorphic demand: used for values of all types,
@@ -254,17 +252,17 @@ data MaybeUsed
= Abs -- Definitely unused
-- Bottom of the lattice
- | Use Count UseDmd -- May be used with some cardinality
+ | Use Count UseDmd -- May be used with some cardinality
deriving ( Eq, Show )
-- Abstract counting of usages
data Count = One | Many
- deriving ( Eq, Show )
+ deriving ( Eq, Show )
-- Pretty-printing
instance Outputable MaybeUsed where
ppr Abs = char 'A'
- ppr (Use Many a) = ppr a
+ ppr (Use Many a) = ppr a
ppr (Use One a) = char '1' <> char '*' <> ppr a
instance Outputable UseDmd where
@@ -287,18 +285,18 @@ useBot = Abs
useTop = Use Many Used
mkUCall :: Count -> UseDmd -> UseDmd
---mkUCall c Used = Used c
+--mkUCall c Used = Used c
mkUCall c a = UCall c a
mkUProd :: [MaybeUsed] -> UseDmd
-mkUProd ux
+mkUProd ux
| all (== Abs) ux = UHead
| otherwise = UProd ux
lubCount :: Count -> Count -> Count
lubCount _ Many = Many
lubCount Many _ = Many
-lubCount x _ = x
+lubCount x _ = x
lubMaybeUsed :: MaybeUsed -> MaybeUsed -> MaybeUsed
lubMaybeUsed Abs x = x
@@ -310,7 +308,7 @@ lubUse UHead u = u
lubUse (UCall c u) UHead = UCall c u
lubUse (UCall c1 u1) (UCall c2 u2) = UCall (lubCount c1 c2) (lubUse u1 u2)
lubUse (UCall _ _) _ = Used
-lubUse (UProd ux) UHead = UProd ux
+lubUse (UProd ux) UHead = UProd ux
lubUse (UProd ux1) (UProd ux2)
| length ux1 == length ux2 = UProd $ zipWith lubMaybeUsed ux1 ux2
| otherwise = Used
@@ -322,7 +320,7 @@ lubUse Used _ = Used -- Note [Used should win]
-- `both` is different from `lub` in its treatment of counting; if
-- `both` is computed for two used, the result always has
--- cardinality `Many` (except for the inner demands of UCall demand -- [TODO] explain).
+-- cardinality `Many` (except for the inner demands of UCall demand -- [TODO] explain).
-- Also, x `bothUse` x /= x (for anything but Abs).
bothMaybeUsed :: MaybeUsed -> MaybeUsed -> MaybeUsed
@@ -335,12 +333,12 @@ bothUse :: UseDmd -> UseDmd -> UseDmd
bothUse UHead u = u
bothUse (UCall c u) UHead = UCall c u
--- Exciting special treatment of inner demand for call demands:
+-- Exciting special treatment of inner demand for call demands:
-- use `lubUse` instead of `bothUse`!
bothUse (UCall _ u1) (UCall _ u2) = UCall Many (u1 `lubUse` u2)
bothUse (UCall {}) _ = Used
-bothUse (UProd ux) UHead = UProd ux
+bothUse (UProd ux) UHead = UProd ux
bothUse (UProd ux1) (UProd ux2)
| length ux1 == length ux2 = UProd $ zipWith bothMaybeUsed ux1 ux2
| otherwise = Used
@@ -353,8 +351,8 @@ bothUse Used _ = Used -- Note [Used should win]
peelUseCall :: UseDmd -> Maybe (Count, UseDmd)
peelUseCall (UCall c u) = Just (c,u)
peelUseCall _ = Nothing
-\end{code}
+{-
Note [Don't optimise UProd(Used) to Used]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
These two UseDmds:
@@ -363,7 +361,7 @@ are semantically equivalent, but we do not turn the former into
the latter, for a regrettable-subtle reason. Suppose we did.
then
f (x,y) = (y,x)
-would get
+would get
StrDmd = Str = SProd [Lazy, Lazy]
UseDmd = Used = UProd [Used, Used]
But with the joint demand of <Str, Used> doesn't convey any clue
@@ -383,7 +381,7 @@ Note [Used should win]
Both in lubUse and bothUse we want (Used `both` UProd us) to be Used.
Why? Because Used carries the implication the whole thing is used,
box and all, so we don't want to w/w it. If we use it both boxed and
-unboxed, then we are definitely using the box, and so we are quite
+unboxed, then we are definitely using the box, and so we are quite
likely to pay a reboxing cost. So we make Used win here.
Example is in the Buffer argument of GHC.IO.Handle.Internals.writeCharBuffer
@@ -401,9 +399,8 @@ Compare with: (C) making Used win for both, but UProd win for lub
Min -0.1% -0.3% -7.9% -8.0% -6.5%
Max +0.1% +1.0% +21.0% +21.0% +0.5%
Geometric Mean +0.0% +0.0% -0.0% -0.1% -0.1%
+-}
-
-\begin{code}
-- If a demand is used multiple times (i.e. reused), than any use-once
-- mentioned there, that is not protected by a UCall, can happen many times.
markReusedDmd :: MaybeUsed -> MaybeUsed
@@ -447,21 +444,21 @@ seqMaybeUsed _ = ()
splitUseProdDmd :: Int -> UseDmd -> Maybe [MaybeUsed]
splitUseProdDmd n Used = Just (replicate n useTop)
splitUseProdDmd n UHead = Just (replicate n Abs)
-splitUseProdDmd n (UProd ds) = ASSERT2( ds `lengthIs` n, text "splitUseProdDmd" $$ ppr n $$ ppr ds )
+splitUseProdDmd n (UProd ds) = ASSERT2( ds `lengthIs` n, text "splitUseProdDmd" $$ ppr n $$ ppr ds )
Just ds
splitUseProdDmd _ (UCall _ _) = Nothing
-- This can happen when the programmer uses unsafeCoerce,
-- and we don't then want to crash the compiler (Trac #9208)
-\end{code}
-%************************************************************************
-%* *
-\subsection{Joint domain for Strictness and Absence}
-%* *
-%************************************************************************
-\begin{code}
+{-
+************************************************************************
+* *
+\subsection{Joint domain for Strictness and Absence}
+* *
+************************************************************************
+-}
-data JointDmd = JD { strd :: MaybeStr, absd :: MaybeUsed }
+data JointDmd = JD { strd :: MaybeStr, absd :: MaybeUsed }
deriving ( Eq, Show )
-- Pretty-printing
@@ -474,7 +471,7 @@ mkJointDmd s a = JD { strd = s, absd = a }
mkJointDmds :: [MaybeStr] -> [MaybeUsed] -> [JointDmd]
mkJointDmds ss as = zipWithEqual "mkJointDmds" mkJointDmd ss as
-
+
absDmd :: JointDmd
absDmd = mkJointDmd Lazy Abs
@@ -493,23 +490,23 @@ botDmd :: JointDmd
botDmd = mkJointDmd strBot useBot
lubDmd :: JointDmd -> JointDmd -> JointDmd
-lubDmd (JD {strd = s1, absd = a1})
+lubDmd (JD {strd = s1, absd = a1})
(JD {strd = s2, absd = a2}) = mkJointDmd (s1 `lubMaybeStr` s2) (a1 `lubMaybeUsed` a2)
bothDmd :: JointDmd -> JointDmd -> JointDmd
-bothDmd (JD {strd = s1, absd = a1})
+bothDmd (JD {strd = s1, absd = a1})
(JD {strd = s2, absd = a2}) = mkJointDmd (s1 `bothMaybeStr` s2) (a1 `bothMaybeUsed` a2)
isTopDmd :: JointDmd -> Bool
isTopDmd (JD {strd = Lazy, absd = Use Many Used}) = True
-isTopDmd _ = False
+isTopDmd _ = False
isBotDmd :: JointDmd -> Bool
isBotDmd (JD {strd = Str HyperStr, absd = Abs}) = True
-isBotDmd _ = False
-
+isBotDmd _ = False
+
isAbsDmd :: JointDmd -> Bool
-isAbsDmd (JD {absd = Abs}) = True -- The strictness part can be HyperStr
+isAbsDmd (JD {absd = Abs}) = True -- The strictness part can be HyperStr
isAbsDmd _ = False -- for a bottom demand
isSeqDmd :: JointDmd -> Bool
@@ -547,20 +544,20 @@ splitFVs is_thunk rhs_fvs
| Lazy <- s = (addToUFM_Directly lazy_fv uniq dmd, sig_fv)
| otherwise = ( addToUFM_Directly lazy_fv uniq (JD { strd = Lazy, absd = u })
, addToUFM_Directly sig_fv uniq (JD { strd = s, absd = Abs }) )
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Clean demand for Strictness and Usage}
-%* *
-%************************************************************************
+* *
+************************************************************************
This domain differst from JointDemand in the sence that pure absence
is taken away, i.e., we deal *only* with non-absent demands.
Note [Strict demands]
~~~~~~~~~~~~~~~~~~~~~
-isStrictDmd returns true only of demands that are
+isStrictDmd returns true only of demands that are
both strict
and used
In particular, it is False for <HyperStr, Abs>, which can and does
@@ -587,11 +584,9 @@ f :: (Int -> (Int, Int)) -> (Int, Bool)
f g = (snd (g 3), True)
should be: <L,C(U(AU))>m
+-}
-
-\begin{code}
-
-data CleanDemand = CD { sd :: StrDmd, ud :: UseDmd }
+data CleanDemand = CD { sd :: StrDmd, ud :: UseDmd }
deriving ( Eq, Show )
instance Outputable CleanDemand where
@@ -601,7 +596,7 @@ mkCleanDmd :: StrDmd -> UseDmd -> CleanDemand
mkCleanDmd s a = CD { sd = s, ud = a }
bothCleanDmd :: CleanDemand -> CleanDemand -> CleanDemand
-bothCleanDmd (CD { sd = s1, ud = a1}) (CD { sd = s2, ud = a2})
+bothCleanDmd (CD { sd = s1, ud = a1}) (CD { sd = s2, ud = a2})
= CD { sd = s1 `bothStr` s2, ud = a1 `bothUse` a2 }
mkHeadStrict :: CleanDemand -> CleanDemand
@@ -623,14 +618,14 @@ evalDmd :: JointDmd
evalDmd = mkJointDmd (Str HeadStr) useTop
mkProdDmd :: [JointDmd] -> CleanDemand
-mkProdDmd dx
- = mkCleanDmd sp up
+mkProdDmd dx
+ = mkCleanDmd sp up
where
sp = mkSProd $ map strd dx
- up = mkUProd $ map absd dx
+ up = mkUProd $ map absd dx
mkCallDmd :: CleanDemand -> CleanDemand
-mkCallDmd (CD {sd = d, ud = u})
+mkCallDmd (CD {sd = d, ud = u})
= mkCleanDmd (mkSCall d) (mkUCall One u)
cleanEvalDmd :: CleanDemand
@@ -682,8 +677,8 @@ trimToType (JD ms mu) ts
go_u (UProd mus) (TsProd tss)
| equalLength mus tss = UProd (zipWith go_mu mus tss)
go_u _ _ = Used
-\end{code}
+{-
Note [Trimming a demand to a type]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
@@ -720,10 +715,9 @@ Head-stricts demands. For instance,
S ~ S(L, ..., L)
Also, when top or bottom is occurred as a result demand, it in fact
-can be expanded to saturate a callee's arity.
-
+can be expanded to saturate a callee's arity.
+-}
-\begin{code}
splitProdDmd_maybe :: JointDmd -> Maybe [JointDmd]
-- Split a product into its components, iff there is any
-- useful information to be extracted thereby
@@ -736,13 +730,13 @@ splitProdDmd_maybe (JD {strd = s, absd = u})
-> Just (mkJointDmds sx ux)
(Lazy, Use _ (UProd ux)) -> Just (mkJointDmds (replicate (length ux) Lazy) ux)
_ -> Nothing
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Demand results
-%* *
-%************************************************************************
+* *
+************************************************************************
DmdResult: Dunno CPRResult
@@ -758,11 +752,10 @@ CPRResult: NoCPR
Product contructors return (Dunno (RetProd rs))
In a fixpoint iteration, start from Diverges
We have lubs, but not glbs; but that is ok.
+-}
-
-\begin{code}
------------------------------------------------------------------------
--- Constructed Product Result
+-- Constructed Product Result
------------------------------------------------------------------------
data Termination r = Diverges -- Definitely diverges
@@ -777,7 +770,7 @@ data CPRResult = NoCPR -- Top of the lattice
deriving( Eq, Show )
lubCPR :: CPRResult -> CPRResult -> CPRResult
-lubCPR (RetSum t1) (RetSum t2)
+lubCPR (RetSum t1) (RetSum t2)
| t1 == t2 = RetSum t1
lubCPR RetProd RetProd = RetProd
lubCPR _ _ = NoCPR
@@ -885,8 +878,8 @@ resTypeArgDmd :: DmdResult -> JointDmd
-- Also see Note [defaultDmd vs. resTypeArgDmd]
resTypeArgDmd r | isBotRes r = botDmd
resTypeArgDmd _ = topDmd
-\end{code}
+{-
Note [defaultDmd and resTypeArgDmd]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -973,24 +966,24 @@ Imagine that it had millions of fields. This actually happened
in GHC itself where the tuple was DynFlags
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Demand environments and types}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type Demand = JointDmd
type DmdEnv = VarEnv Demand -- See Note [Default demand on free variables]
-data DmdType = DmdType
- DmdEnv -- Demand on explicitly-mentioned
+data DmdType = DmdType
+ DmdEnv -- Demand on explicitly-mentioned
-- free variables
[Demand] -- Demand on arguments
DmdResult -- See [Nature of result demand]
-\end{code}
+{-
Note [Nature of result demand]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A DmdResult contains information about termination (currently distinguishing
@@ -1036,7 +1029,7 @@ Note [Asymmetry of 'both' for DmdType and DmdResult]
'both' for DmdTypes is *asymmetrical*, because there is only one
result! For example, given (e1 e2), we get a DmdType dt1 for e1, use
its arg demand to analyse e2 giving dt2, and then do (dt1 `bothType` dt2).
-Similarly with
+Similarly with
case e of { p -> rhs }
we get dt_scrut from the scrutinee and dt_rhs from the RHS, and then
compute (dt_rhs `bothType` dt_scrut).
@@ -1048,9 +1041,8 @@ We
4. take CPR info from the first argument.
3 and 4 are implemented in bothDmdResult.
+-}
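A toy rendering of that asymmetry, with invented types standing in for GHC's DmdResult and Termination (a sketch only, not the real bothDmdResult):

-- Toy termination result: definite divergence, or "don't know" carrying a
-- payload that stands in for CPR information.
data SketchTerm r = SkTDiverges | SkTDunno r
  deriving Show

-- 'both' on results: divergence contributed by the second component wins,
-- but otherwise everything, in particular the CPR payload, comes from the
-- first argument, because an application has only one result.
bothSketchRes :: SketchTerm r -> SketchTerm () -> SketchTerm r
bothSketchRes _ SkTDiverges = SkTDiverges
bothSketchRes r _           = r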
-
-\begin{code}
-- Equality needed for fixpoints in DmdAnal
instance Eq DmdType where
(==) (DmdType fv1 ds1 res1)
@@ -1068,8 +1060,8 @@ lubDmdType d1 d2
lub_fv = plusVarEnv_CD lubDmd fv1 (defaultDmd r1) fv2 (defaultDmd r2)
lub_ds = zipWithEqual "lubDmdType" lubDmd ds1 ds2
lub_res = lubDmdResult r1 r2
-\end{code}
+{-
Note [The need for BothDmdArg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Previously, the right argument to bothDmdType, as well as the return value of
@@ -1077,9 +1069,8 @@ dmdAnalStar via postProcessDmdTypeM, was a DmdType. But bothDmdType only needs
to know about the free variables and termination information, but nothing about
the demand put on arguments, nor cpr information. So we make that explicit by
only passing the relevant information.
+-}
-
-\begin{code}
type BothDmdArg = (DmdEnv, Termination ())
mkBothDmdArg :: DmdEnv -> BothDmdArg
@@ -1100,7 +1091,7 @@ bothDmdType (DmdType fv1 ds1 r1) (fv2, t2)
where both_fv = plusVarEnv_CD bothDmd fv1 (defaultDmd r1) fv2 (defaultDmd t2)
instance Outputable DmdType where
- ppr (DmdType fv ds res)
+ ppr (DmdType fv ds res)
= hsep [text "DmdType",
hcat (map ppr ds) <> ppr res,
if null fv_elts then empty
@@ -1193,11 +1184,9 @@ strictenDmd (JD {strd = s, absd = u})
poke_s (Str s) = s
poke_u Abs = UHead
poke_u (Use _ u) = u
-\end{code}
-Deferring and peeeling
+-- Deferring and peeling
-\begin{code}
type DeferAndUse -- Describes how to degrade a result type
=( Bool -- Lazify (defer) the type
, Count) -- Many => manify the type
@@ -1298,8 +1287,8 @@ peelManyCalls n (CD { sd = str, ud = abs })
go_abs 0 _ = One -- one UCall Many in the demand
go_abs n (UCall One d') = go_abs (n-1) d'
go_abs _ _ = Many
-\end{code}
+{-
Note [Demands from unsaturated function calls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1337,8 +1326,8 @@ cases, and then call postProcessUnsat to reduce the demand appropriately.
Similarly, dmdTransformDictSelSig and dmdAnal, when analyzing a Lambda, use
peelCallDmd, which peels only one level, but also returns the demand put on the
body of the function.
+-}
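A minimal sketch of that one-level peeling, with an invented usage-demand type (GHC's real peelCallDmd works on CleanDemand and, as the Note says, also returns the demand put on the body):

-- Toy usage demand: a call demand wrapping the demand on the body, a
-- product demand, or plain "used".
data SketchUse = SkCall SketchUse | SkProd [SketchUse] | SkUsed
  deriving Show

-- Peel exactly one call layer: under a call demand C(d) the body of a
-- lambda is evaluated under d; any other demand degrades to a
-- conservative "used".
peelOneCall :: SketchUse -> SketchUse
peelOneCall (SkCall d) = d
peelOneCall _          = SkUsed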
-\begin{code}
peelFV :: DmdType -> Var -> (DmdType, Demand)
peelFV (DmdType fv ds res) id = -- pprTrace "rfv" (ppr id <+> ppr dmd $$ ppr fv)
(DmdType fv' ds res, dmd)
@@ -1349,8 +1338,8 @@ peelFV (DmdType fv ds res) id = -- pprTrace "rfv" (ppr id <+> ppr dmd $$ ppr fv)
addDemand :: Demand -> DmdType -> DmdType
addDemand dmd (DmdType fv ds res) = DmdType fv (dmd:ds) res
-\end{code}
+{-
Note [Default demand on free variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the variable is not mentioned in the environment of a demand type,
@@ -1369,14 +1358,14 @@ Tricky point: make sure that we analyse in the 'virgin' pass. Consider
rec { f acc x True = f (...rec { g y = ...g... }...)
f acc x False = acc }
In the virgin pass for 'f' we'll give 'f' a very strict (bottom) type.
-That might mean that we analyse the sub-expression containing the
+That might mean that we analyse the sub-expression containing the
E = "...rec g..." stuff in a bottom demand. Suppose we *didn't analyse*
-E, but just retuned botType.
+E, but just returned botType.
Then in the *next* (non-virgin) iteration for 'f', we might analyse E
in a weaker demand, and that will trigger doing a fixpoint iteration
for g. But *because it's not the virgin pass* we won't start g's
-iteration at bottom. Disaster. (This happened in $sfibToList' of
+iteration at bottom. Disaster. (This happened in $sfibToList' of
nofib/spectral/fibheaps.)
So in the virgin pass we make sure that we do analyse the expression
@@ -1446,18 +1435,18 @@ There are several wrinkles:
'f' above.
-%************************************************************************
-%* *
+************************************************************************
+* *
Demand signatures
-%* *
-%************************************************************************
+* *
+************************************************************************
-In a let-bound Id we record its strictness info.
+In a let-bound Id we record its strictness info.
In principle, this strictness info is a demand transformer, mapping
a demand on the Id into a DmdType, which gives
a) the free vars of the Id's value
b) the Id's arguments
- c) an indication of the result of applying
+ c) an indication of the result of applying
the Id to its arguments
However, in fact we store in the Id an extremely emasculated demand
@@ -1485,8 +1474,8 @@ and <L,U(U,U)> on the second, then returning a constructor.
If this same function is applied to one arg, all we can say is that it
uses x with <L,U>, and its arg with demand <L,U>.
+-}
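The "signature at a fixed arity" idea can be sketched with invented types (Strings stand in for real demands; GHC's StrictSig wraps a DmdType as shown just below):

-- Toy signature: the argument demands the function was analysed at, plus
-- a summary of its result.
data SketchSig = SketchSig
  { sigArgDmds :: [String]   -- e.g. ["<S,U(U)>", "<L,U(U,U)>"]
  , sigRes     :: String     -- e.g. "constructed product"
  }

-- Apply the signature to a call with n value arguments.  If the call is
-- at least as saturated as the signature we can use it; otherwise all we
-- can report is a lazy, used demand on each argument, as in the example
-- above.
sketchTransform :: SketchSig -> Int -> ([String], String)
sketchTransform (SketchSig args res) n
  | n >= length args = (take n (args ++ repeat "<L,U>"), res)
  | otherwise        = (replicate n "<L,U>", "Top")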
-\begin{code}
newtype StrictSig = StrictSig DmdType
deriving( Eq )
@@ -1537,9 +1526,9 @@ dmdTransformSig (StrictSig dmd_ty@(DmdType _ arg_ds _)) cd
-- see Note [Demands from unsaturated function calls]
dmdTransformDataConSig :: Arity -> StrictSig -> CleanDemand -> DmdType
--- Same as dmdTransformSig but for a data constructor (worker),
+-- Same as dmdTransformSig but for a data constructor (worker),
-- which has a special kind of demand transformer.
--- If the constructor is saturated, we feed the demand on
+-- If the constructor is saturated, we feed the demand on
-- the result into the constructor arguments.
dmdTransformDataConSig arity (StrictSig (DmdType _ _ con_res))
(CD { sd = str, ud = abs })
@@ -1577,8 +1566,8 @@ dmdTransformDictSelSig (StrictSig (DmdType _ [dict_dmd] _)) cd
| otherwise = mkOnceUsedDmd cd -- This is the one!
dmdTransformDictSelSig _ _ = panic "dmdTransformDictSelSig: no args"
-\end{code}
+{-
Note [Demand transformer for a dictionary selector]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we evaluate (op dict-expr) under demand 'd', then we can push the demand 'd'
@@ -1586,7 +1575,7 @@ into the appropriate field of the dictionary. What *is* the appropriate field?
We just look at the strictness signature of the class op, which will be
something like: U(AAASAAAAA). Then replace the 'S' by the demand 'd'.
-For single-method classes, which are represented by newtypes the signature
+For single-method classes, which are represented by newtypes, the signature
of 'op' won't look like U(...), so the splitProdDmd_maybe will fail.
That's fine: if we are doing strictness analysis we are also doing inlining,
so we'll have inlined 'op' into a cast. So we can bale out in a conservative
@@ -1600,8 +1589,8 @@ ops. Now if a subsequent module in the --make sweep has a local -O flag
you might do strictness analysis, but there is no inlining for the class op.
This is weird, so I'm not worried about whether this optimises brilliantly; but
it should not fall over.
+-}
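The field-replacement step this Note describes can be pictured in isolation (invented types; the real transformer goes through splitProdDmd_maybe on the selector's U(...) argument demand):

-- Toy per-field demand: absent, or some real demand (a String stands in
-- for it).
data SketchField = SkAbsent | SkField String
  deriving Show

-- Walk the selector's dictionary demand, one entry per field, and replace
-- the single non-absent entry, the 'S' in U(AAASAAAAA), with the demand
-- on the selection.
pushDemandIntoDict :: String -> [SketchField] -> [SketchField]
pushDemandIntoDict d = map swap
  where
    swap SkAbsent    = SkAbsent
    swap (SkField _) = SkField d

-- e.g.  pushDemandIntoDict "<S,U>" [SkAbsent, SkField "S", SkAbsent]
--         == [SkAbsent, SkField "<S,U>", SkAbsent]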
-\begin{code}
argsOneShots :: StrictSig -> Arity -> [[OneShotInfo]]
-- See Note [Computing one-shot info, and ProbOneShot]
argsOneShots (StrictSig (DmdType _ arg_ds _)) n_val_args
@@ -1628,8 +1617,8 @@ argOneShots one_shot_info (JD { absd = usg })
go (UCall One u) = one_shot_info : go u
go (UCall Many u) = NoOneShotInfo : go u
go _ = []
-\end{code}
+{-
Note [Computing one-shot info, and ProbOneShot]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider a call
@@ -1654,17 +1643,16 @@ How is it used? Well, it's quite likely that the partial application
of f is not shared, so the float-out pass (in SetLevels.lvlLamBndrs)
does not float MFEs out of a ProbOneShot lambda. That currently is
the only way that ProbOneShot is used.
+-}
-
-\begin{code}
-- appIsBottom returns true if an application to n args would diverge
-- See Note [Unsaturated applications]
appIsBottom :: StrictSig -> Int -> Bool
appIsBottom (StrictSig (DmdType _ ds res)) n
- | isBotRes res = not $ lengthExceeds ds n
+ | isBotRes res = not $ lengthExceeds ds n
appIsBottom _ _ = False
-\end{code}
+{-
Note [Unsaturated applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a function having bottom as its demand result is applied to a less
@@ -1675,33 +1663,33 @@ of arguments, says conservatively if the function is going to diverge
or not.
Zap absence or one-shot information, under control of flags
+-}
-\begin{code}
zapDemand :: DynFlags -> Demand -> Demand
-zapDemand dflags dmd
+zapDemand dflags dmd
| Just kfs <- killFlags dflags = zap_dmd kfs dmd
| otherwise = dmd
zapStrictSig :: DynFlags -> StrictSig -> StrictSig
-zapStrictSig dflags sig@(StrictSig (DmdType env ds r))
+zapStrictSig dflags sig@(StrictSig (DmdType env ds r))
| Just kfs <- killFlags dflags = StrictSig (DmdType env (map (zap_dmd kfs) ds) r)
| otherwise = sig
type KillFlags = (Bool, Bool)
killFlags :: DynFlags -> Maybe KillFlags
-killFlags dflags
+killFlags dflags
| not kill_abs && not kill_one_shot = Nothing
| otherwise = Just (kill_abs, kill_one_shot)
where
kill_abs = gopt Opt_KillAbsence dflags
kill_one_shot = gopt Opt_KillOneShot dflags
-
+
zap_dmd :: KillFlags -> Demand -> Demand
zap_dmd kfs (JD {strd = s, absd = u}) = JD {strd = s, absd = zap_musg kfs u}
zap_musg :: KillFlags -> MaybeUsed -> MaybeUsed
-zap_musg (kill_abs, _) Abs
+zap_musg (kill_abs, _) Abs
| kill_abs = useTop
| otherwise = Abs
zap_musg kfs (Use c u) = Use (zap_count kfs c) (zap_usg kfs u)
@@ -1715,9 +1703,7 @@ zap_usg :: KillFlags -> UseDmd -> UseDmd
zap_usg kfs (UCall c u) = UCall (zap_count kfs c) (zap_usg kfs u)
zap_usg kfs (UProd us) = UProd (map (zap_musg kfs) us)
zap_usg _ u = u
-\end{code}
-\begin{code}
-- If the argument is a used non-newtype dictionary, give it strict
-- demand. Also split the product type & demand and recur in order to
-- similarly strictify the argument's contained used non-newtype
@@ -1746,8 +1732,8 @@ strictifyDictDmd ty dmd = case absd dmd of
-- TODO could optimize with an aborting variant of zipWith since
-- the superclass dicts are always a prefix
_ -> dmd -- unused or not a dictionary
-\end{code}
+{-
Note [HyperStr and Use demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1762,22 +1748,21 @@ distinguishing the uses on x and y in the True case, we could either not figure
out how deeply we can unpack x, or that we do not have to pass y.
-%************************************************************************
-%* *
+************************************************************************
+* *
Serialisation
-%* *
-%************************************************************************
-
+* *
+************************************************************************
+-}
-\begin{code}
instance Binary StrDmd where
put_ bh HyperStr = do putByte bh 0
put_ bh HeadStr = do putByte bh 1
put_ bh (SCall s) = do putByte bh 2
put_ bh s
put_ bh (SProd sx) = do putByte bh 3
- put_ bh sx
- get bh = do
+ put_ bh sx
+ get bh = do
h <- getByte bh
case h of
0 -> do return HyperStr
@@ -1788,15 +1773,15 @@ instance Binary StrDmd where
return (SProd sx)
instance Binary MaybeStr where
- put_ bh Lazy = do
+ put_ bh Lazy = do
putByte bh 0
- put_ bh (Str s) = do
+ put_ bh (Str s) = do
putByte bh 1
put_ bh s
get bh = do
h <- getByte bh
- case h of
+ case h of
0 -> return Lazy
_ -> do s <- get bh
return $ Str s
@@ -1804,32 +1789,32 @@ instance Binary MaybeStr where
instance Binary Count where
put_ bh One = do putByte bh 0
put_ bh Many = do putByte bh 1
-
+
get bh = do h <- getByte bh
case h of
0 -> return One
- _ -> return Many
+ _ -> return Many
instance Binary MaybeUsed where
- put_ bh Abs = do
+ put_ bh Abs = do
putByte bh 0
- put_ bh (Use c u) = do
+ put_ bh (Use c u) = do
putByte bh 1
put_ bh c
put_ bh u
get bh = do
h <- getByte bh
- case h of
- 0 -> return Abs
+ case h of
+ 0 -> return Abs
_ -> do c <- get bh
u <- get bh
return $ Use c u
instance Binary UseDmd where
- put_ bh Used = do
+ put_ bh Used = do
putByte bh 0
- put_ bh UHead = do
+ put_ bh UHead = do
putByte bh 1
put_ bh (UCall c u) = do
putByte bh 2
@@ -1841,7 +1826,7 @@ instance Binary UseDmd where
get bh = do
h <- getByte bh
- case h of
+ case h of
0 -> return $ Used
1 -> return $ UHead
2 -> do c <- get bh
@@ -1852,7 +1837,7 @@ instance Binary UseDmd where
instance Binary JointDmd where
put_ bh (JD {strd = x, absd = y}) = do put_ bh x; put_ bh y
- get bh = do
+ get bh = do
x <- get bh
y <- get bh
return $ mkJointDmd x y
@@ -1866,12 +1851,12 @@ instance Binary StrictSig where
instance Binary DmdType where
-- Ignore DmdEnv when spitting out the DmdType
- put_ bh (DmdType _ ds dr)
- = do put_ bh ds
+ put_ bh (DmdType _ ds dr)
+ = do put_ bh ds
put_ bh dr
- get bh
- = do ds <- get bh
- dr <- get bh
+ get bh
+ = do ds <- get bh
+ dr <- get bh
return (DmdType emptyDmdEnv ds dr)
instance Binary DmdResult where
@@ -1890,8 +1875,7 @@ instance Binary CPRResult where
get bh = do
h <- getByte bh
- case h of
+ case h of
0 -> do { n <- get bh; return (RetSum n) }
1 -> return RetProd
_ -> return NoCPR
-\end{code}
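All of the Binary instances above follow the same pattern: write a tag byte identifying the constructor, then the constructor's fields, and dispatch on the tag when reading. A self-contained sketch of that pattern, using the Hackage binary package purely as a stand-in for GHC's own Binary module:

import Data.Binary     (Binary (..), decode, encode)
import Data.Binary.Get (getWord8)
import Data.Binary.Put (putWord8)

-- A toy stand-in for MaybeStr: either lazy, or strict with some payload.
data ToyStr = ToyLazy | ToyStr Int
  deriving (Eq, Show)

instance Binary ToyStr where
  put ToyLazy    = putWord8 0            -- tag byte only
  put (ToyStr n) = putWord8 1 >> put n   -- tag byte, then the field
  get = do
    h <- getWord8
    case h of
      0 -> return ToyLazy
      _ -> fmap ToyStr get

-- Round trip: decode (encode (ToyStr 3)) == ToyStr 3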
diff --git a/compiler/basicTypes/Id.lhs b/compiler/basicTypes/Id.hs
index 85e9b3083a..fa34a4fd78 100644
--- a/compiler/basicTypes/Id.lhs
+++ b/compiler/basicTypes/Id.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[Id]{@Ids@: Value and constructor identifiers}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
-- |
@@ -41,15 +41,15 @@ module Id (
recordSelectorFieldLabel,
-- ** Modifying an Id
- setIdName, setIdUnique, Id.setIdType,
- setIdExported, setIdNotExported,
- globaliseId, localiseId,
+ setIdName, setIdUnique, Id.setIdType,
+ setIdExported, setIdNotExported,
+ globaliseId, localiseId,
setIdInfo, lazySetIdInfo, modifyIdInfo, maybeModifyIdInfo,
zapLamIdInfo, zapDemandIdInfo, zapFragileIdInfo, transferPolyIdInfo,
zapIdStrictness,
-- ** Predicates on Ids
- isImplicitId, isDeadBinder,
+ isImplicitId, isDeadBinder,
isStrictId,
isExportedId, isLocalId, isGlobalId,
isRecordSelector, isNaughtyRecordSelector,
@@ -69,7 +69,7 @@ module Id (
-- ** One-shot lambdas
isOneShotBndr, isOneShotLambda, isProbablyOneShotLambda,
- setOneShotLambda, clearOneShotLambda,
+ setOneShotLambda, clearOneShotLambda,
updOneShotInfo, setIdOneShotInfo,
isStateHackType, stateHackOneShot, typeOneShot,
@@ -92,10 +92,10 @@ module Id (
setIdCafInfo,
setIdOccInfo, zapIdOccInfo,
- setIdDemandInfo,
- setIdStrictness,
+ setIdDemandInfo,
+ setIdStrictness,
- idDemandInfo,
+ idDemandInfo,
idStrictness,
) where
@@ -147,15 +147,15 @@ infixl 1 `setIdUnfoldingLazily`,
`setIdDemandInfo`,
`setIdStrictness`
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Basic Id manipulation}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
idName :: Id -> Name
idName = Var.varName
@@ -207,13 +207,13 @@ modifyIdInfo fn id = setIdInfo id (fn (idInfo id))
maybeModifyIdInfo :: Maybe IdInfo -> Id -> Id
maybeModifyIdInfo (Just new_info) id = lazySetIdInfo id new_info
maybeModifyIdInfo Nothing id = id
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Simple Id construction}
-%* *
-%************************************************************************
+* *
+************************************************************************
Absolutely all Ids are made by mkId. It is just like Var.mkId,
but in addition it pins free-tyvar-info onto the Id's type,
@@ -228,8 +228,8 @@ the compiler overall. I don't quite know why; perhaps finding free
type variables of an Id isn't all that common whereas applying a
substitution (which changes the free type variables) is more common.
Anyway, we removed it in March 2008.
+-}
-\begin{code}
-- | For an explanation of global vs. local 'Id's, see "Var#globalvslocal"
mkGlobalId :: IdDetails -> Name -> Type -> IdInfo -> Id
mkGlobalId = Var.mkGlobalVar
@@ -283,13 +283,13 @@ mkDerivedLocalM deriv_name id ty
mkWiredInIdName :: Module -> FastString -> Unique -> Id -> Name
mkWiredInIdName mod fs uniq id
= mkWiredInName mod (mkOccNameFS varName fs) uniq (AnId id) UserSyntax
-\end{code}
+{-
Make some local @Ids@ for a template @CoreExpr@. These have bogus
@Uniques@, but that's OK because the templates are supposed to be
instantiated before use.
+-}
-\begin{code}
-- | Workers get local names. "CoreTidy" will externalise these if necessary
mkWorkerId :: Unique -> Id -> Type -> Id
mkWorkerId uniq unwrkr ty
@@ -306,8 +306,8 @@ mkTemplateLocals = mkTemplateLocalsNum 1
-- | Create a template local for a series of types, but start from a specified template local
mkTemplateLocalsNum :: Int -> [Type] -> [Id]
mkTemplateLocalsNum n tys = zipWith mkTemplateLocal [n..] tys
-\end{code}
+{-
Note [Exported LocalIds]
~~~~~~~~~~~~~~~~~~~~~~~~
We use mkExportedLocalId for things like
@@ -343,13 +343,13 @@ In CoreTidy we must make all these LocalIds into GlobalIds, so that in
importing modules (in --make mode) we treat them as properly global.
That is what is happening in, say tidy_insts in TidyPgm.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Special Ids}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | If the 'Id' is that for a record selector, extract the 'sel_tycon' and label. Panic otherwise
recordSelectorFieldLabel :: Id -> (TyCon, FieldLabel)
recordSelectorFieldLabel id
@@ -459,8 +459,8 @@ isImplicitId id
idIsFrom :: Module -> Id -> Bool
idIsFrom mod id = nameIsLocalOrFrom mod (idName id)
-\end{code}
+{-
Note [Primop wrappers]
~~~~~~~~~~~~~~~~~~~~~~
Currently hasNoBinding claims that PrimOpIds don't have a curried
@@ -473,36 +473,34 @@ applications of GHC.Prim.plusInt# to GHC.PrimopWrappers.plusInt#.
Nota Bene: GHC.PrimopWrappers is needed *regardless*, because it's
used by GHCi, which does not implement primops directly at all.
+-}
-
-
-\begin{code}
isDeadBinder :: Id -> Bool
isDeadBinder bndr | isId bndr = isDeadOcc (idOccInfo bndr)
| otherwise = False -- TyVars count as not dead
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Evidence variables
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
isEvVar :: Var -> Bool
isEvVar var = isPredTy (varType var)
isDictId :: Id -> Bool
isDictId id = isDictTy (idType id)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{IdInfo stuff}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
---------------------------------
-- ARITY
idArity :: Id -> Arity
@@ -543,7 +541,7 @@ isStrictId :: Id -> Bool
isStrictId id
= ASSERT2( isId id, text "isStrictId: not an id: " <+> ppr id )
(isStrictType (idType id)) ||
- -- Take the best of both strictnesses - old and new
+ -- Take the best of both strictnesses - old and new
(isStrictDmd (idDemandInfo id))
---------------------------------
@@ -607,15 +605,14 @@ setIdOccInfo id occ_info = modifyIdInfo (`setOccInfo` occ_info) id
zapIdOccInfo :: Id -> Id
zapIdOccInfo b = b `setIdOccInfo` NoOccInfo
-\end{code}
-
+{-
---------------------------------
-- INLINING
The inline pragma tells us to be very keen to inline this Id, but it's still
OK not to if optimisation is switched off.
+-}
-\begin{code}
idInlinePragma :: Id -> InlinePragma
idInlinePragma id = inlinePragInfo (idInfo id)
@@ -636,12 +633,12 @@ idRuleMatchInfo id = inlinePragmaRuleMatchInfo (idInlinePragma id)
isConLikeId :: Id -> Bool
isConLikeId id = isDataConWorkId id || isConLike (idRuleMatchInfo id)
-\end{code}
-
+{-
---------------------------------
-- ONE-SHOT LAMBDAS
-\begin{code}
+-}
+
idOneShotInfo :: Id -> OneShotInfo
idOneShotInfo id = oneShotInfo (idInfo id)
@@ -728,9 +725,7 @@ updOneShotInfo id one_shot
-- But watch out: this may change the type of something else
-- f = \x -> e
-- If we change the one-shot-ness of x, f's type changes
-\end{code}
-\begin{code}
zapInfo :: (IdInfo -> Maybe IdInfo) -> Id -> Id
zapInfo zapper id = maybeModifyIdInfo (zapper (idInfo id)) id
@@ -738,12 +733,12 @@ zapLamIdInfo :: Id -> Id
zapLamIdInfo = zapInfo zapLamInfo
zapFragileIdInfo :: Id -> Id
-zapFragileIdInfo = zapInfo zapFragileInfo
+zapFragileIdInfo = zapInfo zapFragileInfo
zapDemandIdInfo :: Id -> Id
zapDemandIdInfo = zapInfo zapDemandInfo
-\end{code}
+{-
Note [transferPolyIdInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~
This transfer is used in two places:
@@ -791,8 +786,8 @@ arity and strictness info before transferring it. E.g.
g' = \y. \x. e
+ substitute (g' y) for g
Notice that g' has an arity one more than the original g
+-}
-\begin{code}
transferPolyIdInfo :: Id -- Original Id
-> [Var] -- Abstract wrt these variables
-> Id -- New Id
@@ -816,4 +811,3 @@ transferPolyIdInfo old_id abstract_wrt new_id
`setInlinePragInfo` old_inline_prag
`setOccInfo` old_occ_info
`setStrictnessInfo` new_strictness
-\end{code}
diff --git a/compiler/basicTypes/IdInfo.lhs b/compiler/basicTypes/IdInfo.hs
index 685d79e21d..d2179dc08a 100644
--- a/compiler/basicTypes/IdInfo.lhs
+++ b/compiler/basicTypes/IdInfo.hs
@@ -1,13 +1,13 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
+
\section[IdInfo]{@IdInfos@: Non-essential information about @Ids@}
(And a pretty good illustration of quite a few things wrong with
Haskell. [WDP 94/11])
+-}
-\begin{code}
module IdInfo (
-- * The IdDetails type
IdDetails(..), pprIdDetails, coVarDetails,
@@ -93,15 +93,15 @@ infixl 1 `setSpecInfo`,
`setCafInfo`,
`setStrictnessInfo`,
`setDemandInfo`
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
IdDetails
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | The 'IdDetails' of an 'Id' give stable, and necessary,
-- information about the Id.
data IdDetails
@@ -165,16 +165,15 @@ pprIdDetails other = brackets (pp other)
pp (RecSelId { sel_naughty = is_naughty })
= brackets $ ptext (sLit "RecSel")
<> ppWhen is_naughty (ptext (sLit "(naughty)"))
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The main IdInfo type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | An 'IdInfo' gives /optional/ information about an 'Id'. If
-- present it never lies, but it may not be present, in which case there
-- is always a conservative assumption which can be made.
@@ -232,11 +231,9 @@ seqStrictnessInfo ty = seqStrictSig ty
seqDemandInfo :: Demand -> ()
seqDemandInfo dmd = seqDemand dmd
-\end{code}
-Setters
+-- Setters
-\begin{code}
setSpecInfo :: IdInfo -> SpecInfo -> IdInfo
setSpecInfo info sp = sp `seq` info { specInfo = sp }
setInlinePragInfo :: IdInfo -> InlinePragma -> IdInfo
@@ -273,10 +270,7 @@ setDemandInfo info dd = dd `seq` info { demandInfo = dd }
setStrictnessInfo :: IdInfo -> StrictSig -> IdInfo
setStrictnessInfo info dd = dd `seq` info { strictnessInfo = dd }
-\end{code}
-
-\begin{code}
-- | Basic 'IdInfo' that carries no useful information whatsoever
vanillaIdInfo :: IdInfo
vanillaIdInfo
@@ -297,20 +291,19 @@ vanillaIdInfo
noCafIdInfo :: IdInfo
noCafIdInfo = vanillaIdInfo `setCafInfo` NoCafRefs
-- Used for built-in type Ids in MkId.
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[arity-IdInfo]{Arity info about an @Id@}
-%* *
-%************************************************************************
+* *
+************************************************************************
For locally-defined Ids, the code generator maintains its own notion
of their arities; so it should not be asking... (but other things
besides the code-generator need arity info!)
+-}
-\begin{code}
-- | An 'ArityInfo' of @n@ tells us that partial application of this
-- 'Id' to up to @n-1@ value arguments does essentially no work.
--
@@ -328,15 +321,15 @@ unknownArity = 0 :: Arity
ppArityInfo :: Int -> SDoc
ppArityInfo 0 = empty
ppArityInfo n = hsep [ptext (sLit "Arity"), int n]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Inline-pragma information}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Tells when the inlining is active.
-- When it is active the thing may be inlined, depending on how
-- big it is.
@@ -347,26 +340,24 @@ ppArityInfo n = hsep [ptext (sLit "Arity"), int n]
-- The default 'InlinePragInfo' is 'AlwaysActive', so the info serves
-- entirely as a way to inhibit inlining until we want it
type InlinePragInfo = InlinePragma
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Strictness
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
pprStrictness :: StrictSig -> SDoc
pprStrictness sig = ppr sig
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
SpecInfo
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Specialisations and RULES in IdInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -386,8 +377,8 @@ differently because:
In TidyPgm, when the LocalId becomes a GlobalId, its RULES are stripped off
and put in the global list.
+-}
-\begin{code}
-- | Records the specializations of this 'Id' that we know about
-- in the form of rewrite 'CoreRule's that target them
data SpecInfo
@@ -420,15 +411,15 @@ setSpecInfoHead fn (SpecInfo rules fvs)
seqSpecInfo :: SpecInfo -> ()
seqSpecInfo (SpecInfo rules fvs) = seqRules rules `seq` seqVarSet fvs
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[CG-IdInfo]{Code generator-related information}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- CafInfo is used to build Static Reference Tables (see simplStg/SRT.lhs).
-- | Records whether an 'Id' makes Constant Applicative Form references
@@ -461,15 +452,15 @@ instance Outputable CafInfo where
ppCafInfo :: CafInfo -> SDoc
ppCafInfo NoCafRefs = ptext (sLit "NoCafRefs")
ppCafInfo MayHaveCafRefs = empty
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Bulk operations on IdInfo}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | This is used to remove information on lambda binders that we have
-- set up as part of a lambda group, assuming they will be applied all at once,
-- but turn out to be part of an unsaturated lambda as in e.g:
@@ -492,15 +483,11 @@ zapLamInfo info@(IdInfo {occInfo = occ, demandInfo = demand})
_other -> occ
is_safe_dmd dmd = not (isStrictDmd dmd)
-\end{code}
-\begin{code}
-- | Remove demand info on the 'IdInfo' if it is present, otherwise return @Nothing@
zapDemandInfo :: IdInfo -> Maybe IdInfo
zapDemandInfo info = Just (info {demandInfo = topDmd})
-\end{code}
-\begin{code}
zapFragileInfo :: IdInfo -> Maybe IdInfo
-- ^ Zap info that depends on free variables
zapFragileInfo info
@@ -509,15 +496,15 @@ zapFragileInfo info
`setOccInfo` zapFragileOcc occ)
where
occ = occInfo info
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{TickBoxOp}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type TickBoxId = Int
-- | Tick box for Hpc-style coverage
@@ -526,4 +513,3 @@ data TickBoxOp
instance Outputable TickBoxOp where
ppr (TickBox mod n) = ptext (sLit "tick") <+> ppr (mod,n)
-\end{code}
diff --git a/compiler/basicTypes/IdInfo.lhs-boot b/compiler/basicTypes/IdInfo.hs-boot
index 257e1c6e5e..2e9862944e 100644
--- a/compiler/basicTypes/IdInfo.lhs-boot
+++ b/compiler/basicTypes/IdInfo.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module IdInfo where
import Outputable
data IdInfo
@@ -7,4 +6,3 @@ data IdDetails
vanillaIdInfo :: IdInfo
coVarDetails :: IdDetails
pprIdDetails :: IdDetails -> SDoc
-\end{code} \ No newline at end of file
diff --git a/compiler/basicTypes/Literal.lhs b/compiler/basicTypes/Literal.hs
index 13fbb4d46d..cb0be03402 100644
--- a/compiler/basicTypes/Literal.lhs
+++ b/compiler/basicTypes/Literal.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1998
+
\section[Literal]{@Literal@: Machine literals (unboxed, of course)}
+-}
-\begin{code}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module Literal
@@ -63,16 +63,15 @@ import Data.Word
import Data.Char
import Data.Data ( Data, Typeable )
import Numeric ( fromRat )
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Literals}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | So-called 'Literal's are one of:
--
-- * An unboxed (/machine/) literal ('MachInt', 'MachFloat', etc.),
@@ -118,8 +117,8 @@ data Literal
| LitInteger Integer Type -- ^ Integer literals
-- See Note [Integer literals]
deriving (Data, Typeable)
-\end{code}
+{-
Note [Integer literals]
~~~~~~~~~~~~~~~~~~~~~~~
An Integer literal is represented using, well, an Integer, to make it
@@ -139,8 +138,8 @@ in TcIface.
Binary instance
+-}
-\begin{code}
instance Binary Literal where
put_ bh (MachChar aa) = do putByte bh 0; put_ bh aa
put_ bh (MachStr ab) = do putByte bh 1; put_ bh ab
@@ -195,9 +194,7 @@ instance Binary Literal where
i <- get bh
-- See Note [Integer literals]
return $ mkLitInteger i (panic "Evaluated the place holder for mkInteger")
-\end{code}
-\begin{code}
instance Outputable Literal where
ppr lit = pprLiteral (\d -> d) lit
@@ -211,12 +208,12 @@ instance Ord Literal where
a >= b = case (a `compare` b) of { LT -> False; EQ -> True; GT -> True }
a > b = case (a `compare` b) of { LT -> False; EQ -> False; GT -> True }
compare a b = cmpLit a b
-\end{code}
-
+{-
Construction
~~~~~~~~~~~~
-\begin{code}
+-}
+
-- | Creates a 'Literal' of type @Int#@
mkMachInt :: DynFlags -> Integer -> Literal
mkMachInt dflags x = ASSERT2( inIntRange dflags x, integer x )
@@ -272,11 +269,12 @@ isZeroLit (MachWord64 0) = True
isZeroLit (MachFloat 0) = True
isZeroLit (MachDouble 0) = True
isZeroLit _ = False
-\end{code}
+{-
Coercions
~~~~~~~~~
-\begin{code}
+-}
+
narrow8IntLit, narrow16IntLit, narrow32IntLit,
narrow8WordLit, narrow16WordLit, narrow32WordLit,
char2IntLit, int2CharLit,
@@ -330,11 +328,12 @@ double2FloatLit l = pprPanic "double2FloatLit" (ppr l)
nullAddrLit :: Literal
nullAddrLit = MachNullAddr
-\end{code}
+{-
Predicates
~~~~~~~~~~
-\begin{code}
+-}
+
-- | True if there is absolutely no penalty to duplicating the literal.
-- False principally of strings
litIsTrivial :: Literal -> Bool
@@ -359,11 +358,12 @@ litFitsInChar _ = False
litIsLifted :: Literal -> Bool
litIsLifted (LitInteger {}) = True
litIsLifted _ = False
-\end{code}
+{-
Types
~~~~~
-\begin{code}
+-}
+
-- | Find the Haskell 'Type' the literal occupies
literalType :: Literal -> Type
literalType MachNullAddr = addrPrimTy
@@ -392,12 +392,12 @@ absent_lits = listToUFM [ (addrPrimTyConKey, MachNullAddr)
, (doublePrimTyConKey, MachDouble 0)
, (wordPrimTyConKey, MachWord 0)
, (word64PrimTyConKey, MachWord64 0) ]
-\end{code}
-
+{-
Comparison
~~~~~~~~~~
-\begin{code}
+-}
+
cmpLit :: Literal -> Literal -> Ordering
cmpLit (MachChar a) (MachChar b) = a `compare` b
cmpLit (MachStr a) (MachStr b) = a `compare` b
@@ -425,14 +425,14 @@ litTag (MachFloat _) = _ILIT(8)
litTag (MachDouble _) = _ILIT(9)
litTag (MachLabel _ _ _) = _ILIT(10)
litTag (LitInteger {}) = _ILIT(11)
-\end{code}
+{-
Printing
~~~~~~~~
* MachX (i.e. unboxed) things are printed unadorned (e.g. 3, 'a', "foo")
exceptions: MachFloat gets an initial keyword prefix.
+-}
-\begin{code}
pprLiteral :: (SDoc -> SDoc) -> Literal -> SDoc
-- The function is used on non-atomic literals
-- to wrap parens around literals that occur in
@@ -456,19 +456,18 @@ pprIntVal :: Integer -> SDoc
-- ^ Print negative integers with parens to be sure it's unambiguous
pprIntVal i | i < 0 = parens (integer i)
| otherwise = integer i
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Hashing}
-%* *
-%************************************************************************
+* *
+************************************************************************
Hash values should be zero or a positive integer. No negatives please.
(They mess up the UniqFM for some reason.)
+-}
-\begin{code}
hashLiteral :: Literal -> Int
hashLiteral (MachChar c) = ord c + 1000 -- Keep it out of range of common ints
hashLiteral (MachStr s) = hashByteString s
@@ -492,4 +491,3 @@ hashInteger i = 1 + abs (fromInteger (i `rem` 10000))
hashFS :: FastString -> Int
hashFS s = iBox (uniqueOfFS s)
-\end{code}
diff --git a/compiler/basicTypes/MkId.lhs b/compiler/basicTypes/MkId.hs
index 2f76fc29e0..14ed9b6ad6 100644
--- a/compiler/basicTypes/MkId.lhs
+++ b/compiler/basicTypes/MkId.hs
@@ -1,7 +1,7 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The AQUA Project, Glasgow University, 1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The AQUA Project, Glasgow University, 1998
+
This module contains definitions for the IdInfo for things that
have a standard form, namely:
@@ -10,8 +10,8 @@ have a standard form, namely:
- record selectors
- method and superclass selectors
- primitive operations
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module MkId (
@@ -76,13 +76,13 @@ import FastString
import ListSetOps
import Data.Maybe ( maybeToList )
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Wired in Ids}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Wired-in Ids]
~~~~~~~~~~~~~~~~~~~
@@ -115,8 +115,8 @@ In cases (2-4), the function has a definition in a library module, and
can be called; but the wired-in version means that the details are
never read from that module's interface file; instead, the full definition
is right here.
+-}
-\begin{code}
wiredInIds :: [Id]
wiredInIds
= [lazyId, dollarId, oneShotId]
@@ -137,13 +137,13 @@ ghcPrimIds
coerceId,
proxyHashId
]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Data constructors}
-%* *
-%************************************************************************
+* *
+************************************************************************
The wrapper for a constructor is an ordinary top-level binding that evaluates
any strict args, unboxes any args that are going to be flattened, and calls
@@ -241,11 +241,11 @@ predicate (C a). But now we treat that as an ordinary argument, not
part of the theta-type, so all is well.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Dictionary selectors}
-%* *
-%************************************************************************
+* *
+************************************************************************
Selecting a field for a dictionary. If there is just one field, then
there's nothing to do.
@@ -264,8 +264,8 @@ Then the top-level type for op is
This is unlike ordinary record selectors, which have all the for-alls
at the outside. When dealing with classes it's very convenient to
recover the original type signature from the class op selector.
+-}
-\begin{code}
mkDictSelId :: Name -- Name of one of the *value* selectors
-- (dictionary superclass or method)
-> Class -> Id
@@ -355,17 +355,15 @@ dictSelRule val_index n_ty_args _ id_unf _ args
= Just (getNth con_args val_index)
| otherwise
= Nothing
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Data constructors
-%* *
-%************************************************************************
-
+* *
+************************************************************************
+-}
-\begin{code}
mkDataConWorkId :: Name -> DataCon -> Id
mkDataConWorkId wkr_name data_con
| isNewTyCon tycon
@@ -445,8 +443,8 @@ dataConCPR con
-- on the stack, and are often then allocated in the heap
-- by the caller. So doing CPR for them may in fact make
-- things worse.
-\end{code}
+{-
-------------------------------------------------
-- Data constructor representation
--
@@ -454,9 +452,8 @@ dataConCPR con
-- constructor fields
--
--------------------------------------------------
+-}
-
-\begin{code}
type Unboxer = Var -> UniqSM ([Var], CoreExpr -> CoreExpr)
-- Unbox: bind rep vars by decomposing src var
@@ -728,8 +725,8 @@ isUnpackableType fam_envs ty
attempt_unpack (HsUserBang Nothing bang) = bang -- Be conservative
attempt_unpack HsStrict = False
attempt_unpack HsNoBang = False
-\end{code}
+{-
Note [Unpack one-wide fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The flag UnboxSmallStrictFields ensures that any field that can
@@ -790,22 +787,21 @@ takes no space at all. This is easily done: just give it
an UNPACK pragma. The rest of the unpack/repack code does the
heavy lifting. This one line makes every GADT take a word less
space for each equality predicate, so it's pretty important!
+-}
-
-\begin{code}
mk_pred_strict_mark :: PredType -> HsBang
mk_pred_strict_mark pred
| isEqPred pred = HsUnpack Nothing -- Note [Unpack equality predicates]
| otherwise = HsNoBang
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Wrapping and unwrapping newtypes and type families
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
wrapNewTypeBody :: TyCon -> [Type] -> CoreExpr -> CoreExpr
-- The wrapper for the data constructor for a newtype looks like this:
-- newtype T a = MkT (a,Int)
@@ -878,16 +874,15 @@ unwrapTypeFamInstScrut axiom ind args scrut
unwrapTypeUnbranchedFamInstScrut :: CoAxiom Unbranched -> [Type] -> CoreExpr -> CoreExpr
unwrapTypeUnbranchedFamInstScrut axiom
= unwrapTypeFamInstScrut axiom 0
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Primitive operations}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkPrimOpId :: PrimOp -> Id
mkPrimOpId prim_op
= id
@@ -939,14 +934,13 @@ mkFCallId dflags uniq fcall ty
(arg_tys, _) = tcSplitFunTys tau
arity = length arg_tys
strict_sig = mkClosedStrictSig (replicate arity evalDmd) topRes
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{DictFuns and default methods}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Dict funs and default methods]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -955,8 +949,8 @@ involves user-written code, so we can't figure out their strictness etc
based on fixed info, as we can for constructors and record selectors (say).
NB: See also Note [Exported LocalIds] in Id
+-}
-\begin{code}
mkDictFunId :: Name -- Name to use for the dict fun;
-> [TyVar]
-> ThetaType
@@ -989,14 +983,13 @@ mkDictFunTy tvs theta clas tys
-- See Note [Silent Superclass Arguments]
discard pred = any (`eqPred` pred) theta
-- See the DFun Superclass Invariant in TcInstDcls
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Un-definable}
-%* *
-%************************************************************************
+* *
+************************************************************************
These Ids can't be defined in Haskell. They could be defined in
unfoldings in the wired-in GHC.Prim interface file, but we'd have to
@@ -1012,8 +1005,8 @@ add it as a built-in Id with an unfolding here.
The type variables we use here are "open" type variables: this means
they can unify with both unlifted and lifted types. Hence we provide
another gun with which to shoot yourself in the foot.
+-}
-\begin{code}
lazyIdName, unsafeCoerceName, nullAddrName, seqName,
realWorldName, voidPrimIdName, coercionTokenName,
magicDictName, coerceName, proxyName, dollarName, oneShotName :: Name
@@ -1029,9 +1022,7 @@ coerceName = mkWiredInIdName gHC_PRIM (fsLit "coerce") coerceKey
proxyName = mkWiredInIdName gHC_PRIM (fsLit "proxy#") proxyHashKey proxyHashId
dollarName = mkWiredInIdName gHC_BASE (fsLit "$") dollarIdKey dollarId
oneShotName = mkWiredInIdName gHC_MAGIC (fsLit "oneShot") oneShotKey oneShotId
-\end{code}
-\begin{code}
dollarId :: Id -- Note [dollarId magic]
dollarId = pcMiscPrelId dollarName ty
(noCafIdInfo `setUnfoldingInfo` unf)
@@ -1155,8 +1146,8 @@ coerceId = pcMiscPrelId coerceName ty info
rhs = mkLams [alphaTyVar, betaTyVar, eqR, x] $
mkWildCase (Var eqR) eqRTy betaTy $
[(DataAlt coercibleDataCon, [eq], Cast (Var x) (CoVarCo eq))]
-\end{code}
+{-
Note [dollarId magic]
~~~~~~~~~~~~~~~~~~~~~
The only reason that ($) is wired in is so that its type can be
@@ -1351,9 +1342,8 @@ The evaldUnfolding makes it look that some primitive value is
evaluated, which in turn makes Simplify.interestingArg return True,
which in turn makes INLINE things applied to said value likely to be
inlined.
+-}
-
-\begin{code}
realWorldPrimId :: Id -- :: State# RealWorld
realWorldPrimId = pcMiscPrelId realWorldName realWorldStatePrimTy
(noCafIdInfo `setUnfoldingInfo` evaldUnfolding -- Note [evaldUnfoldings]
@@ -1371,10 +1361,7 @@ coercionTokenId -- Used to replace Coercion terms when we go to STG
= pcMiscPrelId coercionTokenName
(mkTyConApp eqPrimTyCon [liftedTypeKind, unitTy, unitTy])
noCafIdInfo
-\end{code}
-
-\begin{code}
pcMiscPrelId :: Name -> Type -> IdInfo -> Id
pcMiscPrelId name ty info
= mkVanillaGlobalWithInfo name ty info
@@ -1383,4 +1370,3 @@ pcMiscPrelId name ty info
-- random calls to GHCbase.unpackPS__. If GHCbase is the module
-- being compiled, then it's just a matter of luck if the definition
-- will be in "the right place" to be in scope.
-\end{code}
diff --git a/compiler/basicTypes/MkId.lhs-boot b/compiler/basicTypes/MkId.hs-boot
index d7adedb10e..69a694b1a2 100644
--- a/compiler/basicTypes/MkId.lhs-boot
+++ b/compiler/basicTypes/MkId.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module MkId where
import Name( Name )
import Var( Id )
@@ -11,6 +10,3 @@ mkDataConWorkId :: Name -> DataCon -> Id
mkPrimOpId :: PrimOp -> Id
magicDictId :: Id
-\end{code}
-
-
diff --git a/compiler/basicTypes/Module.lhs b/compiler/basicTypes/Module.hs
index 120a11438b..ac5efd4a2c 100644
--- a/compiler/basicTypes/Module.lhs
+++ b/compiler/basicTypes/Module.hs
@@ -1,14 +1,14 @@
-%
-% (c) The University of Glasgow, 2004-2006
-%
+{-
+(c) The University of Glasgow, 2004-2006
+
Module
~~~~~~~~~~
Simply the name of a module, represented as a FastString.
These are Uniquable, hence we can build Maps with Modules as
the keys.
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
module Module
@@ -72,7 +72,7 @@ module Module
ModuleNameEnv,
-- * Sets of Modules
- ModuleSet, VisibleOrphanModules,
+ ModuleSet,
emptyModuleSet, mkModuleSet, moduleSetElts, extendModuleSet, elemModuleSet
) where
@@ -91,15 +91,15 @@ import Data.Map (Map)
import qualified Data.Map as Map
import qualified FiniteMap as Map
import System.FilePath
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Module locations}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Where a module lives on the file system: the actual locations
-- of the .hs, .hi and .o files, if we have them
data ModLocation
@@ -122,8 +122,8 @@ data ModLocation
instance Outputable ModLocation where
ppr = text . show
-\end{code}
+{-
For a module in another package, the hs_file and obj_file
components of ModLocation are undefined.
@@ -131,8 +131,8 @@ The locations specified by a ModLocation may or may not
correspond to actual files yet: for example, even if the object
file doesn't exist, the ModLocation still contains the path to
where the object file will reside if/when it is created.
+-}
-\begin{code}
addBootSuffix :: FilePath -> FilePath
-- ^ Add the @-boot@ suffix to .hs, .hi and .o files
addBootSuffix path = path ++ "-boot"
@@ -149,16 +149,15 @@ addBootSuffixLocn locn
= locn { ml_hs_file = fmap addBootSuffix (ml_hs_file locn)
, ml_hi_file = addBootSuffix (ml_hi_file locn)
, ml_obj_file = addBootSuffix (ml_obj_file locn) }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The name of a module}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A ModuleName is essentially a simple string, e.g. @Data.List@.
newtype ModuleName = ModuleName FastString
deriving Typeable
@@ -226,15 +225,15 @@ moduleNameSlashes = dots_to_slashes . moduleNameString
moduleNameColons :: ModuleName -> String
moduleNameColons = dots_to_colons . moduleNameString
where dots_to_colons = map (\c -> if c == '.' then ':' else c)
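The dot-rewriting done by moduleNameSlashes and moduleNameColons is just a character map; seen in isolation on plain Strings (a toy sketch, not GHC's API):

-- Replace every '.' in a module-name-like string with the given character.
dotsTo :: Char -> String -> String
dotsTo c = map (\x -> if x == '.' then c else x)

-- dotsTo '/' "Data.List"  ==>  "Data/List"
-- dotsTo ':' "Data.List"  ==>  "Data:List"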
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{A fully qualified module}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A Module is a pair of a 'PackageKey' and a 'ModuleName'.
data Module = Module {
modulePackageKey :: !PackageKey, -- pkg-1.0
@@ -291,15 +290,15 @@ class ContainsModule t where
class HasModule m where
getModule :: m Module
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{PackageKey}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A string which uniquely identifies a package. For wired-in packages,
-- it is just the package name, but for user compiled packages, it is a hash.
-- ToDo: when the key is a hash, we can do more clever things than store
@@ -411,15 +410,15 @@ wiredInPackageKeys = [ primPackageKey,
thisGhcPackageKey,
dphSeqPackageKey,
dphParPackageKey ]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{@ModuleEnv@s}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A map keyed off of 'Module's
newtype ModuleEnv elt = ModuleEnv (Map Module elt)
@@ -486,9 +485,7 @@ isEmptyModuleEnv (ModuleEnv e) = Map.null e
foldModuleEnv :: (a -> b -> b) -> b -> ModuleEnv a -> b
foldModuleEnv f x (ModuleEnv e) = Map.foldRightWithKey (\_ v -> f v) x e
-\end{code}
-\begin{code}
-- | A set of 'Module's
type ModuleSet = Map Module ()
@@ -503,18 +500,11 @@ mkModuleSet ms = Map.fromList [(m,()) | m <- ms ]
extendModuleSet s m = Map.insert m () s
moduleSetElts = Map.keys
elemModuleSet = Map.member
-\end{code}
+{-
A ModuleName has a Unique, so we can build mappings of these using
UniqFM.
+-}
-\begin{code}
-- | A map keyed off of 'ModuleName's (actually, their 'Unique's)
type ModuleNameEnv elt = UniqFM elt
-
--- | Set of visible orphan modules, according to what modules have been directly
--- imported. This is based off of the dep_orphs field, which records
--- transitively reachable orphan modules (modules that define orphan instances).
-type VisibleOrphanModules = ModuleSet
-\end{code}
-
diff --git a/compiler/basicTypes/Module.lhs-boot b/compiler/basicTypes/Module.hs-boot
index 6d194d6a2a..8a73d38256 100644
--- a/compiler/basicTypes/Module.lhs-boot
+++ b/compiler/basicTypes/Module.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module Module where
data Module
@@ -7,4 +6,3 @@ data PackageKey
moduleName :: Module -> ModuleName
modulePackageKey :: Module -> PackageKey
packageKeyString :: PackageKey -> String
-\end{code}
diff --git a/compiler/basicTypes/Name.lhs b/compiler/basicTypes/Name.hs
index d7c18fcfce..ffdd1a14d8 100644
--- a/compiler/basicTypes/Name.lhs
+++ b/compiler/basicTypes/Name.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[Name]{@Name@: to transmit name info from renamer to typechecker}
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
-- |
@@ -89,15 +89,15 @@ import FastString
import Outputable
import Data.Data
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Name-datatype]{The @Name@ datatype, and name construction}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A unique, unambiguous name for something, containing information about where
-- that thing originated.
data Name = Name {
@@ -129,15 +129,15 @@ data NameSort
-- which have special syntactic forms. They aren't in scope
-- as such.
data BuiltInSyntax = BuiltInSyntax | UserSyntax
-\end{code}
+{-
Notes about the NameSorts:
1. Initially, top-level Ids (including locally-defined ones) get External names,
and all other local Ids get Internal names
2. In any invocation of GHC, an External Name for "M.x" has one and only one
- unique. This unique association is ensured via the Name Cache;
+ unique. This unique association is ensured via the Name Cache;
see Note [The Name Cache] in IfaceEnv.
3. Things with a External name are given C static labels, so they finally
@@ -165,8 +165,8 @@ Wired-in thing => The thing (Id, TyCon) is fully known to the compiler,
E.g. Bool, True, Int, Float, and many others
All built-in syntax is for wired-in things.
+-}
-\begin{code}
instance HasOccName Name where
occName = nameOccName
@@ -180,15 +180,15 @@ nameUnique name = mkUniqueGrimily (iBox (n_uniq name))
nameOccName name = n_occ name
nameSrcLoc name = srcSpanStart (n_loc name)
nameSrcSpan name = n_loc name
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Predicates on names}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
nameIsLocalOrFrom :: Module -> Name -> Bool
isInternalName :: Name -> Bool
isExternalName :: Name -> Bool
@@ -239,16 +239,15 @@ isVarName = isVarOcc . nameOccName
isSystemName (Name {n_sort = System}) = True
isSystemName _ = False
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Making names}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Create a name which is (for now at least) local to the current module and hence
-- does not need a 'Module' to disambiguate it from other 'Name's
mkInternalName :: Unique -> OccName -> SrcSpan -> Name
@@ -309,9 +308,7 @@ mkSysTvName uniq fs = mkSystemName uniq (mkOccNameFS tvName fs)
mkFCallName :: Unique -> String -> Name
mkFCallName uniq str = mkInternalName uniq (mkVarOcc str) noSrcSpan
-- The encoded string completely describes the ccall
-\end{code}
-\begin{code}
-- When we renumber/rename things, we need to be
-- able to change a Name's Unique to match the cached
-- one in the thing it's the name of. If you know what I mean.
@@ -333,9 +330,7 @@ tidyNameOcc name occ = name { n_occ = occ }
-- | Make the 'Name' into an internal name, regardless of what it was to begin with
localiseName :: Name -> Name
localiseName n = n { n_sort = Internal }
-\end{code}
-\begin{code}
-- |Create a localised variant of a name.
--
-- If the name is external, encode the original's module name to disambiguate.
@@ -346,15 +341,14 @@ mkLocalisedOccName this_mod mk_occ name = mk_occ origin (nameOccName name)
origin
| nameIsLocalOrFrom this_mod name = Nothing
| otherwise = Just (moduleNameColons . moduleName . nameModule $ name)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Hashing and comparison}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
cmpName :: Name -> Name -> Ordering
cmpName n1 n2 = iBox (n_uniq n1) `compare` iBox (n_uniq n2)
@@ -378,15 +372,15 @@ stableNameCmp (Name { n_sort = s1, n_occ = occ1 })
sort_cmp Internal System = LT
sort_cmp System System = EQ
sort_cmp System _ = GT
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Name-instances]{Instance declarations}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Eq Name where
a == b = case (a `compare` b) of { EQ -> True; _ -> False }
a /= b = case (a `compare` b) of { EQ -> False; _ -> True }
@@ -409,15 +403,15 @@ instance Data Name where
toConstr _ = abstractConstr "Name"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "Name"
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Binary}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Binary Name where
put_ bh name =
case getUserData bh of
@@ -426,15 +420,15 @@ instance Binary Name where
get bh =
case getUserData bh of
UserData { ud_get_name = get_name } -> get_name bh
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Pretty printing}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Outputable Name where
ppr name = pprName name
@@ -546,24 +540,22 @@ pprNameDefnLoc name
-> ptext (sLit "at") <+> ftext s
| otherwise
-> ptext (sLit "in") <+> quotes (ppr (nameModule name))
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Overloaded functions related to Names}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A class allowing convenient access to the 'Name' of various datatypes
class NamedThing a where
getOccName :: a -> OccName
getName :: a -> Name
getOccName n = nameOccName (getName n) -- Default method
-\end{code}
-\begin{code}
getSrcLoc :: NamedThing a => a -> SrcLoc
getSrcSpan :: NamedThing a => a -> SrcSpan
getOccString :: NamedThing a => a -> String
@@ -577,15 +569,15 @@ pprInfixName, pprPrefixName :: (Outputable a, NamedThing a) => a -> SDoc
-- add parens or back-quotes as appropriate
pprInfixName n = pprInfixVar (isSymOcc (getOccName n)) (ppr n)
-pprPrefixName thing
- | name `hasKey` liftedTypeKindTyConKey
+pprPrefixName thing
+ | name `hasKey` liftedTypeKindTyConKey
= ppr name -- See Note [Special treatment for kind *]
| otherwise
= pprPrefixVar (isSymOcc (nameOccName name)) (ppr name)
where
name = getName thing
-\end{code}
+{-
Note [Special treatment for kind *]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Do not put parens around the kind '*'. Even though it looks like
@@ -597,4 +589,4 @@ the overloaded function pprPrefixOcc. It's easier where we know the
type being pretty printed; eg the pretty-printing code in TypeRep.
See Trac #7645, which led to this.
-
+-}
diff --git a/compiler/basicTypes/Name.lhs-boot b/compiler/basicTypes/Name.hs-boot
index 27b71d944f..313db26e5c 100644
--- a/compiler/basicTypes/Name.lhs-boot
+++ b/compiler/basicTypes/Name.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module Name where
import {-# SOURCE #-} Module
@@ -6,4 +5,3 @@ import {-# SOURCE #-} Module
data Name
nameModule :: Name -> Module
-\end{code}
diff --git a/compiler/basicTypes/NameEnv.lhs b/compiler/basicTypes/NameEnv.hs
index f86e174f98..9018bc44f9 100644
--- a/compiler/basicTypes/NameEnv.lhs
+++ b/compiler/basicTypes/NameEnv.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[NameEnv]{@NameEnv@: name environments}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module NameEnv (
-- * Var, Id and TyVar environments (maps)
@@ -31,15 +31,15 @@ import Name
import Unique
import UniqFM
import Maybes
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Name environment}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
depAnal :: (node -> [Name]) -- Defs
-> (node -> [Name]) -- Uses
-> [node]
@@ -56,16 +56,15 @@ depAnal get_defs get_uses nodes
key_map :: NameEnv Int -- Maps a Name to the key of the decl that defines it
key_map = mkNameEnv [(name,key) | (node, key) <- keyed_nodes, name <- get_defs node]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Name environment}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type NameEnv a = UniqFM a -- Domain is Name
emptyNameEnv :: NameEnv a
@@ -116,4 +115,3 @@ anyNameEnv f x = foldUFM ((||) . f) False x
disjointNameEnv x y = isNullUFM (intersectUFM x y)
lookupNameEnv_NF env n = expectJust "lookupNameEnv_NF" (lookupNameEnv env n)
-\end{code}
diff --git a/compiler/basicTypes/NameSet.lhs b/compiler/basicTypes/NameSet.hs
index 0710dfa5ff..7bca4798e2 100644
--- a/compiler/basicTypes/NameSet.lhs
+++ b/compiler/basicTypes/NameSet.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1998
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module NameSet (
-- * Names set type
@@ -34,15 +33,15 @@ module NameSet (
import Name
import UniqSet
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Sets of names}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type NameSet = UniqSet Name
emptyNameSet :: NameSet
@@ -84,18 +83,17 @@ intersectNameSet = intersectUniqSets
delListFromNameSet set ns = foldl delFromNameSet set ns
intersectsNameSet s1 s2 = not (isEmptyNameSet (s1 `intersectNameSet` s2))
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Free variables}
-%* *
-%************************************************************************
+* *
+************************************************************************
These synonyms are useful when we are thinking of free variables
+-}
-\begin{code}
type FreeVars = NameSet
plusFV :: FreeVars -> FreeVars -> FreeVars
@@ -117,16 +115,15 @@ addOneFV = extendNameSet
unitFV = unitNameSet
delFV n s = delFromNameSet s n
delFVs ns s = delListFromNameSet s ns
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Defs and uses
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A set of names that are defined somewhere
type Defs = NameSet
@@ -196,4 +193,3 @@ findUses dus uses
= rhs_uses `unionNameSet` uses
| otherwise -- No def is used
= uses
-\end{code}
diff --git a/compiler/basicTypes/OccName.lhs b/compiler/basicTypes/OccName.hs
index fdc7c95918..b7da021d1c 100644
--- a/compiler/basicTypes/OccName.lhs
+++ b/compiler/basicTypes/OccName.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
-- |
@@ -109,17 +108,17 @@ import Lexeme
import Binary
import Data.Char
import Data.Data
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
FastStringEnv
-%* *
-%************************************************************************
+* *
+************************************************************************
FastStringEnv can't be in FastString because the env depends on UniqFM
+-}
-\begin{code}
type FastStringEnv a = UniqFM a -- Keyed by FastString
@@ -132,15 +131,15 @@ emptyFsEnv = emptyUFM
lookupFsEnv = lookupUFM
extendFsEnv = addToUFM
mkFsEnv = listToUFM
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Name space}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data NameSpace = VarName -- Variables, including "real" data constructors
| DataName -- "Source" data constructors
| TvName -- Type variables
@@ -231,25 +230,21 @@ demoteNameSpace VarName = Nothing
demoteNameSpace DataName = Nothing
demoteNameSpace TvName = Nothing
demoteNameSpace TcClsName = Just DataName
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Name-pieces-datatypes]{The @OccName@ datatypes}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data OccName = OccName
{ occNameSpace :: !NameSpace
, occNameFS :: !FastString
}
deriving Typeable
-\end{code}
-
-\begin{code}
instance Eq OccName where
(OccName sp1 s1) == (OccName sp2 s2) = s1 == s2 && sp1 == sp2
@@ -266,16 +261,15 @@ instance Data OccName where
instance HasOccName OccName where
occName = id
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Printing}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Outputable OccName where
ppr = pprOccName
@@ -303,21 +297,21 @@ pprOccName (OccName sp occ)
strip_th_unique ('[' : c : _) | isAlphaNum c = []
strip_th_unique (c : cs) = c : strip_th_unique cs
strip_th_unique [] = []
-\end{code}
+{-
Note [Suppressing uniques in OccNames]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a hack to de-wobblify the OccNames that contain uniques from
Template Haskell that have been turned into a string in the OccName.
See Note [Unique OccNames from Template Haskell] in Convert.hs
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Construction}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkOccName :: NameSpace -> String -> OccName
mkOccName occ_sp str = OccName occ_sp (mkFastString str)
@@ -378,14 +372,13 @@ otherNameSpace TcClsName = TvName
This class provides a consistent way to access the underlying OccName. -}
class HasOccName name where
occName :: name -> OccName
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Environments
-%* *
-%************************************************************************
+* *
+************************************************************************
OccEnvs are used mainly for the envts in ModIfaces.
@@ -399,8 +392,8 @@ So we can make a Unique using
mkUnique ns key :: Unique
where 'ns' is a Char representing the name space. This in turn makes it
easy to build an OccEnv.
+-}
-\begin{code}
instance Uniquable OccName where
-- See Note [The Unique of an OccName]
getUnique (OccName VarName fs) = mkVarOccUnique fs
@@ -487,16 +480,15 @@ foldOccSet = foldUniqSet
isEmptyOccSet = isEmptyUniqSet
intersectOccSet = intersectUniqSets
intersectsOccSet s1 s2 = not (isEmptyOccSet (s1 `intersectOccSet` s2))
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Predicates and taking them apart}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
occNameString :: OccName -> String
occNameString (OccName _ s) = unpackFS s
@@ -544,24 +536,20 @@ parenSymOcc :: OccName -> SDoc -> SDoc
-- ^ Wrap parens around an operator
parenSymOcc occ doc | isSymOcc occ = parens doc
| otherwise = doc
-\end{code}
-
-\begin{code}
startsWithUnderscore :: OccName -> Bool
-- ^ Haskell 98 encourages compilers to suppress warnings about unused
-- names in a pattern if they start with @_@: this implements that test
startsWithUnderscore occ = case occNameString occ of
('_' : _) -> True
_other -> False
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Making system names}
-%* *
-%************************************************************************
+* *
+************************************************************************
Here's our convention for splitting up the interface file name space:
@@ -591,8 +579,8 @@ This knowledge is encoded in the following functions.
@mk_deriv@ generates an @OccName@ from the prefix and a string.
NB: The string must already be encoded!
+-}
-\begin{code}
mk_deriv :: NameSpace
-> String -- Distinguishes one sort of derived name from another
-> String
@@ -606,9 +594,7 @@ isDerivedOccName occ =
'$':c:_ | isAlphaNum c -> True
':':c:_ | isAlphaNum c -> True
_other -> False
-\end{code}
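-- The same test, rendered as a stand-alone function over plain Strings
-- (illustrative sketch only, not GHC code): a name counts as "derived" when
-- it starts with '$' or ':' followed by an alphanumeric character, as with
-- the "$f..."/"$m..." prefixes generated further below.

import Data.Char (isAlphaNum)

isDerivedString :: String -> Bool
isDerivedString ('$':c:_) | isAlphaNum c = True
isDerivedString (':':c:_) | isAlphaNum c = True
isDerivedString _                        = False

-- isDerivedString "$fOrdMaybe" == True, isDerivedString "map" == False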
-\begin{code}
mkDataConWrapperOcc, mkWorkerOcc, mkMatcherOcc, mkDefaultMethodOcc,
mkGenDefMethodOcc, mkDerivedTyConOcc, mkClassDataConOcc, mkDictOcc,
mkIPOcc, mkSpecOcc, mkForeignExportOcc, mkRepEqOcc, mkGenOcc1, mkGenOcc2,
@@ -694,9 +680,7 @@ mk_simple_deriv_with sp px (Just with) occ = mk_deriv sp (px ++ with ++ "_") (oc
-- of the data constructor OccName (which should be a DataName)
-- to VarName
mkDataConWorkerOcc datacon_occ = setOccNameSpace varName datacon_occ
-\end{code}
-\begin{code}
mkSuperDictSelOcc :: Int -- ^ Index of superclass, e.g. 3
-> OccName -- ^ Class, e.g. @Ord@
-> OccName -- ^ Derived 'Occname', e.g. @$p3Ord@
@@ -710,9 +694,7 @@ mkLocalOcc uniq occ
= mk_deriv varName ("$L" ++ show uniq) (occNameString occ)
-- The Unique might print with characters
-- that need encoding (e.g. 'z'!)
-\end{code}
-\begin{code}
-- | Derive a name for the representation type constructor of a
-- @data@\/@newtype@ instance.
mkInstTyTcOcc :: String -- ^ Family name, e.g. @Map@
@@ -720,9 +702,7 @@ mkInstTyTcOcc :: String -- ^ Family name, e.g. @Map@
-> OccName -- ^ @R:Map@
mkInstTyTcOcc str set =
chooseUniqueOcc tcName ('R' : ':' : str) set
-\end{code}
-\begin{code}
mkDFunOcc :: String -- ^ Typically the class and type glommed together e.g. @OrdMaybe@.
-- Only used in debug mode, for extra clarity
-> Bool -- ^ Is this a hs-boot instance DFun?
@@ -738,20 +718,20 @@ mkDFunOcc info_str is_boot set
where
prefix | is_boot = "$fx"
| otherwise = "$f"
-\end{code}
+{-
Sometimes we need to pick an OccName that has not already been used,
given a set of in-use OccNames.
+-}
-\begin{code}
chooseUniqueOcc :: NameSpace -> String -> OccSet -> OccName
chooseUniqueOcc ns str set = loop (mkOccName ns str) (0::Int)
where
loop occ n
| occ `elemOccSet` set = loop (mkOccName ns (str ++ show n)) (n+1)
| otherwise = occ
-\end{code}
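-- A self-contained sketch of the same loop, over plain Strings and a
-- Data.Set rather than an OccSet (illustrative only; names are invented):

import qualified Data.Set as Set

chooseUniqueString :: Set.Set String -> String -> String
chooseUniqueString used str = go str (0 :: Int)
  where
    go s n | s `Set.member` used = go (str ++ show n) (n + 1)
           | otherwise           = s

-- e.g. chooseUniqueString (Set.fromList ["R:Map", "R:Map0"]) "R:Map" == "R:Map1",
-- mirroring how mkInstTyTcOcc above picks a fresh "R:..." name.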
+{-
We used to add a '$m' to indicate a method, but that gives rise to bad
error messages from the type checker when we print the function name or pattern
of an instance-decl binding. Why? Because the binding is zapped
@@ -770,19 +750,18 @@ e.g. a call to constructor MkFoo where
If this is necessary, we do it by prefixing '$m'. These
guys never show up in error messages. What a hack.
+-}
-\begin{code}
mkMethodOcc :: OccName -> OccName
mkMethodOcc occ@(OccName VarName _) = occ
mkMethodOcc occ = mk_simple_deriv varName "$m" occ
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Tidying them up}
-%* *
-%************************************************************************
+* *
+************************************************************************
Before we print chunks of code we like to rename it so that
we don't have to print lots of silly uniques in it. But we mustn't
@@ -809,8 +788,8 @@ type TidyOccEnv = UniqFM Int
* When looking for a renaming for "foo2" we strip off the "2" and start
with "foo". Otherwise if we tidy twice we get silly names like foo23.
+-}
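-- A much-simplified, self-contained sketch of that scheme, using a Data.Map
-- over Strings instead of a UniqFM over FastStrings (illustrative only; the
-- real tidyOccName is just below):

import qualified Data.Map.Strict as Map
import Data.Char (isDigit)
import Data.List (mapAccumL)

type TidySketchEnv = Map.Map String Int   -- base name -> next suffix to try

tidySketch :: TidySketchEnv -> String -> (TidySketchEnv, String)
tidySketch env occ =
  let base = reverse (dropWhile isDigit (reverse occ))   -- "foo2" -> "foo"
  in case Map.lookup base env of
       Nothing -> (Map.insert base 1 env, base)                  -- first "foo" stays "foo"
       Just n  -> (Map.insert base (n + 1) env, base ++ show n)  -- then "foo1", "foo2", ...

-- snd (mapAccumL tidySketch Map.empty ["foo", "foo", "foo22"]) == ["foo", "foo1", "foo2"]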
-\begin{code}
type TidyOccEnv = UniqFM Int -- The in-scope OccNames
-- See Note [TidyOccEnv]
@@ -843,16 +822,16 @@ tidyOccName env occ@(OccName occ_sp fs)
where
n1 = n+1
new_fs = mkFastString (base ++ show n)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Binary instance
Here rather than BinIface because OccName is abstract
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Binary NameSpace where
put_ bh VarName = do
putByte bh 0
@@ -878,4 +857,3 @@ instance Binary OccName where
aa <- get bh
ab <- get bh
return (OccName aa ab)
-\end{code}
diff --git a/compiler/basicTypes/OccName.lhs-boot b/compiler/basicTypes/OccName.hs-boot
index d9c7fcd141..c6fa8850cf 100644
--- a/compiler/basicTypes/OccName.lhs-boot
+++ b/compiler/basicTypes/OccName.hs-boot
@@ -1,5 +1,3 @@
-\begin{code}
module OccName where
data OccName
-\end{code}
diff --git a/compiler/basicTypes/PatSyn.lhs b/compiler/basicTypes/PatSyn.hs
index 9fc4f98c8c..f2cef7bbe5 100644
--- a/compiler/basicTypes/PatSyn.lhs
+++ b/compiler/basicTypes/PatSyn.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1998
+
\section[PatSyn]{@PatSyn@: Pattern synonyms}
+-}
-\begin{code}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module PatSyn (
@@ -36,16 +36,15 @@ import HsBinds( HsPatSynDetails(..) )
import qualified Data.Data as Data
import qualified Data.Typeable
import Data.Function
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Pattern synonyms}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A pattern synonym
-- See Note [Pattern synonym representation]
data PatSyn
@@ -90,8 +89,8 @@ data PatSyn
-- See Note [Builder for pattern synonyms with unboxed type]
}
deriving Data.Typeable.Typeable
-\end{code}
+{-
Note [Pattern synonym representation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following pattern synonym declaration
@@ -143,7 +142,7 @@ For the above example, the matcher function has type:
with the following implementation:
- $mP @r @t $dEq $dNum scrut cont fail
+ $mP @r @t $dEq $dNum scrut cont fail
= case scrut of
MkT @b $dShow $dOrd [x] (Just 42) -> cont @b $dShow $dOrd x
_ -> fail Void#
@@ -153,7 +152,7 @@ be instantiated by an unboxed type; for example where we see
f (P x) = 3#
The extra Void# argument for the failure continuation is needed so that
-it is lazy even when the result type is unboxed.
+it is lazy even when the result type is unboxed.
For the same reason, if the pattern has no arguments, an extra Void#
argument is added to the success continuation as well.
@@ -190,13 +189,13 @@ we must remember that the builder has this void argument. This is
done by TcPatSyn.patSynBuilderOcc.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Instances}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Eq PatSyn where
(==) = (==) `on` getUnique
(/=) = (/=) `on` getUnique
@@ -226,16 +225,15 @@ instance Data.Data PatSyn where
toConstr _ = abstractConstr "PatSyn"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "PatSyn"
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Construction}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Build a new pattern synonym
mkPatSyn :: Name
-> Bool -- ^ Is the pattern synonym declared infix?
@@ -263,9 +261,7 @@ mkPatSyn name declared_infix
psOrigResTy = orig_res_ty,
psMatcher = matcher,
psBuilder = builder }
-\end{code}
-\begin{code}
-- | The 'Name' of the 'PatSyn', giving it a unique, rooted identification
patSynName :: PatSyn -> Name
patSynName = psName
@@ -347,4 +343,3 @@ patSynInstResTy (MkPatSyn { psName = name, psUnivTyVars = univ_tvs
= ASSERT2( length univ_tvs == length inst_tys
, ptext (sLit "patSynInstResTy") <+> ppr name $$ ppr univ_tvs $$ ppr inst_tys )
substTyWith univ_tvs inst_tys res_ty
-\end{code}
diff --git a/compiler/basicTypes/PatSyn.lhs-boot b/compiler/basicTypes/PatSyn.hs-boot
index 0bb85e9413..733c51b355 100644
--- a/compiler/basicTypes/PatSyn.lhs-boot
+++ b/compiler/basicTypes/PatSyn.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module PatSyn where
import Name( NamedThing )
import Data.Typeable ( Typeable )
@@ -16,4 +15,3 @@ instance OutputableBndr PatSyn
instance Uniquable PatSyn
instance Typeable PatSyn
instance Data PatSyn
-\end{code}
diff --git a/compiler/basicTypes/RdrName.lhs b/compiler/basicTypes/RdrName.hs
index 22893f341e..71135d05d1 100644
--- a/compiler/basicTypes/RdrName.lhs
+++ b/compiler/basicTypes/RdrName.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
-\begin{code}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
-- |
@@ -45,7 +44,7 @@ module RdrName (
-- * Global mapping of 'RdrName' to 'GlobalRdrElt's
GlobalRdrEnv, emptyGlobalRdrEnv, mkGlobalRdrEnv, plusGlobalRdrEnv,
- lookupGlobalRdrEnv, extendGlobalRdrEnv,
+ lookupGlobalRdrEnv, extendGlobalRdrEnv,
pprGlobalRdrEnv, globalRdrEnvElts,
lookupGRE_RdrName, lookupGRE_Name, getGRE_NameQualifier_maybes,
transformGREs, findLocalDupsRdrEnv, pickGREs,
@@ -76,15 +75,15 @@ import Util
import StaticFlags( opt_PprStyle_Debug )
import Data.Data
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The main data type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Do not use the data constructors of RdrName directly: prefer the family
-- of functions that creates them, such as 'mkRdrUnqual'
data RdrName
@@ -117,16 +116,14 @@ data RdrName
--
-- Such a 'RdrName' can be created by using 'getRdrName' on a 'Name'
deriving (Data, Typeable)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Simple functions}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
instance HasOccName RdrName where
occName = rdrNameOcc
@@ -173,9 +170,7 @@ demoteRdrName (Unqual occ) = fmap Unqual (demoteOccName occ)
demoteRdrName (Qual m occ) = fmap (Qual m) (demoteOccName occ)
demoteRdrName (Orig _ _) = panic "demoteRdrName"
demoteRdrName (Exact _) = panic "demoteRdrName"
-\end{code}
-\begin{code}
-- These two are the basic constructors
mkRdrUnqual :: OccName -> RdrName
mkRdrUnqual occ = Unqual occ
@@ -213,9 +208,7 @@ nukeExact :: Name -> RdrName
nukeExact n
| isExternalName n = Orig (nameModule n) (nameOccName n)
| otherwise = Unqual (nameOccName n)
-\end{code}
-\begin{code}
isRdrDataCon :: RdrName -> Bool
isRdrTyVar :: RdrName -> Bool
isRdrTc :: RdrName -> Bool
@@ -256,16 +249,15 @@ isExact _ = False
isExact_maybe :: RdrName -> Maybe Name
isExact_maybe (Exact n) = Just n
isExact_maybe _ = Nothing
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Instances}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Outputable RdrName where
ppr (Exact name) = ppr name
ppr (Unqual occ) = ppr occ
@@ -323,15 +315,15 @@ instance Ord RdrName where
compare (Orig m1 o1) (Orig m2 o2) = (o1 `compare` o2) `thenCmp` (m1 `compare` m2)
compare (Orig _ _) _ = GT
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
LocalRdrEnv
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | This environment is used to store local bindings (@let@, @where@, lambda, @case@).
-- It is keyed by OccName, because we never use it for qualified names
-- We keep the current mapping, *and* the set of all Names in scope
@@ -388,11 +380,11 @@ inLocalRdrEnvScope :: Name -> LocalRdrEnv -> Bool
inLocalRdrEnvScope name (LRE { lre_in_scope = ns }) = name `elemNameSet` ns
delLocalRdrEnvList :: LocalRdrEnv -> [OccName] -> LocalRdrEnv
-delLocalRdrEnvList (LRE { lre_env = env, lre_in_scope = ns }) occs
+delLocalRdrEnvList (LRE { lre_env = env, lre_in_scope = ns }) occs
= LRE { lre_env = delListFromOccEnv env occs
, lre_in_scope = ns }
-\end{code}
+{-
Note [Local bindings with Exact Names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
With Template Haskell we can make local bindings that have Exact Names.
@@ -401,13 +393,13 @@ does so in RnTpes.bindHsTyVars), so for an Exact Name we must consult
the in-scope-name-set.
-%************************************************************************
-%* *
+************************************************************************
+* *
GlobalRdrEnv
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type GlobalRdrEnv = OccEnv [GlobalRdrElt]
-- ^ Keyed by 'OccName'; when looking up a qualified name
-- we look up the 'OccName' part, and then check the 'Provenance'
@@ -455,8 +447,8 @@ hasParent n (ParentIs n')
| n /= n' = pprPanic "hasParent" (ppr n <+> ppr n') -- Parents should agree
#endif
hasParent n _ = ParentIs n
-\end{code}
+{-
Note [Parents]
~~~~~~~~~~~~~~~~~
Parent Children
@@ -496,9 +488,8 @@ those. For T that will mean we have
one GRE with Parent C
one GRE with NoParent
That's why plusParent picks the "best" case.
+-}
-
-\begin{code}
-- | make a 'GlobalRdrEnv' where all the elements point to the same
-- Provenance (useful for "hiding" imports, or imports with
-- no details).
@@ -531,9 +522,9 @@ instance Outputable GlobalRdrElt where
pprGlobalRdrEnv :: Bool -> GlobalRdrEnv -> SDoc
pprGlobalRdrEnv locals_only env
- = vcat [ ptext (sLit "GlobalRdrEnv") <+> ppWhen locals_only (ptext (sLit "(locals only)"))
+ = vcat [ ptext (sLit "GlobalRdrEnv") <+> ppWhen locals_only (ptext (sLit "(locals only)"))
<+> lbrace
- , nest 2 (vcat [ pp (remove_locals gre_list) | gre_list <- occEnvElts env ]
+ , nest 2 (vcat [ pp (remove_locals gre_list) | gre_list <- occEnvElts env ]
<+> rbrace) ]
where
remove_locals gres | locals_only = filter isLocalGRE gres
@@ -642,11 +633,9 @@ pickGREs rdr_name gres
= filter ((== mod) . is_as . is_decl) is
| otherwise
= []
-\end{code}
-Building GlobalRdrEnvs
+-- Building GlobalRdrEnvs
-\begin{code}
plusGlobalRdrEnv :: GlobalRdrEnv -> GlobalRdrEnv -> GlobalRdrEnv
plusGlobalRdrEnv env1 env2 = plusOccEnv_C (foldr insertGRE) env1 env2
@@ -755,16 +744,16 @@ shadow_name env name
= Nothing -- Shadow both qualified and unqualified
| otherwise -- Shadow unqualified only
= Just (is { is_decl = id_spec { is_qual = True } })
-\end{code}
+{-
Note [Template Haskell binders in the GlobalRdrEnv]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For reasons described in Note [Top-level Names in Template Haskell decl quotes]
in RnNames, a GRE with an Internal gre_name (i.e. one generated by a TH decl
quote) should *shadow* a GRE with an External gre_name. Hence some faffing
around in pickGREs and findLocalDupsRdrEnv
+-}
-\begin{code}
findLocalDupsRdrEnv :: GlobalRdrEnv -> [Name] -> [[GlobalRdrElt]]
-- ^ For each 'OccName', see if there are multiple local definitions
-- for it; return a list of all such
@@ -791,15 +780,15 @@ findLocalDupsRdrEnv rdr_env occs
| isInternalName name = isInternalName n
| otherwise = True
pick _ _ = False
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Provenance
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | The 'Provenance' of something says how it came to be in scope.
-- It's quite elaborate so that we can give accurate unused-name warnings.
data Provenance
@@ -890,9 +879,7 @@ instance Ord ImpDeclSpec where
instance Ord ImpItemSpec where
compare is1 is2 = is_iloc is1 `compare` is_iloc is2
-\end{code}
-\begin{code}
plusProv :: Provenance -> Provenance -> Provenance
-- Choose LocalDef over Imported
-- There is an obscure bug lurking here; in the presence
@@ -946,4 +933,3 @@ instance Outputable ImportSpec where
pprLoc :: SrcSpan -> SDoc
pprLoc (RealSrcSpan s) = ptext (sLit "at") <+> ppr s
pprLoc (UnhelpfulSpan {}) = empty
-\end{code}
diff --git a/compiler/basicTypes/SrcLoc.lhs b/compiler/basicTypes/SrcLoc.hs
index c7e1fbea9f..8e17561651 100644
--- a/compiler/basicTypes/SrcLoc.lhs
+++ b/compiler/basicTypes/SrcLoc.hs
@@ -1,8 +1,5 @@
-%
-% (c) The University of Glasgow, 1992-2006
-%
+-- (c) The University of Glasgow, 1992-2006
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
{-# OPTIONS_GHC -fno-omit-interface-pragmas #-}
-- Workaround for Trac #5252 crashes the bootstrap compiler without -O
@@ -83,17 +80,18 @@ import Data.Bits
import Data.Data
import Data.List
import Data.Ord
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[SrcLoc-SrcLocations]{Source-location information}
-%* *
-%************************************************************************
+* *
+************************************************************************
We keep information about the {\em definition} point for each entity;
this is the obvious stuff:
-\begin{code}
+-}
+
-- | Represents a single point within a file
data RealSrcLoc
= SrcLoc FastString -- A precise location (file name)
@@ -104,15 +102,15 @@ data SrcLoc
= RealSrcLoc {-# UNPACK #-}!RealSrcLoc
| UnhelpfulLoc FastString -- Just a general indication
deriving Show
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[SrcLoc-access-fns]{Access functions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkSrcLoc :: FastString -> Int -> Int -> SrcLoc
mkSrcLoc x line col = RealSrcLoc (mkRealSrcLoc x line col)
@@ -149,15 +147,15 @@ advanceSrcLoc (SrcLoc f l _) '\n' = SrcLoc f (l + 1) 1
advanceSrcLoc (SrcLoc f l c) '\t' = SrcLoc f l (((((c - 1) `shiftR` 3) + 1)
`shiftL` 3) + 1)
advanceSrcLoc (SrcLoc f l c) _ = SrcLoc f l (c + 1)
-\end{code}
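-- The '\t' equation above is just "advance to the next 8-column tab stop"
-- for 1-based columns. A stand-alone check of that arithmetic (assumed
-- helper name, not GHC code):

import Data.Bits (shiftL, shiftR)

nextTabStop :: Int -> Int
nextTabStop c = ((((c - 1) `shiftR` 3) + 1) `shiftL` 3) + 1

-- nextTabStop 1 == 9, nextTabStop 8 == 9, nextTabStop 9 == 17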
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[SrcLoc-instances]{Instance declarations for various names}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- SrcLoc is an instance of Ord so that we can sort error messages easily
instance Eq SrcLoc where
loc1 == loc2 = case loc1 `cmpSrcLoc` loc2 of
@@ -219,15 +217,15 @@ instance Data SrcSpan where
toConstr _ = abstractConstr "SrcSpan"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "SrcSpan"
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[SrcSpan]{Source Spans}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
{- |
A SrcSpan delimits a portion of a text file. It could be represented
by a pair of (line,column) coordinates, but in fact we optimise
@@ -330,15 +328,15 @@ combineRealSrcSpans span1 span2
(line_end, col_end) = max (srcSpanEndLine span1, srcSpanEndCol span1)
(srcSpanEndLine span2, srcSpanEndCol span2)
file = srcSpanFile span1
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[SrcSpan-predicates]{Predicates}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Test if a 'SrcSpan' is "good", i.e. has precise location information
isGoodSrcSpan :: SrcSpan -> Bool
isGoodSrcSpan (RealSrcSpan _) = True
@@ -350,15 +348,13 @@ isOneLineSpan :: SrcSpan -> Bool
isOneLineSpan (RealSrcSpan s) = srcSpanStartLine s == srcSpanEndLine s
isOneLineSpan (UnhelpfulSpan _) = False
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[SrcSpan-unsafe-access-fns]{Unsafe access functions}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
srcSpanStartLine :: RealSrcSpan -> Int
srcSpanEndLine :: RealSrcSpan -> Int
@@ -381,15 +377,13 @@ srcSpanEndCol SrcSpanOneLine{ srcSpanECol=c } = c
srcSpanEndCol SrcSpanMultiLine{ srcSpanECol=c } = c
srcSpanEndCol SrcSpanPoint{ srcSpanCol=c } = c
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[SrcSpan-access-fns]{Access functions}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
-- | Returns the location at the start of the 'SrcSpan' or a "bad" 'SrcSpan' if that is unavailable
srcSpanStart :: SrcSpan -> SrcLoc
@@ -416,15 +410,13 @@ srcSpanFileName_maybe :: SrcSpan -> Maybe FastString
srcSpanFileName_maybe (RealSrcSpan s) = Just (srcSpanFile s)
srcSpanFileName_maybe (UnhelpfulSpan _) = Nothing
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[SrcSpan-instances]{Instances}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
-- We want to order SrcSpans first by the start point, then by the end point.
instance Ord SrcSpan where
@@ -499,15 +491,15 @@ pprUserRealSpan show_path (SrcSpanPoint src_path line col)
= hcat [ ppWhen show_path (pprFastFilePath src_path <> colon)
, int line <> colon
, int col ]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Located]{Attaching SrcSpans to things}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | We attach SrcSpans to lots of things, so let's have a datatype for it.
data GenLocated l e = L l e
deriving (Eq, Ord, Typeable, Data)
@@ -556,15 +548,15 @@ instance (Outputable l, Outputable e) => Outputable (GenLocated l e) where
-- ifPprDebug (braces (pprUserSpan False l))
ifPprDebug (braces (ppr l))
$$ ppr e
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Ordering SrcSpans for InteractiveUI}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Alternative strategies for ordering 'SrcSpan's
leftmost_smallest, leftmost_largest, rightmost :: SrcSpan -> SrcSpan -> Ordering
rightmost = flip compare
@@ -587,5 +579,3 @@ isSubspanOf src parent
| srcSpanFileName_maybe parent /= srcSpanFileName_maybe src = False
| otherwise = srcSpanStart parent <= srcSpanStart src &&
srcSpanEnd parent >= srcSpanEnd src
-
-\end{code}
diff --git a/compiler/basicTypes/UniqSupply.lhs b/compiler/basicTypes/UniqSupply.hs
index d1a1efd298..3d0573dba0 100644
--- a/compiler/basicTypes/UniqSupply.lhs
+++ b/compiler/basicTypes/UniqSupply.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
-\begin{code}
{-# LANGUAGE UnboxedTuples #-}
module UniqSupply (
@@ -33,15 +32,14 @@ import GHC.IO
import MonadUtils
import Control.Monad
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Splittable Unique supply: @UniqSupply@}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A value of type 'UniqSupply' is unique, and it can
-- supply /one/ distinct 'Unique'. Also, from the supply, one can
-- manufacture an arbitrary number of further 'UniqSupply' values,
@@ -50,9 +48,7 @@ data UniqSupply
= MkSplitUniqSupply FastInt -- make the Unique with this
UniqSupply UniqSupply
-- when split => these two supplies
-\end{code}
-\begin{code}
mkSplitUniqSupply :: Char -> IO UniqSupply
-- ^ Create a unique supply out of thin air. The character given must
-- be distinct from those of all calls to this function in the compiler
@@ -69,9 +65,7 @@ uniqsFromSupply :: UniqSupply -> [Unique] -- Infinite
-- ^ Obtain an infinite list of 'Unique' that can be generated by constant splitting of the supply
takeUniqFromSupply :: UniqSupply -> (Unique, UniqSupply)
-- ^ Obtain the 'Unique' from this particular 'UniqSupply', and a new supply
-\end{code}
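-- A usage sketch of the interface declared above (demoSupply is an
-- illustrative name; it assumes UniqSupply and Unique are imported, and
-- relies on the Show instance for Unique defined in Unique.hs):

demoSupply :: IO ()
demoSupply = do
  us <- mkSplitUniqSupply 'x'            -- 'x' becomes the tag character
  let (us1, us2) = splitUniqSupply us    -- two independent supplies
      (u, _)     = takeUniqFromSupply us1
  print (u : take 3 (uniqsFromSupply us2))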
-\begin{code}
mkSplitUniqSupply c
= case fastOrd (cUnbox c) `shiftLFastInt` _ILIT(24) of
mask -> let
@@ -93,21 +87,19 @@ foreign import ccall unsafe "genSym" genSym :: IO Int
splitUniqSupply (MkSplitUniqSupply _ s1 s2) = (s1, s2)
listSplitUniqSupply (MkSplitUniqSupply _ s1 s2) = s1 : listSplitUniqSupply s2
-\end{code}
-\begin{code}
uniqFromSupply (MkSplitUniqSupply n _ _) = mkUniqueGrimily (iBox n)
uniqsFromSupply (MkSplitUniqSupply n _ s2) = mkUniqueGrimily (iBox n) : uniqsFromSupply s2
takeUniqFromSupply (MkSplitUniqSupply n s1 _) = (mkUniqueGrimily (iBox n), s1)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection[UniqSupply-monad]{@UniqSupply@ monad: @UniqSM@}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A monad which just gives the ability to obtain 'Unique's
newtype UniqSM result = USM { unUSM :: UniqSupply -> (# result, UniqSupply #) }
@@ -139,10 +131,9 @@ initUs_ init_us m = case unUSM m init_us of { (# r, _ #) -> r }
{-# INLINE lazyThenUs #-}
{-# INLINE returnUs #-}
{-# INLINE splitUniqSupply #-}
-\end{code}
-@thenUs@ is where we split the @UniqSupply@.
-\begin{code}
+-- @thenUs@ is where we split the @UniqSupply@.
+
liftUSM :: UniqSM a -> UniqSupply -> (a, UniqSupply)
liftUSM (USM m) us = case m us of (# a, us' #) -> (a, us')
@@ -196,9 +187,7 @@ getUniqueUs = USM (\us -> case takeUniqFromSupply us of
getUniquesUs :: UniqSM [Unique]
getUniquesUs = USM (\us -> case splitUniqSupply us of
(us1,us2) -> (# uniqsFromSupply us1, us2 #))
-\end{code}
-\begin{code}
-- {-# SPECIALIZE mapM :: (a -> UniqSM b) -> [a] -> UniqSM [b] #-}
-- {-# SPECIALIZE mapAndUnzipM :: (a -> UniqSM (b,c)) -> [a] -> UniqSM ([b],[c]) #-}
-- {-# SPECIALIZE mapAndUnzip3M :: (a -> UniqSM (b,c,d)) -> [a] -> UniqSM ([b],[c],[d]) #-}
@@ -209,4 +198,3 @@ lazyMapUs f (x:xs)
= f x `lazyThenUs` \ r ->
lazyMapUs f xs `lazyThenUs` \ rs ->
returnUs (r:rs)
-\end{code}
diff --git a/compiler/basicTypes/Unique.lhs b/compiler/basicTypes/Unique.hs
index 8191db6ffd..ecff80fec8 100644
--- a/compiler/basicTypes/Unique.lhs
+++ b/compiler/basicTypes/Unique.hs
@@ -1,7 +1,7 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
@Uniques@ are used to distinguish entities in the compiler (@Ids@,
@Classes@, etc.) from each other. Thus, @Uniques@ are the basic
@@ -14,8 +14,8 @@ directed to that end.
Some of the other hair in this code is to be able to use a
``splittable @UniqueSupply@'' if requested/possible (not standard
Haskell).
+-}
-\begin{code}
{-# LANGUAGE CPP, BangPatterns, MagicHash #-}
module Unique (
@@ -70,30 +70,30 @@ import Util
import GHC.Exts (indexCharOffAddr#, Char(..))
import Data.Char ( chr, ord )
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Unique-type]{@Unique@ type and operations}
-%* *
-%************************************************************************
+* *
+************************************************************************
The @Chars@ are ``tag letters'' that identify the @UniqueSupply@.
Fast comparison is everything on @Uniques@:
+-}
-\begin{code}
-- why not newtype Int?
-- | The type of unique identifiers that are used in many places in GHC
-- for fast ordering and equality tests. You should generate these with
-- the functions from the 'UniqSupply' module
data Unique = MkUnique FastInt
-\end{code}
+{-
Now come the functions which construct uniques from their pieces, and vice versa.
The stuff about unique *supplies* is handled further down this module.
+-}
-\begin{code}
unpkUnique :: Unique -> (Char, Int) -- The reverse
mkUniqueGrimily :: Int -> Unique -- A trap-door for UniqSupply
@@ -103,10 +103,7 @@ getKeyFastInt :: Unique -> FastInt -- for Var
incrUnique :: Unique -> Unique
deriveUnique :: Unique -> Int -> Unique
newTagUnique :: Unique -> Char -> Unique
-\end{code}
-
-\begin{code}
mkUniqueGrimily x = MkUnique (iUnbox x)
{-# INLINE getKey #-}
@@ -146,17 +143,15 @@ unpkUnique (MkUnique u)
i = iBox (u `bitAndFastInt` _ILIT(16777215){-``0x00ffffff''-})
in
(tag, i)
-\end{code}
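-- A boxed-Int sketch of the packing scheme visible above, assuming (as the
-- shift in UniqSupply.mkSplitUniqSupply suggests) that the tag character
-- sits above bit 24, with the 16777215 (0x00ffffff) mask recovering the
-- 24-bit index; the real code works on unboxed FastInts:

import Data.Bits ((.&.), (.|.), shiftL, shiftR)
import Data.Char (chr, ord)

pkUniqueSketch :: Char -> Int -> Int
pkUniqueSketch tag i = (ord tag `shiftL` 24) .|. (i .&. 16777215)

unpkUniqueSketch :: Int -> (Char, Int)
unpkUniqueSketch u = (chr (u `shiftR` 24), u .&. 16777215)

-- unpkUniqueSketch (pkUniqueSketch 'v' 42) == ('v', 42)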
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Uniquable-class]{The @Uniquable@ class}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Class of things that we can obtain a 'Unique' from
class Uniquable a where
getUnique :: a -> Unique
@@ -169,20 +164,19 @@ instance Uniquable FastString where
instance Uniquable Int where
getUnique i = mkUniqueGrimily i
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Unique-instances]{Instance declarations for @Unique@}
-%* *
-%************************************************************************
+* *
+************************************************************************
And the whole point (besides uniqueness) is fast equality. We don't
use `deriving' because we want {\em precise} control of ordering
(equality on @Uniques@ is v common).
+-}
-\begin{code}
eqUnique, ltUnique, leUnique :: Unique -> Unique -> Bool
eqUnique (MkUnique u1) (MkUnique u2) = u1 ==# u2
ltUnique (MkUnique u1) (MkUnique u2) = u1 <# u2
@@ -206,10 +200,9 @@ instance Ord Unique where
-----------------
instance Uniquable Unique where
getUnique u = u
-\end{code}
-We do sometimes make strings with @Uniques@ in them:
-\begin{code}
+-- We do sometimes make strings with @Uniques@ in them:
+
showUnique :: Unique -> String
showUnique uniq
= case unpkUnique uniq of
@@ -230,19 +223,19 @@ instance Outputable Unique where
instance Show Unique where
show uniq = showUnique uniq
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-base62]{Base-62 numbers}
-%* *
-%************************************************************************
+* *
+************************************************************************
A character-stingy way to read/write numbers (notably Uniques).
The ``62-its'' are \tr{[0-9a-zA-Z]}. We don't handle negative Ints.
Code stolen from Lennart.
+-}
-\begin{code}
iToBase62 :: Int -> String
iToBase62 n_
= ASSERT(n_ >= 0) go (iUnbox n_) ""
@@ -259,13 +252,13 @@ iToBase62 n_
{-# INLINE chooseChar62 #-}
chooseChar62 n = C# (indexCharOffAddr# chars62 n)
!chars62 = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"#
-\end{code}
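-- A boxed-Int sketch of the same encoding (the real iToBase62 above works
-- on unboxed FastInts and builds the string with a worker loop):

base62Digits :: String
base62Digits = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"

toBase62 :: Int -> String
toBase62 n
  | n < 0     = error "toBase62: negative"      -- negative Ints are not handled
  | n < 62    = [base62Digits !! n]
  | otherwise = toBase62 (n `div` 62) ++ [base62Digits !! (n `mod` 62)]

-- toBase62 61 == "Z", toBase62 62 == "10", toBase62 100 == "1C"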
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Uniques-prelude]{@Uniques@ for wired-in Prelude things}
-%* *
-%************************************************************************
+* *
+************************************************************************
Allocation of unique supply characters:
v,t,u : for renumbering value-, type- and usage- vars.
@@ -285,8 +278,8 @@ Allocation of unique supply characters:
n Native codegen
r Hsc name cache
s simplifier
+-}
-\begin{code}
mkAlphaTyVarUnique :: Int -> Unique
mkPreludeClassUnique :: Int -> Unique
mkPreludeTyConUnique :: Int -> Unique
@@ -356,5 +349,3 @@ mkVarOccUnique fs = mkUnique 'i' (iBox (uniqueOfFS fs))
mkDataOccUnique fs = mkUnique 'd' (iBox (uniqueOfFS fs))
mkTvOccUnique fs = mkUnique 'v' (iBox (uniqueOfFS fs))
mkTcOccUnique fs = mkUnique 'c' (iBox (uniqueOfFS fs))
-\end{code}
-
diff --git a/compiler/basicTypes/Var.lhs b/compiler/basicTypes/Var.hs
index 62253c8642..925ffe3577 100644
--- a/compiler/basicTypes/Var.lhs
+++ b/compiler/basicTypes/Var.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section{@Vars@: Variables}
+-}
-\begin{code}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
-- |
-- #name_types#
@@ -80,18 +80,17 @@ import FastString
import Outputable
import Data.Data
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Synonyms
-%* *
-%************************************************************************
+* *
+************************************************************************
-- These synonyms are here and not in Id because otherwise we need a very
-- large number of SOURCE imports of Id.hs :-(
+-}
-\begin{code}
type Id = Var -- A term-level identifier
type TyVar = Var -- Type *or* kind variable (historical)
@@ -110,8 +109,8 @@ type IpId = EvId -- A term-level implicit parameter
type EqVar = EvId -- Boxed equality evidence
type CoVar = Id -- See Note [Evidence: EvIds and CoVars]
-\end{code}
+{-
Note [Evidence: EvIds and CoVars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* An EvId (evidence Id) is a *boxed*, term-level evidence variable
@@ -136,19 +135,19 @@ go over the whole compiler code to use:
- KindVar to mean kind variables
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{The main data type declarations}
-%* *
-%************************************************************************
+* *
+************************************************************************
Every @Var@ has a @Unique@, to uniquify it and for fast comparison, a
@Type@, and an @IdInfo@ (non-essential info about it, e.g.,
strictness). The essential info about different kinds of @Vars@ is
in its @VarDetails@.
+-}
-\begin{code}
-- | Essentially a typed 'Name', that may also contain some additional information
-- about the 'Var' and its use sites.
data Var
@@ -185,8 +184,8 @@ data IdScope -- See Note [GlobalId/LocalId]
data ExportFlag
= NotExported -- ^ Not exported: may be discarded as dead code.
| Exported -- ^ Exported: kept alive
-\end{code}
+{-
Note [GlobalId/LocalId]
~~~~~~~~~~~~~~~~~~~~~~~
A GlobalId is
@@ -203,13 +202,13 @@ A LocalId is
* always treated as a candidate by the free-variable finder
After CoreTidy, top-level LocalIds are turned into GlobalIds
+-}
-\begin{code}
instance Outputable Var where
ppr var = ppr (varName var) <> getPprStyle (ppr_debug var)
ppr_debug :: Var -> PprStyle -> SDoc
-ppr_debug (TyVar {}) sty
+ppr_debug (TyVar {}) sty
| debugStyle sty = brackets (ptext (sLit "tv"))
ppr_debug (TcTyVar {tc_tv_details = d}) sty
| dumpStyle sty || debugStyle sty = brackets (pprTcTyVarDetails d)
@@ -243,10 +242,7 @@ instance Data Var where
toConstr _ = abstractConstr "Var"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "Var"
-\end{code}
-
-\begin{code}
varUnique :: Var -> Unique
varUnique var = mkUniqueGrimily (iBox (realUnique var))
@@ -262,16 +258,15 @@ setVarName var new_name
setVarType :: Id -> Type -> Id
setVarType id ty = id { varType = ty }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Type and kind variables}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tyVarName :: TyVar -> Name
tyVarName = varName
@@ -294,9 +289,7 @@ updateTyVarKindM :: (Monad m) => (Kind -> m Kind) -> TyVar -> m TyVar
updateTyVarKindM update tv
= do { k' <- update (tyVarKind tv)
; return $ tv {varType = k'} }
-\end{code}
-\begin{code}
mkTyVar :: Name -> Kind -> TyVar
mkTyVar name kind = TyVar { varName = name
, realUnique = getKeyFastInt (nameUnique name)
@@ -327,15 +320,14 @@ mkKindVar name kind = TyVar
, realUnique = getKeyFastInt (nameUnique name)
, varType = kind }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Ids}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
idInfo :: Id -> IdInfo
idInfo (Id { id_info = info }) = info
idInfo other = pprPanic "idInfo" (ppr other)
@@ -394,15 +386,15 @@ setIdNotExported :: Id -> Id
-- ^ We can only do this to LocalIds
setIdNotExported id = ASSERT( isLocalId id )
id { idScope = LocalId NotExported }
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Predicates over variables}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
isTyVar :: Var -> Bool
isTyVar = isTKVar -- Historical
@@ -446,4 +438,3 @@ isExportedId :: Var -> Bool
isExportedId (Id { idScope = GlobalId }) = True
isExportedId (Id { idScope = LocalId Exported}) = True
isExportedId _ = False
-\end{code}
diff --git a/compiler/basicTypes/VarEnv.lhs b/compiler/basicTypes/VarEnv.hs
index 30d40c8efd..1d1c0604a3 100644
--- a/compiler/basicTypes/VarEnv.lhs
+++ b/compiler/basicTypes/VarEnv.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
-\begin{code}
module VarEnv (
-- * Var, Id and TyVar environments (maps)
VarEnv, IdEnv, TyVarEnv, CoVarEnv,
@@ -60,16 +59,15 @@ import Outputable
import FastTypes
import StaticFlags
import FastString
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
In-scope sets
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A set of variables that are in scope at some point
data InScopeSet = InScope (VarEnv Var) FastInt
-- The (VarEnv Var) is just a VarSet. But we write it like
@@ -129,9 +127,7 @@ lookupInScope_Directly (InScope in_scope _) uniq
unionInScope :: InScopeSet -> InScopeSet -> InScopeSet
unionInScope (InScope s1 _) (InScope s2 n2)
= InScope (s1 `plusVarEnv` s2) n2
-\end{code}
-\begin{code}
-- | @uniqAway in_scope v@ finds a unique that is not used in the
-- in-scope set, and gives that to v.
uniqAway :: InScopeSet -> Var -> Var
@@ -158,15 +154,15 @@ uniqAway' (InScope set n) var
| otherwise = setVarUnique var uniq
where
uniq = deriveUnique orig_unique (iBox (n *# k))
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Dual renaming
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | When we are comparing (or matching) types or terms, we are faced with
-- \"going under\" corresponding binders. E.g. when comparing:
--
@@ -320,8 +316,8 @@ nukeRnEnvL, nukeRnEnvR :: RnEnv2 -> RnEnv2
-- ^ Wipe the left or right side renaming
nukeRnEnvL env = env { envL = emptyVarEnv }
nukeRnEnvR env = env { envR = emptyVarEnv }
-\end{code}
+{-
Note [Eta expansion]
~~~~~~~~~~~~~~~~~~~~
When matching
@@ -337,29 +333,28 @@ For example, if we don't do this, we can get silly matches like
succeeding with [a -> v y], which is bogus of course.
-%************************************************************************
-%* *
+************************************************************************
+* *
Tidying
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | When tidying up print names, we keep a mapping of in-scope occ-names
-- (the 'TidyOccEnv') and a Var-to-Var of the current renamings
type TidyEnv = (TidyOccEnv, VarEnv Var)
emptyTidyEnv :: TidyEnv
emptyTidyEnv = (emptyTidyOccEnv, emptyVarEnv)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{@VarEnv@s}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type VarEnv elt = UniqFM elt
type IdEnv elt = VarEnv elt
type TyVarEnv elt = VarEnv elt
@@ -399,9 +394,7 @@ lookupWithDefaultVarEnv :: VarEnv a -> a -> Var -> a
elemVarEnv :: Var -> VarEnv a -> Bool
elemVarEnvByKey :: Unique -> VarEnv a -> Bool
foldVarEnv :: (a -> b -> b) -> b -> VarEnv a -> b
-\end{code}
-\begin{code}
elemVarEnv = elemUFM
elemVarEnvByKey = elemUFM_Directly
alterVarEnv = alterUFM
@@ -439,12 +432,12 @@ zipVarEnv tyvars tys = mkVarEnv (zipEqual "zipVarEnv" tyvars tys)
lookupVarEnv_NF env id = case lookupVarEnv env id of
Just xx -> xx
Nothing -> panic "lookupVarEnv_NF: Nothing"
-\end{code}
+{-
@modifyVarEnv@: Look up a thing in the VarEnv,
then mash it with the modify function, and put it back.
+-}
-\begin{code}
modifyVarEnv mangle_fn env key
= case (lookupVarEnv env key) of
Nothing -> env
@@ -455,4 +448,3 @@ modifyVarEnv_Directly mangle_fn env key
= case (lookupUFM_Directly env key) of
Nothing -> env
Just xx -> addToUFM_Directly env key (mangle_fn xx)
-\end{code}
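
The uniqAway contract above — hand out a unique that is guaranteed not to clash with anything in the in-scope set — can be sketched over Data.IntSet. The Int-valued uniques and the simple +1 stepping are assumptions of the sketch, not GHC's FastInt/deriveUnique machinery.

import qualified Data.IntSet as IntSet

-- Keep stepping the candidate unique until it avoids the in-scope set,
-- in the spirit of the uniqAway/uniqAway' loop.
uniqAwayInt :: IntSet.IntSet -> Int -> Int
uniqAwayInt inScope u
  | u `IntSet.member` inScope = uniqAwayInt inScope (u + 1)
  | otherwise                 = u
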
diff --git a/compiler/basicTypes/VarSet.lhs b/compiler/basicTypes/VarSet.hs
index 362f408d72..c13412484c 100644
--- a/compiler/basicTypes/VarSet.lhs
+++ b/compiler/basicTypes/VarSet.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module VarSet (
@@ -27,15 +26,15 @@ module VarSet (
import Var ( Var, TyVar, CoVar, Id )
import Unique
import UniqSet
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{@VarSet@s}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type VarSet = UniqSet Var
type IdSet = UniqSet Id
type TyVarSet = UniqSet TyVar
@@ -103,9 +102,7 @@ extendVarSet_C = addOneToUniqSet_C
delVarSetByKey = delOneFromUniqSet_Directly
elemVarSetByKey = elemUniqSet_Directly
partitionVarSet = partitionUniqSet
-\end{code}
-\begin{code}
mapUnionVarSet get_set xs = foldr (unionVarSet . get_set) emptyVarSet xs
-- See comments with type signatures
@@ -118,10 +115,6 @@ fixVarSet f s | new_s `subVarSet` s = s
| otherwise = fixVarSet f new_s
where
new_s = f s
-\end{code}
-\begin{code}
seqVarSet :: VarSet -> ()
seqVarSet s = sizeVarSet s `seq` ()
-\end{code}
-
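
fixVarSet above iterates a set transformer until it stops adding elements. A minimal Data.Set analogue (the element type and the name fixSet are assumptions of the sketch, not part of VarSet):

import qualified Data.Set as Set

-- Iterate f until its result is contained in the current set,
-- mirroring fixVarSet's subVarSet termination test.
fixSet :: Ord a => (Set.Set a -> Set.Set a) -> Set.Set a -> Set.Set a
fixSet f s
  | newS `Set.isSubsetOf` s = s
  | otherwise               = fixSet f newS
  where
    newS = f s
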
diff --git a/compiler/coreSyn/CoreArity.lhs b/compiler/coreSyn/CoreArity.hs
index 37517d6190..5128891763 100644
--- a/compiler/coreSyn/CoreArity.lhs
+++ b/compiler/coreSyn/CoreArity.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
Arity and eta expansion
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
-- | Arity and eta expansion
@@ -34,13 +34,13 @@ import Outputable
import FastString
import Pair
import Util ( debugIsOn )
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
manifestArity and exprArity
-%* *
-%************************************************************************
+* *
+************************************************************************
exprArity is a cheap-and-cheerful version of exprEtaExpandArity.
It tells how many things the expression can be applied to before doing
@@ -65,8 +65,8 @@ won't be eta-expanded.
And in any case it seems more robust to have exprArity be a bit more intelligent.
But note that (\x y z -> f x y z)
should have arity 3, regardless of f's arity.
+-}
-\begin{code}
manifestArity :: CoreExpr -> Arity
-- ^ manifestArity sees how many leading value lambdas there are,
-- after looking through casts
@@ -142,8 +142,8 @@ exprBotStrictness_maybe e
env = AE { ae_ped_bot = True, ae_cheap_fn = \ _ _ -> False }
sig ar = mkClosedStrictSig (replicate ar topDmd) botRes
-- For this purpose we can be very simple
-\end{code}
+{-
Note [exprArity invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~
exprArity has the following invariant:
@@ -238,11 +238,11 @@ When we come to an application we check that the arg is trivial.
unknown, hence arity 0
-%************************************************************************
-%* *
+************************************************************************
+* *
Computing the "arity" of an expression
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Definition of arity]
~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -465,7 +465,8 @@ Then f :: AT [False,False] ATop
f <expensive> :: AT [] ATop
-------------------- Main arity code ----------------------------
-\begin{code}
+-}
+
-- See Note [ArityType]
data ArityType = ATop [OneShotInfo] | ABot Arity
-- There is always an explicit lambda
@@ -559,8 +560,8 @@ rhsEtaExpandArity dflags cheap_app e
has_lam (Tick _ e) = has_lam e
has_lam (Lam b e) = isId b || has_lam e
has_lam _ = False
-\end{code}
+{-
Note [Arity analysis]
~~~~~~~~~~~~~~~~~~~~~
The motivating example for arity analysis is this:
@@ -628,8 +629,8 @@ PAPSs
because that might in turn make g inline (if it has an inline pragma),
which we might not want. After all, INLINE pragmas say "inline only
when saturated" so we don't want to be too gung-ho about saturating!
+-}
-\begin{code}
arityLam :: Id -> ArityType -> ArityType
arityLam id (ATop as) = ATop (idOneShotInfo id : as)
arityLam _ (ABot n) = ABot (n+1)
@@ -660,8 +661,8 @@ andArityType (ATop as) (ATop bs) = ATop (as `combine` bs)
combine (a:as) (b:bs) = (a `bestOneShot` b) : combine as bs
combine [] bs = takeWhile isOneShotInfo bs
combine as [] = takeWhile isOneShotInfo as
-\end{code}
+{-
Note [Combining case branches]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
@@ -679,8 +680,8 @@ lambda wasn't one-shot we don't want to do this.
So we combine the best of the two branches, on the (slightly dodgy)
basis that if we know one branch is one-shot, then they all must be.
+-}
-\begin{code}
---------------------------
type CheapFun = CoreExpr -> Maybe Type -> Bool
-- How to decide if an expression is cheap
@@ -767,14 +768,13 @@ arityType env (Tick t e)
| not (tickishIsCode t) = arityType env e
arityType _ _ = vanillaArityType
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
The main eta-expander
-%* *
-%************************************************************************
+* *
+************************************************************************
We go for:
f = \x1..xn -> N ==> f = \x1..xn y1..ym -> N y1..ym
@@ -822,8 +822,8 @@ Note that SCCs are not treated specially by etaExpand. If we have
etaExpand 2 (\x -> scc "foo" e)
= (\xy -> (scc "foo" e) y)
So the costs of evaluating 'e' (not 'e y') are attributed to "foo"
+-}
-\begin{code}
-- | @etaExpand n us e ty@ returns an expression with
-- the same meaning as @e@, but with arity @n@.
--
@@ -1001,4 +1001,3 @@ freshEtaId n subst ty
eta_id' = uniqAway (getTvInScope subst) $
mkSysLocal (fsLit "eta") (mkBuiltinUnique n) ty'
subst' = extendTvInScope subst eta_id'
-\end{code}
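
manifestArity, as described above, counts leading value lambdas while looking through casts. A toy version over a hypothetical mini expression type (not GHC's CoreExpr, and ticks are omitted) shows the shape of the recursion:

-- A toy expression type: only what is needed to show the recursion.
data MiniExpr
  = MiniVar String
  | MiniLam Bool MiniExpr     -- Bool: True for a value (Id) binder
  | MiniApp MiniExpr MiniExpr
  | MiniCast MiniExpr         -- cast, with the coercion elided

-- Count leading value lambdas, skipping type binders and casts.
miniManifestArity :: MiniExpr -> Int
miniManifestArity (MiniLam isValueBndr body)
  | isValueBndr = 1 + miniManifestArity body
  | otherwise   = miniManifestArity body
miniManifestArity (MiniCast e) = miniManifestArity e
miniManifestArity _            = 0
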
diff --git a/compiler/coreSyn/CoreFVs.lhs b/compiler/coreSyn/CoreFVs.hs
index fc804d7c6e..af475bab3f 100644
--- a/compiler/coreSyn/CoreFVs.lhs
+++ b/compiler/coreSyn/CoreFVs.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
Taken quite directly from the Peyton Jones/Lester paper.
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
-- | A module concerned with finding the free variables of an expression.
@@ -20,7 +20,7 @@ module CoreFVs (
exprSomeFreeVars, exprsSomeFreeVars,
-- * Free variables of Rules, Vars and Ids
- varTypeTyVars,
+ varTypeTyVars,
idUnfoldingVars, idFreeVars, idRuleAndUnfoldingVars,
idRuleVars, idRuleRhsVars, stableUnfoldingVars,
ruleRhsFreeVars, ruleFreeVars, rulesFreeVars,
@@ -50,14 +50,13 @@ import Maybes( orElse )
import Util
import BasicTypes( Activation )
import Outputable
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\section{Finding the free variables of an expression}
-%* *
-%************************************************************************
+* *
+************************************************************************
This function simply finds the free variables of an expression.
So far as type variables are concerned, it only finds tyvars that are
@@ -66,8 +65,8 @@ So far as type variables are concerned, it only finds tyvars that are
* free in the type of a binder,
but not those that are free in the type of variable occurrence.
+-}
-\begin{code}
-- | Find all locally-defined free Ids or type variables in an expression
exprFreeVars :: CoreExpr -> VarSet
exprFreeVars = exprSomeFreeVars isLocalVar
@@ -101,14 +100,11 @@ exprsSomeFreeVars fv_cand = mapUnionVarSet (exprSomeFreeVars fv_cand)
-- | Predicate on possible free variables: returns @True@ iff the variable is interesting
type InterestingVarFun = Var -> Bool
-\end{code}
-
-\begin{code}
type FV = InterestingVarFun
-> VarSet -- Locally bound
-> VarSet -- Free vars
- -- Return the vars that are both (a) interesting
+ -- Return the vars that are both (a) interesting
-- and (b) not locally bound
-- See function keep_it
@@ -172,10 +168,7 @@ addBndr bndr fv fv_cand in_scope
addBndrs :: [CoreBndr] -> FV -> FV
addBndrs bndrs fv = foldr addBndr fv bndrs
-\end{code}
-
-\begin{code}
expr_fvs :: CoreExpr -> FV
expr_fvs (Type ty) = someVars (tyVarsOfType ty)
@@ -213,16 +206,15 @@ exprs_fvs exprs = foldr (union . expr_fvs) noVars exprs
tickish_fvs :: Tickish Id -> FV
tickish_fvs (Breakpoint _ ids) = someVars (mkVarSet ids)
tickish_fvs _ = noVars
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\section{Free names}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | ruleLhsOrphNames is used when deciding whether
-- a rule is an orphan. In particular, suppose that T is defined in this
-- module; we want to avoid declaring that a rule like:
@@ -268,15 +260,15 @@ exprOrphNames e
-- | Finds the free /external/ names of several expressions: see 'exprOrphNames' for details
exprsOrphNames :: [CoreExpr] -> NameSet
exprsOrphNames es = foldr (unionNameSet . exprOrphNames) emptyNameSet es
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\section[freevars-everywhere]{Attaching free variables to every sub-expression}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Those variables free in the right hand side of a rule
ruleRhsFreeVars :: CoreRule -> VarSet
ruleRhsFreeVars (BuiltinRule {}) = noFVs
@@ -314,8 +306,8 @@ ruleLhsFreeIds :: CoreRule -> VarSet
ruleLhsFreeIds (BuiltinRule {}) = noFVs
ruleLhsFreeIds (Rule { ru_bndrs = bndrs, ru_args = args })
= addBndrs bndrs (exprs_fvs args) isLocalId emptyVarSet
-\end{code}
+{-
Note [Rule free var hack] (Not a hack any more)
~~~~~~~~~~~~~~~~~~~~~~~~~
We used not to include the Id in its own rhs free-var set.
@@ -326,8 +318,8 @@ However, the occurrence analyser distinguishes "non-rule loop breakers"
from "rule-only loop breakers" (see BasicTypes.OccInfo). So it will
put this 'f' in a Rec block, but will mark the binding as a non-rule loop
breaker, which is perfectly inlinable.
+-}
-\begin{code}
-- |Free variables of a vectorisation declaration
vectsFreeVars :: [CoreVect] -> VarSet
vectsFreeVars = mapUnionVarSet vectFreeVars
@@ -338,19 +330,18 @@ vectsFreeVars = mapUnionVarSet vectFreeVars
vectFreeVars (VectClass _) = noFVs
vectFreeVars (VectInst _) = noFVs
-- this function is only concerned with values, not types
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\section[freevars-everywhere]{Attaching free variables to every sub-expression}
-%* *
-%************************************************************************
+* *
+************************************************************************
The free variable pass annotates every node in the expression with its
NON-GLOBAL free variables and type variables.
+-}
-\begin{code}
-- | Every node in a binding group annotated with its
-- (non-global) free variables, both Ids and TyVars
type CoreBindWithFVs = AnnBind Id VarSet
@@ -444,22 +435,21 @@ stableUnfoldingVars :: Unfolding -> Maybe VarSet
stableUnfoldingVars unf
= case unf of
CoreUnfolding { uf_tmpl = rhs, uf_src = src }
- | isStableSource src
+ | isStableSource src
-> Just (exprFreeVars rhs)
- DFunUnfolding { df_bndrs = bndrs, df_args = args }
+ DFunUnfolding { df_bndrs = bndrs, df_args = args }
-> Just (exprs_fvs args isLocalVar (mkVarSet bndrs))
-- DFuns are top level, so no fvs from types of bndrs
_other -> Nothing
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Free variables (and types)}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
freeVars :: CoreExpr -> CoreExprWithFVs
-- ^ Annotate a 'CoreExpr' with its (non-global) free type and value variables at every tree node
freeVars (Var v)
@@ -541,5 +531,3 @@ freeVars (Tick tickish expr)
freeVars (Type ty) = (tyVarsOfType ty, AnnType ty)
freeVars (Coercion co) = (tyCoVarsOfCo co, AnnCoercion co)
-\end{code}
-
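
The expr_fvs scheme above threads the set of locally bound variables downwards and keeps an occurrence only if it is not bound (the keep_it test). A toy version over a small lambda syntax, with Data.Set standing in for VarSet and the MiniTerm names being assumptions of the sketch:

import qualified Data.Set as Set

data MiniTerm = MiniV String | MiniA MiniTerm MiniTerm | MiniL String MiniTerm

-- Free variables: keep an occurrence unless it is locally bound.
miniFreeVars :: Set.Set String -> MiniTerm -> Set.Set String
miniFreeVars bound (MiniV x)
  | x `Set.member` bound = Set.empty
  | otherwise            = Set.singleton x
miniFreeVars bound (MiniA f a) =
  miniFreeVars bound f `Set.union` miniFreeVars bound a
miniFreeVars bound (MiniL x b) =
  miniFreeVars (Set.insert x bound) b
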
diff --git a/compiler/coreSyn/CoreLint.lhs b/compiler/coreSyn/CoreLint.hs
index 7a050a801b..26519cc928 100644
--- a/compiler/coreSyn/CoreLint.lhs
+++ b/compiler/coreSyn/CoreLint.hs
@@ -1,12 +1,11 @@
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-%
A ``lint'' pass to check for Core correctness
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fprof-auto #-}
@@ -48,8 +47,8 @@ import Control.Monad
import MonadUtils
import Data.Maybe
import Pair
-\end{code}
+{-
Note [GHC Formalism]
~~~~~~~~~~~~~~~~~~~~
This file implements the type-checking algorithm for System FC, the "official"
@@ -62,11 +61,11 @@ just about anything in this file or you change other types/functions throughout
the Core language (all signposted to this note), you should update that
formalism. See docs/core-spec/README for more info about how to do so.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection[lintCoreBindings]{@lintCoreBindings@: Top-level interface}
-%* *
-%************************************************************************
+* *
+************************************************************************
Checks that a set of core bindings is well-formed. The PprStyle and String
just control what we print in the event of an error. The Bool value
@@ -111,9 +110,8 @@ to the type of the binding variable. lintBinders does this.
For Ids, the type-substituted Id is added to the in_scope set (which
itself is part of the TvSubst we are carrying down), and when we
find an occurrence of an Id, we fetch it from the in-scope set.
+-}
-
-\begin{code}
lintCoreBindings :: [Var] -> CoreProgram -> (Bag MsgDoc, Bag MsgDoc)
-- Returns (warnings, errors)
-- If you edit this function, you may need to update the GHC formalism
@@ -149,18 +147,18 @@ lintCoreBindings local_in_scope binds
-- See Note [GHC Formalism]
lint_bind (Rec prs) = mapM_ (lintSingleBinding TopLevel Recursive) prs
lint_bind (NonRec bndr rhs) = lintSingleBinding TopLevel NonRecursive (bndr,rhs)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[lintUnfolding]{lintUnfolding}
-%* *
-%************************************************************************
+* *
+************************************************************************
We use this to check all unfoldings that come in from interfaces
(it is very painful to catch errors otherwise):
+-}
-\begin{code}
lintUnfolding :: SrcLoc
-> [Var] -- Treat these as in scope
-> CoreExpr
@@ -185,17 +183,17 @@ lintExpr vars expr
(_warns, errs) = initL (addLoc TopLevelBindings $
addInScopeVars vars $
lintCoreExpr expr)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[lintCoreBinding]{lintCoreBinding}
-%* *
-%************************************************************************
+* *
+************************************************************************
Check a core binding, returning the list of variables bound.
+-}
-\begin{code}
lintSingleBinding :: TopLevelFlag -> RecFlag -> (Id, CoreExpr) -> LintM ()
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
@@ -263,15 +261,15 @@ lintIdUnfolding bndr bndr_ty (CoreUnfolding { uf_tmpl = rhs, uf_src = src })
; checkTys bndr_ty ty (mkRhsMsg bndr (ptext (sLit "unfolding")) ty) }
lintIdUnfolding _ _ _
= return () -- We could check more
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[lintCoreExpr]{lintCoreExpr}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
--type InKind = Kind -- Substitution not yet applied
type InType = Type
type InCoercion = Coercion
@@ -415,8 +413,7 @@ lintCoreExpr (Coercion co)
= do { (_kind, ty1, ty2, role) <- lintInCo co
; return (mkCoercionType role ty1 ty2) }
-\end{code}
-
+{-
Note [Kind instantiation in coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following coercion axiom:
@@ -436,16 +433,16 @@ kind coercions and produce the following substitution which is to be
applied in the type variables:
k_ag ~~> * -> *
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection[lintCoreArgs]{lintCoreArgs}
-%* *
-%************************************************************************
+* *
+************************************************************************
The basic version of these functions checks that the argument is a
subtype of the required type, as one would expect.
+-}
-\begin{code}
lintCoreArg :: OutType -> CoreArg -> LintM OutType
lintCoreArg fun_ty (Type arg_ty)
= do { arg_ty' <- applySubstTy arg_ty
@@ -496,9 +493,7 @@ lintValApp arg fun_ty arg_ty
where
err1 = mkAppMsg fun_ty arg_ty arg
err2 = mkNonFunAppMsg fun_ty arg_ty arg
-\end{code}
-\begin{code}
checkTyKind :: OutTyVar -> OutType -> LintM ()
-- Both args have had substitution applied
@@ -528,16 +523,15 @@ checkDeadIdOcc id
(ptext (sLit "Occurrence of a dead Id") <+> ppr id) }
| otherwise
= return ()
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[lintCoreAlts]{lintCoreAlts}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
checkCaseAlts :: CoreExpr -> OutType -> [CoreAlt] -> LintM ()
-- a) Check that the alts are non-empty
-- b1) Check that the DEFAULT comes first, if it exists
@@ -574,9 +568,7 @@ checkCaseAlts e ty alts =
is_infinite_ty = case tyConAppTyCon_maybe ty of
Nothing -> False
Just tycon -> isPrimTyCon tycon
-\end{code}
-\begin{code}
checkAltExpr :: CoreExpr -> OutType -> LintM ()
checkAltExpr expr ann_ty
= do { actual_ty <- lintCoreExpr expr
@@ -620,15 +612,15 @@ lintCoreAlt scrut_ty alt_ty alt@(DataAlt con, args, rhs)
| otherwise -- Scrut-ty is wrong shape
= addErrL (mkBadAltMsg scrut_ty alt)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[lint-types]{Types}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- When we lint binders, we (one at a time and in order):
-- 1. Lint var types or kinds (possibly substituting)
-- 2. Add the binder to the in scope set, and if its a coercion var,
@@ -675,20 +667,19 @@ lintAndScopeId id linterF
= do { ty <- lintInTy (idType id)
; let id' = setIdType id ty
; addInScopeVar id' $ (linterF id') }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Types and kinds
-%* *
-%************************************************************************
+* *
+************************************************************************
We have a single linter for types and kinds. That is convenient
because sometimes it's not clear whether the thing we are looking
at is a type or a kind.
+-}
-\begin{code}
lintInTy :: InType -> LintM LintedType
-- Types only, not kinds
-- Check the type, and apply the substitution to it
@@ -746,10 +737,6 @@ lintType (ForAllTy tv ty)
lintType ty@(LitTy l) = lintTyLit l >> return (typeKind ty)
-\end{code}
-
-
-\begin{code}
lintKind :: OutKind -> LintM ()
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
@@ -757,10 +744,7 @@ lintKind k = do { sk <- lintType k
; unless (isSuperKind sk)
(addErrL (hang (ptext (sLit "Ill-kinded kind:") <+> ppr k)
2 (ptext (sLit "has kind:") <+> ppr sk))) }
-\end{code}
-
-\begin{code}
lintArrow :: SDoc -> LintedKind -> LintedKind -> LintM LintedKind
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
@@ -823,15 +807,15 @@ lint_app doc kfn kas
; return (substKiWith [kv] [ta] kfn) }
go_app _ _ = failWithL fail_msg
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Linting coercions
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
lintInCo :: InCoercion -> LintM (LintedKind, LintedType, LintedType, Role)
-- Check the coercion, and apply the substitution to it
-- See Note [Linting type lets]
@@ -1053,15 +1037,13 @@ lintCoercion this@(AxiomRuleCo co ts cs)
[ txt "Expected:" <+> int (n + length es)
, txt "Provided:" <+> int n ]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[lint-monad]{The Lint monad}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism]
@@ -1118,17 +1100,12 @@ data LintLocInfo
| TopLevelBindings
| InType Type -- Inside a type
| InCo Coercion -- Inside a coercion
-\end{code}
-
-\begin{code}
initL :: LintM a -> WarnsAndErrs -- Errors and warnings
initL m
= case unLintM m [] emptyTvSubst (emptyBag, emptyBag) of
(_, errs) -> errs
-\end{code}
-\begin{code}
checkL :: Bool -> MsgDoc -> LintM ()
checkL True _ = return ()
checkL False msg = failWithL msg
@@ -1195,9 +1172,7 @@ applySubstCo co = do { subst <- getTvSubst; return (substCo (tvCvSubst subst) co
extendSubstL :: TyVar -> Type -> LintM a -> LintM a
extendSubstL tv ty m
= LintM (\ loc subst errs -> unLintM m loc (Type.extendTvSubst subst tv ty) errs)
-\end{code}
-\begin{code}
lookupIdInScope :: Id -> LintM Id
lookupIdInScope id
| not (mustHaveLocalBinding id)
@@ -1247,15 +1222,14 @@ checkRole co r1 r2
ptext (sLit "got") <+> ppr r2 $$
ptext (sLit "in") <+> ppr co)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Error messages}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
dumpLoc :: LintLocInfo -> (SrcLoc, SDoc)
dumpLoc (RhsOf v)
@@ -1294,9 +1268,7 @@ pp_binders bs = sep (punctuate comma (map pp_binder bs))
pp_binder :: Var -> SDoc
pp_binder b | isId b = hsep [ppr b, dcolon, ppr (idType b)]
| otherwise = hsep [ppr b, dcolon, ppr (tyVarKind b)]
-\end{code}
-\begin{code}
------------------------------------------------------
-- Messages for case expressions
@@ -1468,4 +1440,3 @@ dupExtVars :: [[Name]] -> MsgDoc
dupExtVars vars
= hang (ptext (sLit "Duplicate top-level variables with the same qualified name"))
2 (ppr vars)
-\end{code}
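
The checkL/failWithL pattern above amounts to an assertion inside a monad that can record a failure. A stripped-down version over Either, with String standing in for MsgDoc and all names being assumptions of the sketch rather than CoreLint's actual LintM:

-- A minimal stand-in for LintM: a failed check carries its message.
type MiniLint a = Either String a

miniCheckL :: Bool -> String -> MiniLint ()
miniCheckL True  _   = Right ()
miniCheckL False msg = Left msg

-- The kind of shape check the linter performs, e.g. on case alternatives.
miniCheckCaseAlts :: [alt] -> MiniLint ()
miniCheckCaseAlts alts =
  miniCheckL (not (null alts)) "case expression with no alternatives"
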
diff --git a/compiler/coreSyn/CorePrep.lhs b/compiler/coreSyn/CorePrep.hs
index 537cc01b43..9037fcb126 100644
--- a/compiler/coreSyn/CorePrep.lhs
+++ b/compiler/coreSyn/CorePrep.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow, 1994-2006
-%
+{-
+(c) The University of Glasgow, 1994-2006
+
Core pass to saturate constructors and PrimOps
+-}
-\begin{code}
{-# LANGUAGE BangPatterns, CPP #-}
module CorePrep (
@@ -56,8 +56,8 @@ import Config
import Data.Bits
import Data.List ( mapAccumL )
import Control.Monad
-\end{code}
+{-
-- ---------------------------------------------------------------------------
-- Overview
-- ---------------------------------------------------------------------------
@@ -142,21 +142,21 @@ Here is the syntax of the Core produced by CorePrep:
We define a synonym for each of these non-terminals. Functions
with the corresponding name produce a result in that syntax.
+-}
-\begin{code}
type CpeTriv = CoreExpr -- Non-terminal 'triv'
type CpeApp = CoreExpr -- Non-terminal 'app'
type CpeBody = CoreExpr -- Non-terminal 'body'
type CpeRhs = CoreExpr -- Non-terminal 'rhs'
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Top level stuff
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
corePrepPgm :: DynFlags -> HscEnv -> CoreProgram -> [TyCon] -> IO CoreProgram
corePrepPgm dflags hsc_env binds data_tycons = do
showPass dflags "CorePrep"
@@ -202,8 +202,8 @@ mkDataConWorkers data_tycons
| tycon <- data_tycons, -- CorePrep will eta-expand it
data_con <- tyConDataCons tycon,
let id = dataConWorkId data_con ]
-\end{code}
+{-
Note [Floating out of top level bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NB: we do need to float out of top-level bindings
@@ -335,13 +335,13 @@ Into this one:
(Since f is not considered to be free in its own RHS.)
-%************************************************************************
-%* *
+************************************************************************
+* *
The main code
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
cpeBind :: TopLevelFlag -> CorePrepEnv -> CoreBind
-> UniqSM (CorePrepEnv, Floats)
cpeBind top_lvl env (NonRec bndr rhs)
@@ -349,7 +349,7 @@ cpeBind top_lvl env (NonRec bndr rhs)
; let dmd = idDemandInfo bndr
is_unlifted = isUnLiftedType (idType bndr)
; (floats, bndr2, rhs2) <- cpePair top_lvl NonRecursive
- dmd
+ dmd
is_unlifted
env bndr1 rhs
; let new_float = mkFloat dmd is_unlifted bndr2 rhs2
@@ -697,7 +697,7 @@ cpeApp env expr
-- ---------------------------------------------------------------------------
-- This is where we arrange that a non-trivial argument is let-bound
-cpeArg :: CorePrepEnv -> Demand
+cpeArg :: CorePrepEnv -> Demand
-> CoreArg -> Type -> UniqSM (Floats, CpeTriv)
cpeArg env dmd arg arg_ty
= do { (floats1, arg1) <- cpeRhsE env arg -- arg1 can be a lambda
@@ -719,8 +719,8 @@ cpeArg env dmd arg arg_ty
is_unlifted = isUnLiftedType arg_ty
is_strict = isStrictDmd dmd
want_float = wantFloatNested NonRecursive (is_strict || is_unlifted)
-\end{code}
+{-
Note [Floating unlifted arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider C (let v* = expensive in v)
@@ -741,8 +741,8 @@ because that has different strictness. Hence the use of 'allLazy'.
maybeSaturate deals with saturating primops and constructors
The type is the type of the entire application
+-}
-\begin{code}
maybeSaturate :: Id -> CpeApp -> Int -> UniqSM CpeRhs
maybeSaturate fn expr n_args
| Just DataToTagOp <- isPrimOpId_maybe fn -- DataToTag must have an evaluated arg
@@ -783,8 +783,8 @@ saturateDataToTag sat_expr
eval_data2tag_arg other -- Should not happen
= pprPanic "eval_data2tag" (ppr other)
-\end{code}
+{-
Note [dataToTag magic]
~~~~~~~~~~~~~~~~~~~~~~
Horrid: we must ensure that the arg of data2TagOp is evaluated
@@ -795,13 +795,13 @@ How might it not be evaluated? Well, we might have floated it out
of the scope of a `seq`, or dropped the `seq` altogether.
-%************************************************************************
-%* *
+************************************************************************
+* *
Simple CoreSyn operations
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- we don't ignore any Tickishes at the moment.
ignoreTickish :: Tickish Id -> Bool
ignoreTickish _ = False
@@ -817,8 +817,8 @@ cpe_ExprIsTrivial (Tick t e) = not (tickishIsCode t) && cpe_ExprIsTrivial e
cpe_ExprIsTrivial (Cast e _) = cpe_ExprIsTrivial e
cpe_ExprIsTrivial (Lam b body) | isTyVar b = cpe_ExprIsTrivial body
cpe_ExprIsTrivial _ = False
-\end{code}
+{-
-- -----------------------------------------------------------------------------
-- Eta reduction
-- -----------------------------------------------------------------------------
@@ -858,14 +858,14 @@ and now we do NOT want eta expansion to give
Instead CoreArity.etaExpand gives
f = /\a -> \y -> let s = h 3 in g s y
+-}
-\begin{code}
cpeEtaExpand :: Arity -> CpeRhs -> CpeRhs
cpeEtaExpand arity expr
| arity == 0 = expr
| otherwise = etaExpand arity expr
-\end{code}
+{-
-- -----------------------------------------------------------------------------
-- Eta reduction
-- -----------------------------------------------------------------------------
@@ -876,8 +876,8 @@ trivial (like f, or f Int). But for deLam it would be enough to
get to a partial application:
case x of { p -> \xs. map f xs }
==> case x of { p -> map f }
+-}
-\begin{code}
tryEtaReducePrep :: [CoreBndr] -> CoreExpr -> Maybe CoreExpr
tryEtaReducePrep bndrs expr@(App _ _)
| ok_to_eta_reduce f
@@ -910,20 +910,19 @@ tryEtaReducePrep bndrs (Let bind@(NonRec _ r) body)
fvs = exprFreeVars r
tryEtaReducePrep _ _ = Nothing
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Floats
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Pin demand info on floats]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We pin demand info on floated lets so that we can see the one-shot thunks.
+-}
-\begin{code}
data FloatingBind
= FloatLet CoreBind -- Rhs of bindings are CpeRhss
-- They are always of lifted type;
@@ -1093,16 +1092,15 @@ allLazyNested :: RecFlag -> Floats -> Bool
allLazyNested _ (Floats OkToSpec _) = True
allLazyNested _ (Floats NotOkToSpec _) = False
allLazyNested is_rec (Floats IfUnboxedOk _) = isNonRec is_rec
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Cloning
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- ---------------------------------------------------------------------------
-- The environment
-- ---------------------------------------------------------------------------
@@ -1208,4 +1206,3 @@ newVar ty
= seqType ty `seq` do
uniq <- getUniqueM
return (mkSysLocal (fsLit "sat") uniq ty)
-\end{code}
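
maybeSaturate, as used above, eta-expands an application when the callee (a primop or constructor worker) expects more arguments than were supplied. The arithmetic of that decision in isolation, with names that are assumptions of the sketch:

-- Excess arity: how many extra lambdas an eta-expansion must add so the
-- callee ends up fully applied.
data Saturation = AlreadySaturated | EtaExpandBy Int
  deriving Show

saturation :: Int  -- arity the callee expects
           -> Int  -- value arguments actually supplied
           -> Saturation
saturation expectedArity nArgs
  | excess <= 0 = AlreadySaturated
  | otherwise   = EtaExpandBy excess
  where
    excess = expectedArity - nArgs
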
diff --git a/compiler/coreSyn/CoreSubst.lhs b/compiler/coreSyn/CoreSubst.hs
index 76f42f4bb9..82e18ca5ba 100644
--- a/compiler/coreSyn/CoreSubst.lhs
+++ b/compiler/coreSyn/CoreSubst.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
Utility functions on @Core@ syntax
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module CoreSubst (
-- * Main data types
@@ -82,16 +82,15 @@ import FastString
import Data.List
import TysWiredIn
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Substitutions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A substitution environment, containing both 'Id' and 'TyVar' substitutions.
--
-- Some invariants apply to how you use the substitution:
@@ -124,8 +123,8 @@ data Subst
-- Types.TvSubstEnv
--
-- INVARIANT 3: See Note [Extending the Subst]
-\end{code}
+{-
Note [Extending the Subst]
~~~~~~~~~~~~~~~~~~~~~~~~~~
For a core Subst, which binds Ids as well, we make a different choice for Ids
@@ -179,8 +178,8 @@ TvSubstEnv and CvSubstEnv?
* For TyVars, only coercion variables can possibly change, and they are
easy to spot
+-}
-\begin{code}
-- | An environment for substituting for 'Id's
type IdSubstEnv = IdEnv CoreExpr
@@ -331,11 +330,9 @@ extendInScopeIds (Subst in_scope ids tvs cvs) vs
setInScope :: Subst -> InScopeSet -> Subst
setInScope (Subst _ ids tvs cvs) in_scope = Subst in_scope ids tvs cvs
-\end{code}
-Pretty printing, for debugging only
+-- Pretty printing, for debugging only
-\begin{code}
instance Outputable Subst where
ppr (Subst in_scope ids tvs cvs)
= ptext (sLit "<InScope =") <+> braces (fsep (map ppr (varEnvElts (getInScopeVars in_scope))))
@@ -343,16 +340,15 @@ instance Outputable Subst where
$$ ptext (sLit " TvSubst =") <+> ppr tvs
$$ ptext (sLit " CvSubst =") <+> ppr cvs
<> char '>'
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Substituting expressions
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Apply a substitution to an entire 'CoreExpr'. Remember, you may only
-- apply the substitution /once/: see "CoreSubst#apply_once"
--
@@ -428,9 +424,7 @@ substBind subst (Rec pairs) = (subst', Rec (bndrs' `zip` rhss'))
(bndrs, rhss) = unzip pairs
(subst', bndrs') = substRecBndrs subst bndrs
rhss' = map (subst_expr subst') rhss
-\end{code}
-\begin{code}
-- | De-shadowing the program is sometimes a useful pre-pass. It can be done simply
-- by running over the bindings with an empty substitution, because substitution
-- returns a result that has no-shadowing guaranteed.
@@ -442,21 +436,20 @@ substBind subst (Rec pairs) = (subst', Rec (bndrs' `zip` rhss'))
-- short and simple that I'm going to leave it here
deShadowBinds :: CoreProgram -> CoreProgram
deShadowBinds binds = snd (mapAccumL substBind emptySubst binds)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Substituting binders
-%* *
-%************************************************************************
+* *
+************************************************************************
Remember that substBndr and friends are used when doing expression
substitution only. Their only business is substitution, so they
preserve all IdInfo (suitably substituted). For example, we *want* to
preserve occ info in rules.
+-}
-\begin{code}
-- | Substitutes a 'Var' for another one according to the 'Subst' given, returning
-- the result and an updated 'Subst' that should be used by subsequent substitutions.
-- 'IdInfo' is preserved by this process, although it is substituted into appropriately.
@@ -476,10 +469,7 @@ substRecBndrs subst bndrs
= (new_subst, new_bndrs)
where -- Here's the reason we need to pass rec_subst to subst_id
(new_subst, new_bndrs) = mapAccumL (substIdBndr (text "rec-bndr") new_subst) subst bndrs
-\end{code}
-
-\begin{code}
substIdBndr :: SDoc
-> Subst -- ^ Substitution to use for the IdInfo
-> Subst -> Id -- ^ Substitution and Id to transform
@@ -513,12 +503,12 @@ substIdBndr _doc rec_subst subst@(Subst in_scope env tvs cvs) old_id
no_change = id1 == old_id
-- See Note [Extending the Subst]
-- it's /not/ necessary to check mb_new_info and no_type_change
-\end{code}
+{-
Now a variant that unconditionally allocates a new unique.
It also unconditionally zaps the OccInfo.
+-}
-\begin{code}
-- | Very similar to 'substBndr', but it always allocates a new 'Unique' for
-- each variable in its output. It substitutes the IdInfo though.
cloneIdBndr :: Subst -> UniqSupply -> Id -> (Subst, Id)
@@ -564,20 +554,19 @@ clone_id rec_subst subst@(Subst in_scope idvs tvs cvs) (old_id, uniq)
new_id = maybeModifyIdInfo (substIdInfo rec_subst id2 (idInfo old_id)) id2
(new_idvs, new_cvs) | isCoVar old_id = (idvs, extendVarEnv cvs old_id (mkCoVarCo new_id))
| otherwise = (extendVarEnv idvs old_id (Var new_id), cvs)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Types and Coercions
-%* *
-%************************************************************************
+* *
+************************************************************************
For types and coercions we just call the corresponding functions in
Type and Coercion, but we have to repackage the substitution, from a
Subst to a TvSubst.
+-}
-\begin{code}
substTyVarBndr :: Subst -> TyVar -> (Subst, TyVar)
substTyVarBndr (Subst in_scope id_env tv_env cv_env) tv
= case Type.substTyVarBndr (TvSubst in_scope tv_env) tv of
@@ -609,16 +598,15 @@ getCvSubst (Subst in_scope _ tenv cenv) = CvSubst in_scope tenv cenv
-- | See 'Coercion.substCo'
substCo :: Subst -> Coercion -> Coercion
substCo subst co = Coercion.substCo (getCvSubst subst) co
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\section{IdInfo substitution}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
substIdType :: Subst -> Id -> Id
substIdType subst@(Subst _ _ tv_env cv_env) id
| (isEmptyVarEnv tv_env && isEmptyVarEnv cv_env) || isEmptyVarSet (Type.tyVarsOfType old_ty) = id
@@ -760,8 +748,8 @@ for an Id in a breakpoint. We ensure this by never storing an Id with
an unlifted type in a Breakpoint - see Coverage.mkTickish.
Breakpoints can't handle free variables with unlifted types anyway.
-}
-\end{code}
+{-
Note [Worker inlining]
~~~~~~~~~~~~~~~~~~~~~~
A worker can get substituted away entirely.
In all these cases we simply drop the special case, returning to
InlVanilla. The WARN is just so I can see if it happens a lot.
-%************************************************************************
-%* *
+************************************************************************
+* *
The Very Simple Optimiser
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Optimise coercion boxes agressively]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -830,8 +818,8 @@ we wouldn't simplify this expression at all:
The rule LHS desugarer can't deal with Let at all, so we need to push that box into
the use sites.
+-}
-\begin{code}
simpleOptExpr :: CoreExpr -> CoreExpr
-- Do simple optimisation on an expression
-- The optimisation is very straightforward: just
@@ -1093,8 +1081,8 @@ simpleUnfoldingFun :: IdUnfoldingFun
simpleUnfoldingFun id
| isAlwaysActive (idInlineActivation id) = idUnfolding id
| otherwise = noUnfolding
-\end{code}
+{-
Note [Inline prag in simplOpt]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If there's an INLINE/NOINLINE pragma that restricts the phase in
@@ -1121,11 +1109,11 @@ match if we replace coerce by its unfolding on the LHS, because that is the
core that the rule matching engine will find. So do that for everything that
has a compulsory unfolding. Also see Note [Desugaring coerce as cast] in Desugar
-%************************************************************************
-%* *
+************************************************************************
+* *
exprIsConApp_maybe
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [exprIsConApp_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1157,8 +1145,8 @@ Just (':', [Char], ['a', unpackCString# "bc"]).
We need to be careful about UTF8 strings here. ""# contains a ByteString, so
we must parse it back into a FastString to split off the first character.
That way we can treat unpackCString# and unpackCStringUtf8# in the same way.
+-}
-\begin{code}
data ConCont = CC [CoreExpr] Coercion
-- Substitution already applied
@@ -1314,8 +1302,8 @@ stripTypeArgs :: [CoreExpr] -> [Type]
stripTypeArgs args = ASSERT2( all isTypeArg args, ppr args )
[ty | Type ty <- args]
-- We really do want isTypeArg here, not isTyCoArg!
-\end{code}
+{-
Note [Unfolding DFuns]
~~~~~~~~~~~~~~~~~~~~~~
DFuns look like
@@ -1333,8 +1321,8 @@ Note [DFun arity check]
Here we check that the total number of supplied arguments (including
type args) matches what the dfun is expecting. This may be *less*
than the ordinary arity of the dfun: see Note [DFun unfoldings] in CoreSyn
+-}
-\begin{code}
exprIsLiteral_maybe :: InScopeEnv -> CoreExpr -> Maybe Literal
-- Same deal as exprIsConApp_maybe, but much simpler
-- Nevertheless we do need to look through unfoldings for
@@ -1347,8 +1335,8 @@ exprIsLiteral_maybe env@(_, id_unf) e
Var v | Just rhs <- expandUnfolding_maybe (id_unf v)
-> exprIsLiteral_maybe env rhs
_ -> Nothing
-\end{code}
+{-
Note [exprIsLambda_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~~
exprIsLambda_maybe will, given an expression `e`, try to turn it into the form
`Lam v e'` (returned as `Just (v, e')`). It looks through casts and unfolds function
calls when the unfolding has a greater arity than the number of arguments present.
Currently, it is used in Rules.match, and is required to make
"map coerce = coerce" match.
+-}
-\begin{code}
exprIsLambda_maybe :: InScopeEnv -> CoreExpr -> Maybe (Var, CoreExpr)
-- See Note [exprIsLambda_maybe]
@@ -1418,5 +1406,3 @@ pushCoercionIntoLambda in_scope x e co
| otherwise
= pprTrace "exprIsLambda_maybe: Unexpected lambda in case" (ppr (Lam x e))
Nothing
-
-\end{code}
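
The Subst above carries an in-scope set precisely so binders can be renamed away from anything the substitution range mentions. A toy single-variable, capture-avoiding substitution over a minimal lambda syntax (hypothetical types and names, not CoreSubst's API) makes that renaming step explicit:

import qualified Data.Set as Set

data Tm = TmVar String | TmApp Tm Tm | TmLam String Tm

-- Substitute 'image' for free occurrences of 'name', renaming a binder
-- whenever it would capture a free variable of the image.
substTm :: String -> Tm -> Tm -> Tm
substTm name image = go
  where
    freeImage = freeTm image

    go (TmVar x) | x == name = image
                 | otherwise = TmVar x
    go (TmApp f a) = TmApp (go f) (go a)
    go (TmLam x b)
      | x == name = TmLam x b                        -- 'name' is shadowed here
      | x `Set.member` freeImage =
          let avoid = Set.insert name (freeImage `Set.union` freeTm b)
              x'    = freshAway avoid x
          in TmLam x' (go (substTm x (TmVar x') b))  -- rename, then substitute
      | otherwise = TmLam x (go b)

freeTm :: Tm -> Set.Set String
freeTm (TmVar x)   = Set.singleton x
freeTm (TmApp f a) = freeTm f `Set.union` freeTm a
freeTm (TmLam x b) = Set.delete x (freeTm b)

freshAway :: Set.Set String -> String -> String
freshAway avoid x
  | x `Set.member` avoid = freshAway avoid (x ++ "'")
  | otherwise            = x
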
diff --git a/compiler/coreSyn/CoreSyn.lhs b/compiler/coreSyn/CoreSyn.hs
index 47418e22ec..0c6ee7c38e 100644
--- a/compiler/coreSyn/CoreSyn.lhs
+++ b/compiler/coreSyn/CoreSyn.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
-\begin{code}
{-# LANGUAGE CPP, DeriveDataTypeable, DeriveFunctor #-}
-- | CoreSyn holds all the main data types for use by the Glasgow Haskell Compiler midsection
@@ -105,17 +104,17 @@ import Data.Word
infixl 4 `mkApps`, `mkTyApps`, `mkVarApps`, `App`, `mkCoApps`
-- Left associative, so that we can say (f `mkTyApps` xs `mkVarApps` ys)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The main data types}
-%* *
-%************************************************************************
+* *
+************************************************************************
These data types are the heart of the compiler
+-}
-\begin{code}
-- | This is the data type that represents GHC's core intermediate language. Currently
-- GHC uses System FC <http://research.microsoft.com/~simonpj/papers/ext-f/> for this purpose,
-- which is closely related to the simpler and better known System F <http://en.wikipedia.org/wiki/System_F>.
@@ -287,8 +286,8 @@ data AltCon
data Bind b = NonRec b (Expr b)
| Rec [(b, (Expr b))]
deriving (Data, Typeable)
-\end{code}
+{-
Note [Shadowing]
~~~~~~~~~~~~~~~~
While various passes attempt to rename on-the-fly in a manner that
if for no other reason than that we don't need to instantiate the (~) at an
unboxed type.
-%************************************************************************
-%* *
+************************************************************************
+* *
Ticks
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Allows attaching extra information to points in expressions
-- If you edit this type, you may need to update the GHC formalism
@@ -513,19 +512,18 @@ tickishCanSplit :: Tickish Id -> Bool
tickishCanSplit Breakpoint{} = False
tickishCanSplit HpcTick{} = False
tickishCanSplit _ = True
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Transformation rules}
-%* *
-%************************************************************************
+* *
+************************************************************************
The CoreRule type and its friends are dealt with mainly in CoreRules,
but CoreFVs, Subst, PprCore, CoreTidy also inspect the representation.
+-}
-\begin{code}
-- | A 'CoreRule' is:
--
-- * \"Local\" if the function it is a rule for is defined in the
@@ -620,36 +618,34 @@ isLocalRule = ru_local
-- | Set the 'Name' of the 'Id.Id' at the head of the rule left hand side
setRuleIdName :: Name -> CoreRule -> CoreRule
setRuleIdName nm ru = ru { ru_fn = nm }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Vectorisation declarations}
-%* *
-%************************************************************************
+* *
+************************************************************************
Representation of desugared vectorisation declarations that are fed to the vectoriser (via
'ModGuts').
+-}
-\begin{code}
data CoreVect = Vect Id CoreExpr
| NoVect Id
| VectType Bool TyCon (Maybe TyCon)
| VectClass TyCon -- class tycon
| VectInst Id -- instance dfun (always SCALAR) !!!FIXME: should be superfluous now
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Unfoldings
-%* *
-%************************************************************************
+* *
+************************************************************************
The @Unfolding@ type is declared here to avoid numerous loops
+-}
-\begin{code}
-- | Records the /unfolding/ of an identifier, which is approximately the form the
-- identifier would have if we substituted its definition in for the identifier.
-- This type should be treated as abstract everywhere except in "CoreUnfold"
@@ -770,8 +766,8 @@ data UnfoldingGuidance
-- (where there are the right number of arguments.)
| UnfNever -- The RHS is big, so don't inline it
-\end{code}
+{-
Note [Historical note: unfoldings for wrappers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used to have a nice clever scheme in interface files for
@@ -818,8 +814,8 @@ why we record the number of expected arguments in the DFunUnfolding.
Note that although it's an Arity, it's most convenient for it to give
the *total* number of arguments, both type and value. See the use
site in exprIsConApp_maybe.
+-}
-\begin{code}
-- Constants for the UnfWhen constructor
needSaturated, unSaturatedOk :: Bool
needSaturated = False
@@ -853,9 +849,7 @@ seqUnfolding _ = ()
seqGuidance :: UnfoldingGuidance -> ()
seqGuidance (UnfIfGoodArgs ns n b) = n `seq` sum ns `seq` b `seq` ()
seqGuidance _ = ()
-\end{code}
-\begin{code}
isStableSource :: UnfoldingSource -> Bool
-- Keep the unfolding template
isStableSource InlineCompulsory = True
@@ -963,8 +957,8 @@ neverUnfoldGuidance _ = False
canUnfold :: Unfolding -> Bool
canUnfold (CoreUnfolding { uf_guidance = g }) = not (neverUnfoldGuidance g)
canUnfold _ = False
-\end{code}
+{-
Note [InlineRules]
~~~~~~~~~~~~~~~~~
When you say
@@ -1008,13 +1002,13 @@ the occurrence info is wrong
without a loop breaker marked
-%************************************************************************
-%* *
+************************************************************************
+* *
AltCon
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- The Ord is needed for the FiniteMap used in the lookForConstructor
-- in SimplEnv. If you declared that lookForConstructor *ignores*
-- constructor-applications with LitArg args, then you could get
@@ -1044,13 +1038,13 @@ cmpAltCon (LitAlt _) DEFAULT = GT
cmpAltCon con1 con2 = WARN( True, text "Comparing incomparable AltCons" <+>
ppr con1 <+> ppr con2 )
LT
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Useful synonyms}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [CoreProgram]
~~~~~~~~~~~~~~~~~~
@@ -1071,8 +1065,7 @@ a list of CoreBind
bindings where possible. So the program typically starts life as a
single giant Rec, which is then dependency-analysed into smaller
chunks.
-
-\begin{code}
+-}
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.lhs
@@ -1089,15 +1082,15 @@ type CoreArg = Arg CoreBndr
type CoreBind = Bind CoreBndr
-- | Case alternatives where binders are 'CoreBndr's
type CoreAlt = Alt CoreBndr
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Tagging}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Binders are /tagged/ with a t
data TaggedBndr t = TB CoreBndr t -- TB for "tagged binder"
@@ -1132,16 +1125,15 @@ deTagBind (Rec prs) = Rec [(b, deTagExpr rhs) | (TB b _, rhs) <- prs
deTagAlt :: TaggedAlt t -> CoreAlt
deTagAlt (con, bndrs, rhs) = (con, [b | TB b _ <- bndrs], deTagExpr rhs)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Core-constructing functions with checking}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Apply a list of argument expressions to a function expression in a nested fashion. Prefer to
-- use 'MkCore.mkCoreApps' if possible
mkApps :: Expr b -> [Arg b] -> Expr b
@@ -1253,16 +1245,15 @@ varToCoreExpr v | isTyVar v = Type (mkTyVarTy v)
varsToCoreExprs :: [CoreBndr] -> [Expr b]
varsToCoreExprs vs = map varToCoreExpr vs
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Simple access functions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Extract every variable bound by this group
bindersOf :: Bind b -> [b]
-- If you edit this function, you may need to update the GHC formalism
@@ -1287,9 +1278,7 @@ flattenBinds :: [Bind b] -> [(b, Expr b)]
flattenBinds (NonRec b r : binds) = (b,r) : flattenBinds binds
flattenBinds (Rec prs1 : binds) = prs1 ++ flattenBinds binds
flattenBinds [] = []
-\end{code}
-\begin{code}
-- | We often want to strip off leading lambdas before getting down to
-- business. This function is your friend.
collectBinders :: Expr b -> ([b], Expr b)
@@ -1325,9 +1314,7 @@ collectValBinders expr
where
go ids (Lam b e) | isId b = go (b:ids) e
go ids body = (reverse ids, body)
-\end{code}
-\begin{code}
-- | Takes a nested application expression and returns the function
-- being applied and the arguments to which it is applied
collectArgs :: Expr b -> (Expr b, [Arg b])
@@ -1336,20 +1323,20 @@ collectArgs expr
where
go (App f a) as = go f (a:as)
go e as = (e, as)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Predicates}
-%* *
-%************************************************************************
+* *
+************************************************************************
At one time we optionally carried type arguments through to runtime.
@isRuntimeVar v@ returns True if (Lam v _) really becomes a lambda at runtime,
i.e. if type applications are actual lambdas because types are kept around
at runtime. Similarly isRuntimeArg.
+-}
-\begin{code}
-- | Will this variable exist at runtime?
isRuntimeVar :: Var -> Bool
isRuntimeVar = isId
@@ -1384,16 +1371,15 @@ valBndrCount = count isId
-- | The number of argument expressions that are values rather than types at their top level
valArgCount :: [Arg b] -> Int
valArgCount = count isValArg
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Seq stuff}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
seqExpr :: CoreExpr -> ()
seqExpr (Var v) = v `seq` ()
seqExpr (Lit lit) = lit `seq` ()
@@ -1439,15 +1425,15 @@ seqRules [] = ()
seqRules (Rule { ru_bndrs = bndrs, ru_args = args, ru_rhs = rhs } : rules)
= seqBndrs bndrs `seq` seqExprs (rhs:args) `seq` seqRules rules
seqRules (BuiltinRule {} : rules) = seqRules rules
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Annotated core}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Annotated core: allows annotation at every node in the tree
type AnnExpr bndr annot = (annot, AnnExpr' bndr annot)
@@ -1472,9 +1458,7 @@ type AnnAlt bndr annot = (AltCon, [bndr], AnnExpr bndr annot)
data AnnBind bndr annot
= AnnNonRec bndr (AnnExpr bndr annot)
| AnnRec [(bndr, AnnExpr bndr annot)]
-\end{code}
-\begin{code}
-- | Takes a nested application expression and returns the function
-- being applied and the arguments to which it is applied
collectAnnArgs :: AnnExpr b a -> (AnnExpr b a, [AnnExpr b a])
@@ -1483,9 +1467,7 @@ collectAnnArgs expr
where
go (_, AnnApp f a) as = go f (a:as)
go e as = (e, as)
-\end{code}
-\begin{code}
deAnnotate :: AnnExpr bndr annot -> Expr bndr
deAnnotate (_, e) = deAnnotate' e
@@ -1510,9 +1492,7 @@ deAnnotate' (AnnCase scrut v t alts)
deAnnAlt :: AnnAlt bndr annot -> Alt bndr
deAnnAlt (con,args,rhs) = (con,args,deAnnotate rhs)
-\end{code}
-\begin{code}
-- | As 'collectBinders' but for 'AnnExpr' rather than 'Expr'
collectAnnBndrs :: AnnExpr bndr annot -> ([bndr], AnnExpr bndr annot)
collectAnnBndrs e
@@ -1520,4 +1500,3 @@ collectAnnBndrs e
where
collect bs (_, AnnLam b body) = collect (b:bs) body
collect bs body = (reverse bs, body)
-\end{code}
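For illustration, here is a minimal standalone sketch of the collectBinders/collectArgs idea described in this file, written against a toy expression type (Var/Lam/App) rather than GHC's Expr; the toy type and module name are inventions for the example only.

module ToyCollect where

-- A toy expression type standing in for CoreSyn's Expr.
data Expr
  = Var String
  | Lam String Expr
  | App Expr Expr
  deriving Show

-- Strip off leading lambdas, returning the binders and the body.
collectBinders :: Expr -> ([String], Expr)
collectBinders = go []
  where
    go bs (Lam b e) = go (b:bs) e
    go bs e         = (reverse bs, e)

-- Take a nested application apart into the head function and its arguments.
collectArgs :: Expr -> (Expr, [Expr])
collectArgs e0 = go e0 []
  where
    go (App f a) as = go f (a:as)
    go e         as = (e, as)

-- ghci> collectBinders (Lam "x" (Lam "y" (Var "x")))
-- (["x","y"],Var "x")
-- ghci> collectArgs (App (App (Var "f") (Var "a")) (Var "b"))
-- (Var "f",[Var "a",Var "b"])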
diff --git a/compiler/coreSyn/CoreTidy.lhs b/compiler/coreSyn/CoreTidy.hs
index 810a71ca6c..7f09c68ca2 100644
--- a/compiler/coreSyn/CoreTidy.lhs
+++ b/compiler/coreSyn/CoreTidy.hs
@@ -1,12 +1,12 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The AQUA Project, Glasgow University, 1996-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The AQUA Project, Glasgow University, 1996-1998
+
This module contains "tidying" code for *nested* expressions, bindings, rules.
The code for *top-level* bindings is in TidyPgm.
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module CoreTidy (
tidyExpr, tidyVarOcc, tidyRule, tidyRules, tidyUnfolding
@@ -27,16 +27,15 @@ import Name hiding (tidyNameOcc)
import SrcLoc
import Maybes
import Data.List
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Tidying expressions, rules}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tidyBind :: TidyEnv
-> CoreBind
-> (TidyEnv, CoreBind)
@@ -105,16 +104,15 @@ tidyRule env rule@(Rule { ru_bndrs = bndrs, ru_args = args, ru_rhs = rhs,
ru_rhs = tidyExpr env' rhs,
ru_fn = tidyNameOcc env fn,
ru_rough = map (fmap (tidyNameOcc env')) mb_ns }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Tidying non-top-level binders}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tidyNameOcc :: TidyEnv -> Name -> Name
-- In rules and instances, we have Names, and we must tidy them too
-- Fortunately, we can lookup in the VarEnv with a name
@@ -223,8 +221,8 @@ tidyUnfolding tidy_env
| otherwise
= unf_from_rhs
tidyUnfolding _ unf _ = unf -- NoUnfolding or OtherCon
-\end{code}
+{-
Note [Tidy IdInfo]
~~~~~~~~~~~~~~~~~~
All nested Ids now have the same IdInfo, namely vanillaIdInfo, which
@@ -268,9 +266,7 @@ optimisation pipeline, leaving only the OneShotInfo on the lambda. Hence we
must preserve this info in inlinings.
This applies to lambda binders only, hence it is stored in IfaceLamBndr.
+-}
-
-\begin{code}
(=:) :: a -> (a -> b) -> b
m =: k = m `seq` k m
-\end{code}
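The (=:) combinator above is just strict application. A tiny standalone sketch, not tied to GHC's TidyEnv, showing that the left argument is forced before the continuation runs:

module ToyStrictApply where

-- Force the value, then hand it to the continuation.
(=:) :: a -> (a -> b) -> b
m =: k = m `seq` k m

example :: Int
example = (2 + 3) =: \n -> n * 10
-- (2 + 3) is forced to 5 before the continuation runs, giving 50.

main :: IO ()
main = print example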
diff --git a/compiler/coreSyn/CoreUnfold.lhs b/compiler/coreSyn/CoreUnfold.hs
index fd485ae2b7..dc9f95e73a 100644
--- a/compiler/coreSyn/CoreUnfold.lhs
+++ b/compiler/coreSyn/CoreUnfold.hs
@@ -1,7 +1,7 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The AQUA Project, Glasgow University, 1994-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The AQUA Project, Glasgow University, 1994-1998
+
Core-syntax unfoldings
@@ -13,8 +13,8 @@ unfoldings, capturing ``higher-level'' things we know about a binding,
usually things that the simplifier found out (e.g., ``it's a
literal''). In the corner of a @CoreUnfolding@ unfolding, you will
find, unsurprisingly, a Core expression.
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module CoreUnfold (
@@ -66,16 +66,15 @@ import ForeignCall
import qualified Data.ByteString as BS
import Data.Maybe
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Making unfoldings}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkTopUnfolding :: DynFlags -> Bool -> CoreExpr -> Unfolding
mkTopUnfolding dflags = mkUnfolding dflags InlineRhs True {- Top level -}
@@ -184,8 +183,8 @@ specUnfolding _ _ _ _ _ = noUnfolding
spec_doc :: SDoc
spec_doc = ptext (sLit "specUnfolding")
-\end{code}
+{-
Note [Specialising unfoldings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we specialise a function for some given type-class arguments, we use
@@ -214,9 +213,8 @@ specUnfolding to specialise its unfolding. Some important points:
we keep it (so the specialised thing too will always inline)
if a stable unfolding has UnfoldingGuidance of UnfIfGoodArgs
(which arises from INLINEABLE), we discard it
+-}
-
-\begin{code}
mkCoreUnfolding :: UnfoldingSource -> Bool -> CoreExpr
-> UnfoldingGuidance -> Unfolding
-- Occurrence-analyses the expression before capturing it
@@ -253,8 +251,8 @@ mkUnfolding dflags src top_lvl is_bottoming expr
guidance = calcUnfoldingGuidance dflags expr
-- NB: *not* (calcUnfoldingGuidance (occurAnalyseExpr expr))!
-- See Note [Calculate unfolding guidance on the non-occ-anal'd expression]
-\end{code}
+{-
Note [Occurrence analysis of unfoldings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do occurrence-analysis of unfoldings once and for all, when the
@@ -297,13 +295,13 @@ it gets fixed up next round. And it should be rare, because large
let-bound things that are dead are usually caught by preInlineUnconditionally
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{The UnfoldingGuidance type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
inlineBoringOk :: CoreExpr -> Bool
-- See Note [INLINE for small functions]
-- True => the result of inlining the expression is
@@ -361,8 +359,8 @@ calcUnfoldingGuidance dflags expr
plus_disc | isFunTy (idType bndr) = max
| otherwise = (+)
-- See Note [Function and non-function discounts]
-\end{code}
+{-
Note [Computing the size of an expression]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The basic idea of sizeExpr is obvious enough: count nodes. But getting the
@@ -457,8 +455,8 @@ Things to note:
NB: you might think that PostInlineUnconditionally would do this
but it doesn't fire for top-level things; see SimplUtils
Note [Top level and postInlineUnconditionally]
+-}
-\begin{code}
uncondInline :: CoreExpr -> Arity -> Int -> Bool
-- Inline unconditionally if there is no size increase
-- Size of call is arity (+1 for the function)
@@ -466,10 +464,7 @@ uncondInline :: CoreExpr -> Arity -> Int -> Bool
uncondInline rhs arity size
| arity > 0 = size <= 10 * (arity + 1) -- See Note [INLINE for small functions] (1)
| otherwise = exprIsTrivial rhs -- See Note [INLINE for small functions] (4)
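As a worked example of the test above, here is the arithmetic on its own; a sketch only, with none of GHC's real ExprSize machinery involved:

-- Inline unconditionally when the body is no bigger than the call itself:
-- each argument plus the function "costs" 10.
uncondInlineSketch :: Int -> Int -> Bool
uncondInlineSketch arity size
  | arity > 0 = size <= 10 * (arity + 1)
  | otherwise = False   -- the real code falls back to exprIsTrivial here

-- uncondInlineSketch 2 30 == True   (a saturated 2-argument call costs 30)
-- uncondInlineSketch 2 31 == False  (body bigger than the call; don't)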
-\end{code}
-
-\begin{code}
sizeExpr :: DynFlags
-> FastInt -- Bomb out if it gets bigger than this
-> [Id] -- Arguments; we're interested in which of these
@@ -630,10 +625,7 @@ sizeExpr dflags bOMB_OUT_SIZE top_args expr
-- an expression of type State# RealWorld must be a variable
isRealWorldExpr (Var id) = isRealWorldId id
isRealWorldExpr _ = False
-\end{code}
-
-\begin{code}
-- | Finds a nominal size of a string literal.
litSize :: Literal -> Int
-- Used by CoreUnfold.sizeExpr
@@ -699,8 +691,8 @@ conSize dc n_val_args
-- See Note [Constructor size and result discount]
| otherwise = SizeIs (_ILIT(10)) emptyBag (iUnbox (10 * (1 + n_val_args)))
-\end{code}
+{-
Note [Constructor size and result discount]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Treat a constructor application as size 10, regardless of how many
@@ -771,8 +763,8 @@ There's no point in doing so -- any optimisations will see the S#
through n's unfolding. Nor will a big size inhibit unfoldings functions
that mention a literal Integer, because the float-out pass will float
all those constants to top level.
+-}
-\begin{code}
primOpSize :: PrimOp -> Int -> ExprSize
primOpSize op n_val_args
= if primOpOutOfLine op
@@ -800,8 +792,8 @@ augmentSize = SizeIs (_ILIT(0)) emptyBag (_ILIT(40))
lamScrutDiscount :: DynFlags -> ExprSize -> ExprSize
lamScrutDiscount dflags (SizeIs n vs _) = SizeIs n vs (iUnbox (ufFunAppDiscount dflags))
lamScrutDiscount _ TooBig = TooBig
-\end{code}
+{-
Note [addAltSize result discounts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When adding the size of alternatives, we *add* the result discounts
@@ -854,8 +846,8 @@ In a function application (f a b)
get a saturated application)
Code for manipulating sizes
+-}
-\begin{code}
data ExprSize = TooBig
| SizeIs FastInt -- Size found
!(Bag (Id,Int)) -- Arguments cased herein, and discount for each such
@@ -886,21 +878,20 @@ sizeN :: Int -> ExprSize
sizeZero = SizeIs (_ILIT(0)) emptyBag (_ILIT(0))
sizeN n = SizeIs (iUnbox n) emptyBag (_ILIT(0))
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[considerUnfolding]{Given all the info, do (not) do the unfolding}
-%* *
-%************************************************************************
+* *
+************************************************************************
We use 'couldBeSmallEnoughToInline' to avoid exporting inlinings that
we ``couldn't possibly use'' on the other side. Can be overridden w/
flaggery. Just the same as smallEnoughToInline, except that it has no
actual arguments.
+-}
-\begin{code}
couldBeSmallEnoughToInline :: DynFlags -> Int -> CoreExpr -> Bool
couldBeSmallEnoughToInline dflags threshold rhs
= case sizeExpr dflags (iUnbox threshold) [] body of
@@ -947,8 +938,8 @@ certainlyWillInline _ unf@(DFunUnfolding {})
certainlyWillInline _ _
= Nothing
-\end{code}
+{-
Note [certainlyWillInline: be careful of thunks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Don't claim that thunks will certainly inline, because that risks work
@@ -959,11 +950,11 @@ found that the WorkWrap phase thought that
was certainlyWillInline, so the addition got duplicated.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{callSiteInline}
-%* *
-%************************************************************************
+* *
+************************************************************************
This is the key function. It decides whether to inline a variable at a call site
@@ -980,8 +971,8 @@ NOTE: we don't want to inline top-level functions that always diverge.
It just makes the code bigger. It turns out that the convenient way to prevent
them inlining is to give them a NOINLINE pragma, which we do in
StrictAnal.addStrictnessInfoToTopId
+-}
-\begin{code}
callSiteInline :: DynFlags
-> Id -- The Id
-> Bool -- True <=> unfolding is active
@@ -1117,8 +1108,8 @@ tryUnfolding dflags id lone_variable
RhsCtxt -> uf_arity > 0 --
_ -> not is_top && uf_arity > 0 -- Note [Nested functions]
-- Note [Inlining in ArgCtxt]
-\end{code}
+{-
Note [Unfold into lazy contexts], Note [RHS of lets]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the call is the argument of a function with a RULE, or the RHS of a let,
@@ -1310,8 +1301,8 @@ This kind of thing can occur if you have
foo = let x = e in (x,x)
which Roman did.
+-}
-\begin{code}
computeDiscount :: DynFlags -> [Int] -> Int -> [ArgSummary] -> CallCtxt
-> Int
computeDiscount dflags arg_discounts res_discount arg_infos cont_info
@@ -1361,13 +1352,13 @@ computeDiscount dflags arg_discounts res_discount arg_infos cont_info
-- Otherwise we, rather arbitrarily, threshold it. Yuk.
-- But we want to avoid inlining large functions that return
-- constructors into contexts that are simply "interesting"
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Interesting arguments
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Interesting arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1398,8 +1389,8 @@ where df is con-like. Then we'd really like to inline 'f' so that the
rule for (*) (df d) can fire. To do this
a) we give a discount for being an argument of a class-op (eg (*) d)
b) we say that a con-like argument (eg (df d)) is interesting
+-}
-\begin{code}
data ArgSummary = TrivArg -- Nothing interesting
| NonTrivArg -- Arg has structure
| ValueArg -- Arg is a con-app or PAP
@@ -1439,4 +1430,3 @@ interestingArg e = go e 0
nonTriv :: ArgSummary -> Bool
nonTriv TrivArg = False
nonTriv _ = True
-\end{code}
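To make the discount bookkeeping above concrete, here is a simplified standalone model of the computeDiscount idea: non-trivial arguments earn their per-argument discount, and an interesting continuation earns the result discount. The weights GHC actually uses differ; this only illustrates the shape of the calculation, and all names here are invented.

data ArgSummary = TrivArg | NonTrivArg | ValueArg
  deriving (Eq, Show)

nonTriv :: ArgSummary -> Bool
nonTriv TrivArg = False
nonTriv _       = True

-- Per-argument discounts, a result discount, the summaries of the actual
-- arguments, and whether the call sits in an "interesting" context.
discountSketch :: [Int] -> Int -> [ArgSummary] -> Bool -> Int
discountSketch argDiscounts resDiscount argInfos interestingCont
  = sum [ d | (d, a) <- zip argDiscounts argInfos, nonTriv a ]
    + (if interestingCont then resDiscount else 0)

-- discountSketch [20,10] 30 [ValueArg, TrivArg] True == 50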
diff --git a/compiler/coreSyn/CoreUtils.lhs b/compiler/coreSyn/CoreUtils.hs
index 86db946f26..ffb327523c 100644
--- a/compiler/coreSyn/CoreUtils.lhs
+++ b/compiler/coreSyn/CoreUtils.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
Utility functions on @Core@ syntax
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
-- | Commonly useful utilities for manipulating the Core language
@@ -71,16 +71,15 @@ import Platform
import Util
import Pair
import Data.List
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Find the type of a Core atom/expression}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
exprType :: CoreExpr -> Type
-- ^ Recover the type of a well-typed Core expression. Fails when
-- applied to the actual 'CoreSyn.Type' expression as it cannot
@@ -88,7 +87,7 @@ exprType :: CoreExpr -> Type
exprType (Var var) = idType var
exprType (Lit lit) = literalType lit
exprType (Coercion co) = coercionType co
-exprType (Let bind body)
+exprType (Let bind body)
| NonRec tv rhs <- bind -- See Note [Type bindings]
, Type ty <- rhs = substTyWith [tv] [ty] (exprType body)
| otherwise = exprType body
@@ -116,15 +115,15 @@ coreAltsType :: [CoreAlt] -> Type
-- ^ Returns the type of the first alternative, which should be the same as for all alternatives
coreAltsType (alt:_) = coreAltType alt
coreAltsType [] = panic "corAltsType"
-\end{code}
+{-
Note [Type bindings]
~~~~~~~~~~~~~~~~~~~~
Core does allow type bindings, although such bindings are
not much used, except in the output of the desugarer.
Example:
let a = Int in (\x:a. x)
-Given this, exprType must be careful to substitute 'a' in the
+Given this, exprType must be careful to substitute 'a' in the
result type (Trac #8522).
Note [Existential variables and silly type synonyms]
@@ -150,8 +149,8 @@ Various possibilities suggest themselves:
- Expand synonyms on the fly, when the problem arises. That is what
we are doing here. It's not too expensive, I think.
+-}
-\begin{code}
applyTypeToArg :: Type -> CoreExpr -> Type
-- ^ Determines the type resulting from applying an expression with given type
-- to a given argument expression
@@ -180,15 +179,15 @@ applyTypeToArgs e op_ty args
panic_msg = vcat [ ptext (sLit "Expression:") <+> pprCoreExpr e
, ptext (sLit "Type:") <+> ppr op_ty
, ptext (sLit "Args:") <+> ppr args ]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Attaching notes}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Wrap the given expression in the coercion safely, dropping
-- identity coercions and coalescing nested coercions
mkCast :: CoreExpr -> Coercion -> CoreExpr
@@ -196,7 +195,7 @@ mkCast e co | ASSERT2( coercionRole co == Representational
, ptext (sLit "coercion") <+> ppr co <+> ptext (sLit "passed to mkCast") <+> ppr e <+> ptext (sLit "has wrong role") <+> ppr (coercionRole co) )
isReflCo co = e
-mkCast (Coercion e_co) co
+mkCast (Coercion e_co) co
| isCoVarType (pSnd (coercionKind co))
-- The guard here checks that g has a (~#) on both sides,
-- otherwise decomposeCo fails. Can in principle happen
@@ -219,9 +218,7 @@ mkCast expr co
-- else
WARN(not (from_ty `eqType` exprType expr), text "Trying to coerce" <+> text "(" <> ppr expr $$ text "::" <+> ppr (exprType expr) <> text ")" $$ ppr co $$ ppr (coercionType co))
(Cast expr co)
-\end{code}
-\begin{code}
-- | Wraps the given expression in the source annotation, dropping the
-- annotation if possible.
mkTick :: Tickish Id -> CoreExpr -> CoreExpr
@@ -288,15 +285,15 @@ tickHNFArgs t e = push t e
push t (App f (Type u)) = App (push t f) (Type u)
push t (App f arg) = App (push t f) (mkTick t arg)
push _t e = e
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Other expression construction}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
bindNonRec :: Id -> CoreExpr -> CoreExpr -> CoreExpr
-- ^ @bindNonRec x r b@ produces either:
--
@@ -323,9 +320,7 @@ needsCaseBinding ty rhs = isUnLiftedType ty && not (exprOkForSpeculation rhs)
-- Make a case expression instead of a let
-- These can arise either from the desugarer,
-- or from beta reductions: (\x.e) (x +# y)
-\end{code}
-\begin{code}
mkAltExpr :: AltCon -- ^ Case alternative constructor
-> [CoreBndr] -- ^ Things bound by the pattern match
-> [Type] -- ^ The type arguments to the case alternative
@@ -338,19 +333,18 @@ mkAltExpr (LitAlt lit) [] []
= Lit lit
mkAltExpr (LitAlt _) _ _ = panic "mkAltExpr LitAlt"
mkAltExpr DEFAULT _ _ = panic "mkAltExpr DEFAULT"
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Taking expressions apart}
-%* *
-%************************************************************************
+* *
+************************************************************************
The default alternative must be first, if it exists at all.
This makes it easy to find, though it makes matching marginally harder.
+-}
-\begin{code}
-- | Extract the default case alternative
findDefault :: [(AltCon, [a], b)] -> ([(AltCon, [a], b)], Maybe b)
findDefault ((DEFAULT,args,rhs) : alts) = ASSERT( null args ) (alts, Just rhs)
@@ -404,16 +398,14 @@ trimConArgs :: AltCon -> [CoreArg] -> [CoreArg]
trimConArgs DEFAULT args = ASSERT( null args ) []
trimConArgs (LitAlt _) args = ASSERT( null args ) []
trimConArgs (DataAlt dc) args = dropList (dataConUnivTyVars dc) args
-\end{code}
-\begin{code}
filterAlts :: [Unique] -- ^ Supply of uniques used in case we have to manufacture a new AltCon
-> Type -- ^ Type of scrutinee (used to prune possibilities)
-> [AltCon] -- ^ 'imposs_cons': constructors known to be impossible due to the form of the scrutinee
-> [(AltCon, [Var], a)] -- ^ Alternatives
-> ([AltCon], Bool, [(AltCon, [Var], a)])
-- Returns:
- -- 1. Constructors that will never be encountered by the
+ -- 1. Constructors that will never be encountered by the
-- *default* case (if any). A superset of imposs_cons
-- 2. Whether we managed to refine the default alternative into a specific constructor (for statistics only)
-- 3. The new alternatives, trimmed by
@@ -424,13 +416,13 @@ filterAlts :: [Unique] -- ^ Supply of uniques used in case we have t
--
-- NB: the final list of alternatives may be empty:
-- This is a tricky corner case. If the data type has no constructors,
- -- which GHC allows, or if the imposs_cons covers all constructors (after taking
+ -- which GHC allows, or if the imposs_cons covers all constructors (after taking
-- account of GADTs), then no alternatives can match.
--
-- If callers need to preserve the invariant that there is always at least one branch
-- in a "case" statement then they will need to manually add a dummy case branch that just
-- calls "error" or similar.
-filterAlts us ty imposs_cons alts
+filterAlts us ty imposs_cons alts
| Just (tycon, inst_tys) <- splitTyConApp_maybe ty
= filter_alts tycon inst_tys
| otherwise
@@ -439,31 +431,31 @@ filterAlts us ty imposs_cons alts
(alts_wo_default, maybe_deflt) = findDefault alts
alt_cons = [con | (con,_,_) <- alts_wo_default]
- filter_alts tycon inst_tys
+ filter_alts tycon inst_tys
= (imposs_deflt_cons, refined_deflt, merged_alts)
where
trimmed_alts = filterOut (impossible_alt inst_tys) alts_wo_default
imposs_deflt_cons = nub (imposs_cons ++ alt_cons)
- -- "imposs_deflt_cons" are handled
- -- EITHER by the context,
+ -- "imposs_deflt_cons" are handled
+ -- EITHER by the context,
-- OR by a non-DEFAULT branch in this case expression.
merged_alts = mergeAlts trimmed_alts (maybeToList maybe_deflt')
- -- We need the mergeAlts in case the new default_alt
+ -- We need the mergeAlts in case the new default_alt
-- has turned into a constructor alternative.
-- The merge keeps the inner DEFAULT at the front, if there is one
-- and interleaves the alternatives in the right order
(refined_deflt, maybe_deflt') = case maybe_deflt of
Nothing -> (False, Nothing)
- Just deflt_rhs
- | isAlgTyCon tycon -- It's a data type, tuple, or unboxed tuples.
+ Just deflt_rhs
+ | isAlgTyCon tycon -- It's a data type, tuple, or unboxed tuples.
, not (isNewTyCon tycon) -- We can have a newtype, if we are just doing an eval:
-- case x of { DEFAULT -> e }
-- and we don't want to fill in a default for them!
, Just all_cons <- tyConDataCons_maybe tycon
- , let imposs_data_cons = [con | DataAlt con <- imposs_deflt_cons] -- We now know it's a data type
+ , let imposs_data_cons = [con | DataAlt con <- imposs_deflt_cons] -- We now know it's a data type
impossible con = con `elem` imposs_data_cons || dataConCannotMatch inst_tys con
-> case filterOut impossible all_cons of
-- Eliminate the default alternative
@@ -489,8 +481,8 @@ filterAlts us ty imposs_cons alts
impossible_alt _ (con, _, _) | con `elem` imposs_cons = True
impossible_alt inst_tys (DataAlt con, _, _) = dataConCannotMatch inst_tys con
impossible_alt _ _ = False
-\end{code}
+{-
Note [Unreachable code]
~~~~~~~~~~~~~~~~~~~~~~~
It is possible (although unusual) for GHC to find a case expression
@@ -521,11 +513,11 @@ Similar things can happen (augmented by GADTs) when the Simplifier
filters down the matching alternatives in Simplify.rebuildCase.
-%************************************************************************
-%* *
+************************************************************************
+* *
exprIsTrivial
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [exprIsTrivial]
~~~~~~~~~~~~~~~~~~~~
@@ -552,8 +544,8 @@ Note [Tick trivial]
Ticks are not trivial. If we treat "tick<n> x" as trivial, it will be
inlined inside lambdas and the entry count will be skewed, for
example. Furthermore "scc<n> x" will turn into just "x" in mkTick.
+-}
-\begin{code}
exprIsTrivial :: CoreExpr -> Bool
exprIsTrivial (Var _) = True -- See Note [Variables are trivial]
exprIsTrivial (Type _) = True
@@ -564,14 +556,14 @@ exprIsTrivial (Tick _ _) = False -- See Note [Tick trivial]
exprIsTrivial (Cast e _) = exprIsTrivial e
exprIsTrivial (Lam b body) = not (isRuntimeVar b) && exprIsTrivial body
exprIsTrivial _ = False
-\end{code}
+{-
When substituting in a breakpoint we need to strip away the type cruft
from a trivial expression and get back to the Id. The invariant is
that the expression we're substituting was originally trivial
according to exprIsTrivial.
+-}
-\begin{code}
getIdFromTrivialExpr :: CoreExpr -> Id
getIdFromTrivialExpr e = go e
where go (Var v) = v
@@ -579,14 +571,14 @@ getIdFromTrivialExpr e = go e
go (Cast e _) = go e
go (Lam b e) | not (isRuntimeVar b) = go e
go e = pprPanic "getIdFromTrivialExpr" (ppr e)
-\end{code}
+{-
exprIsBottom is a very cheap and cheerful function; it may return
False for bottoming expressions, but it never costs much to ask. See
also CoreArity.exprBotStrictness_maybe, but that's a bit more
expensive.
+-}
-\begin{code}
exprIsBottom :: CoreExpr -> Bool
exprIsBottom e
= go 0 e
@@ -598,14 +590,13 @@ exprIsBottom e
go n (Cast e _) = go n e
go n (Let _ e) = go n e
go _ _ = False
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
exprIsDupable
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [exprIsDupable]
~~~~~~~~~~~~~~~~~~~~
@@ -618,9 +609,8 @@ Note [exprIsDupable]
Its only purpose is to avoid fruitless let-binding
and then inlining of case join points
+-}
-
-\begin{code}
exprIsDupable :: DynFlags -> CoreExpr -> Bool
exprIsDupable dflags e
= isJust (go dupAppSize e)
@@ -644,13 +634,13 @@ dupAppSize = 8 -- Size of term we are prepared to duplicate
-- This is *just* big enough to make test MethSharing
-- inline enough join points. Really it should be
-- smaller, and could be if we fixed Trac #4960.
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
exprIsCheap, exprIsExpandable
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [exprIsWorkFree]
~~~~~~~~~~~~~~~~~~~~~
@@ -676,11 +666,11 @@ The function 'noFactor' is heap-allocated and then called. Turns out
that 'notDivBy' is strict in its THIRD arg, but that is invisible to
the caller of noFactor, which therefore cannot do w/w and
heap-allocates noFactor's argument. At the moment (May 12) we are just
-going to put up with this, because the previous more aggressive inlining
-(which treated 'noFactor' as work-free) was duplicating primops, which
+going to put up with this, because the previous more aggressive inlining
+(which treated 'noFactor' as work-free) was duplicating primops, which
in turn was making inner loops of array calculations run slow (#5623)
+-}
-\begin{code}
exprIsWorkFree :: CoreExpr -> Bool
-- See Note [exprIsWorkFree]
exprIsWorkFree e = go 0 e
@@ -689,7 +679,7 @@ exprIsWorkFree e = go 0 e
go _ (Type {}) = True
go _ (Coercion {}) = True
go n (Cast e _) = go n e
- go n (Case scrut _ _ alts) = foldl (&&) (exprIsWorkFree scrut)
+ go n (Case scrut _ _ alts) = foldl (&&) (exprIsWorkFree scrut)
[ go n rhs | (_,_,rhs) <- alts ]
-- See Note [Case expressions are work-free]
go _ (Let {}) = False
@@ -700,8 +690,8 @@ exprIsWorkFree e = go 0 e
| otherwise = go n e
go n (App f e) | isRuntimeArg e = exprIsWorkFree e && go (n+1) f
| otherwise = go n f
-\end{code}
+{-
Note [Case expressions are work-free]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Are case-expressions work-free? Consider
@@ -750,8 +740,8 @@ Note that exprIsHNF does not imply exprIsCheap. Eg
let x = fac 20 in Just x
This responds True to exprIsHNF (you can discard a seq), but
False to exprIsCheap.
+-}
-\begin{code}
exprIsCheap :: CoreExpr -> Bool
exprIsCheap = exprIsCheap' isCheapApp
@@ -793,17 +783,17 @@ exprIsCheap' good_app other_expr -- Applications and variables
go (App f a) val_args | isRuntimeArg a = go f (a:val_args)
| otherwise = go f val_args
- go (Var _) [] = True
+ go (Var _) [] = True
-- Just a type application of a variable
-- (f t1 t2 t3) counts as WHNF
-- This case is probably handeld by the good_app case
-- below, which should have a case for n=0, but putting
-- it here too is belt and braces; and it's such a common
- -- case that checking for null directly seems like a
+ -- case that checking for null directly seems like a
-- good plan
go (Var f) args
- | good_app f (length args)
+ | good_app f (length args)
= go_pap args
| otherwise
@@ -845,16 +835,16 @@ exprIsCheap' good_app other_expr -- Applications and variables
-- there's no guarantee that (sel d x) will be too. Hence (n_val_args == 1)
-------------------------------------
-type CheapAppFun = Id -> Int -> Bool
- -- Is an application of this function to n *value* args
- -- always cheap, assuming the arguments are cheap?
+type CheapAppFun = Id -> Int -> Bool
+ -- Is an application of this function to n *value* args
+ -- always cheap, assuming the arguments are cheap?
-- Mainly true of partial applications, data constructors,
-- and of course true if the number of args is zero
isCheapApp :: CheapAppFun
isCheapApp fn n_val_args
- = isDataConWorkId fn
- || n_val_args == 0
+ = isDataConWorkId fn
+ || n_val_args == 0
|| n_val_args < idArity fn
isExpandableApp :: CheapAppFun
@@ -872,8 +862,8 @@ isExpandableApp fn n_val_args
| Just (arg, ty) <- splitFunTy_maybe ty
, isPredTy arg = go (n_val_args-1) ty
| otherwise = False
-\end{code}
+{-
Note [Expandable overloadings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose the user wrote this
@@ -887,13 +877,13 @@ So we treat the application of a function (negate in this case) to a
it's applied only to dictionaries.
-%************************************************************************
-%* *
+************************************************************************
+* *
exprOkForSpeculation
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-----------------------------
-- | 'exprOkForSpeculation' returns True of an expression that is:
--
@@ -1030,8 +1020,8 @@ isDivOp WordRemOp = True
isDivOp FloatDivOp = True
isDivOp DoubleDivOp = True
isDivOp _ = False
-\end{code}
+{-
Note [exprOkForSpeculation: case expressions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's always sound for exprOkForSpeculation to return False, and we
@@ -1104,13 +1094,13 @@ We say "yes", even though 'x' may not be evaluated. Reasons
before code gen. Until then, it's not guaranteed
-%************************************************************************
-%* *
+************************************************************************
+* *
exprIsHNF, exprIsConLike
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- Note [exprIsHNF] See also Note [exprIsCheap and exprIsHNF]
-- ~~~~~~~~~~~~~~~~
-- | exprIsHNF returns true for expressions that are certainly /already/
@@ -1144,9 +1134,7 @@ We say "yes", even though 'x' may not be evaluated. Reasons
-- unboxed type must be ok-for-speculation (or trivial).
exprIsHNF :: CoreExpr -> Bool -- True => Value-lambda, constructor, PAP
exprIsHNF = exprIsHNFlike isDataConWorkId isEvaldUnfolding
-\end{code}
-\begin{code}
-- | Similar to 'exprIsHNF' but includes CONLIKE functions as well as
-- data constructors. Conlike arguments are considered interesting by the
-- inliner.
@@ -1209,18 +1197,17 @@ regarded as HNF if the expression they surround is HNF, because the
tick is there to tell us that the expression was evaluated, so we
don't want to discard a seq on it.
-}
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Instantiating data constructors
-%* *
-%************************************************************************
+* *
+************************************************************************
These InstPat functions go here to avoid circularity between DataCon and Id
+-}
-\begin{code}
dataConRepInstPat :: [Unique] -> DataCon -> [Type] -> ([TyVar], [Id])
dataConRepFSInstPat :: [FastString] -> [Unique] -> DataCon -> [Type] -> ([TyVar], [Id])
@@ -1297,8 +1284,8 @@ dataConInstPat fss uniqs con inst_tys
info | isMarkedStrict str = vanillaIdInfo `setUnfoldingInfo` evaldUnfolding
| otherwise = vanillaIdInfo
-- See Note [Mark evaluated arguments]
-\end{code}
+{-
Note [Mark evaluated arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When pattern matching on a constructor with strict fields, the binder
@@ -1313,13 +1300,13 @@ case in the RHS of the binding for 'v' is fine. But only if we
c.f. add_evals in Simplify.simplAlt
-%************************************************************************
-%* *
+************************************************************************
+* *
Equality
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A cheap equality test which bales out fast!
-- If it returns @True@ the arguments are definitely equal,
-- otherwise, they may or may not be equal.
@@ -1339,9 +1326,7 @@ cheapEqExpr (Cast e1 t1) (Cast e2 t2)
= e1 `cheapEqExpr` e2 && t1 `coreEqCoercion` t2
cheapEqExpr _ _ = False
-\end{code}
-\begin{code}
exprIsBig :: Expr b -> Bool
-- ^ Returns @True@ of expressions that are too big to be compared by 'cheapEqExpr'
exprIsBig (Lit _) = False
@@ -1352,9 +1337,7 @@ exprIsBig (Lam _ e) = exprIsBig e
exprIsBig (App f a) = exprIsBig f || exprIsBig a
exprIsBig (Cast e _) = exprIsBig e -- Hopefully coercions are not too big!
exprIsBig _ = True
-\end{code}
-\begin{code}
eqExpr :: InScopeSet -> CoreExpr -> CoreExpr -> Bool
-- Compares for equality, modulo alpha
eqExpr in_scope e1 e2
@@ -1402,21 +1385,21 @@ eqExpr in_scope e1 e2
go_tickish env (Breakpoint lid lids) (Breakpoint rid rids)
= lid == rid && map (rnOccL env) lids == map (rnOccR env) rids
go_tickish _ l r = l == r
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The size of an expression}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data CoreStats = CS { cs_tm :: Int -- Terms
, cs_ty :: Int -- Types
, cs_co :: Int } -- Coercions
-instance Outputable CoreStats where
+instance Outputable CoreStats where
ppr (CS { cs_tm = i1, cs_ty = i2, cs_co = i3 })
= braces (sep [ptext (sLit "terms:") <+> intWithCommas i1 <> comma,
ptext (sLit "types:") <+> intWithCommas i2 <> comma,
@@ -1471,10 +1454,7 @@ tyStats ty = zeroCS { cs_ty = typeSize ty }
coStats :: Coercion -> CoreStats
coStats co = zeroCS { cs_co = coercionSize co }
-\end{code}
-
-\begin{code}
coreBindsSize :: [CoreBind] -> Int
-- We use coreBindStats for user printout
-- but this one is a quick and dirty basis for
@@ -1518,14 +1498,13 @@ pairSize (b,e) = bndrSize b + exprSize e
altSize :: CoreAlt -> Int
altSize (c,bs,e) = c `seq` bndrsSize bs + exprSize e
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Eta reduction
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Eta reduction conditions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1612,8 +1591,8 @@ It's true that we could also hope to eta reduce these:
(\xy. (f x y) |> g)
But the simplifier pushes those casts outwards, so we don't
need to address that here.
+-}
-\begin{code}
tryEtaReduce :: [Var] -> CoreExpr -> Maybe CoreExpr
tryEtaReduce bndrs body
= go (reverse bndrs) body (mkReflCo Representational (exprType body))
@@ -1627,7 +1606,7 @@ tryEtaReduce bndrs body
-- See Note [Eta reduction with casted arguments]
-- for why we have an accumulating coercion
go [] fun co
- | ok_fun fun
+ | ok_fun fun
, let used_vars = exprFreeVars fun `unionVarSet` tyCoVarsOfCo co
, not (any (`elemVarSet` used_vars) bndrs)
= Just (mkCast fun co) -- Check for any of the binders free in the result
@@ -1654,7 +1633,7 @@ tryEtaReduce bndrs body
| isLocalId fun
, isStrongLoopBreaker (idOccInfo fun) = 0
| arity > 0 = arity
- | isEvaldUnfolding (idUnfolding fun) = 1
+ | isEvaldUnfolding (idUnfolding fun) = 1
-- See Note [Eta reduction of an eval'd function]
| otherwise = 0
where
@@ -1681,28 +1660,28 @@ tryEtaReduce bndrs body
-- The simplifier combines multiple casts into one,
-- so we can have a simple-minded pattern match here
ok_arg _ _ _ = Nothing
-\end{code}
+{-
Note [Eta reduction of an eval'd function]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In Haskell it is not true that f = \x. f x
because f might be bottom, and 'seq' can distinguish them.
-But it *is* true that f = f `seq` \x. f x
+But it *is* true that f = f `seq` \x. f x
and we'd like to simplify the latter to the former. This amounts
-to the rule that
+to the rule that
* when there is just *one* value argument,
* f is not bottom
we can eta-reduce \x. f x ===> f
-This turned up in Trac #7542.
+This turned up in Trac #7542.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Determining non-updatable right-hand-sides}
-%* *
-%************************************************************************
+* *
+************************************************************************
Top-level constructor applications can usually be allocated
statically, but they can't if the constructor, or any of the
@@ -1711,8 +1690,8 @@ labels in other DLLs).
If this happens we simply make the RHS into an updatable thunk,
and 'execute' it rather than allocating it statically.
+-}
-\begin{code}
-- | This function is called only on *top-level* right-hand sides.
-- Returns @True@ if the RHS can be allocated statically in the output,
-- with no thunks involved at all.
@@ -1826,4 +1805,3 @@ rhsIsStatic platform is_dynamic_name rhs = is_static False rhs
= case isDataConWorkId_maybe f of
Just dc -> n_val_args == dataConRepArity dc
Nothing -> False
-\end{code}
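A toy eta-reducer, sketching only the occurs-check condition behind tryEtaReduce and the note on eval'd functions above; none of GHC's arity, coercion, or work-duplication checks appear here, and the Expr type is again a stand-in:

module ToyEta where

data Expr
  = Var String
  | Lam String Expr
  | App Expr Expr
  deriving (Eq, Show)

freeVars :: Expr -> [String]
freeVars (Var v)   = [v]
freeVars (Lam b e) = filter (/= b) (freeVars e)
freeVars (App f a) = freeVars f ++ freeVars a

-- \x -> f x  ===>  f, but only when x does not occur free in f.
etaReduce :: Expr -> Maybe Expr
etaReduce (Lam x (App f (Var y)))
  | x == y, x `notElem` freeVars f = Just f
etaReduce _ = Nothing

-- etaReduce (Lam "x" (App (Var "f") (Var "x"))) == Just (Var "f")
-- etaReduce (Lam "x" (App (Var "x") (Var "x"))) == Nothing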
diff --git a/compiler/coreSyn/MkCore.lhs b/compiler/coreSyn/MkCore.hs
index 81f05338b3..6905641f56 100644
--- a/compiler/coreSyn/MkCore.lhs
+++ b/compiler/coreSyn/MkCore.hs
@@ -1,4 +1,3 @@
-\begin{code}
{-# LANGUAGE CPP #-}
-- | Handy functions for creating much Core syntax
@@ -91,15 +90,15 @@ import Data.Word ( Word )
#endif
infixl 4 `mkCoreApp`, `mkCoreApps`
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Basic CoreSyn construction}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
sortQuantVars :: [Var] -> [Var]
-- Sort the variables (KindVars, TypeVars, and Ids)
-- into order: Kind, then Type, then Id
@@ -219,26 +218,26 @@ castBottomExpr e res_ty
| otherwise = Case e (mkWildValBinder e_ty) res_ty []
where
e_ty = exprType e
-\end{code}
+{-
The functions from this point don't really do anything cleverer than
their counterparts in CoreSyn, but they are here for consistency
+-}
-\begin{code}
-- | Create a lambda where the given expression has a number of variables
-- bound over it. The leftmost binder is that bound by the outermost
-- lambda in the result
mkCoreLams :: [CoreBndr] -> CoreExpr -> CoreExpr
mkCoreLams = mkLams
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Making literals}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Create a 'CoreExpr' which will evaluate to the given @Int@
mkIntExpr :: DynFlags -> Integer -> CoreExpr -- Result = I# i :: Int
mkIntExpr dflags i = mkConApp intDataCon [mkIntLit dflags i]
@@ -295,9 +294,6 @@ mkStringExprFS str
where
chars = unpackFS str
safeChar c = ord c >= 1 && ord c <= 0x7F
-\end{code}
-
-\begin{code}
-- This take a ~# b (or a ~# R b) and returns a ~ b (or Coercible a b)
mkEqBox :: Coercion -> CoreExpr
@@ -310,15 +306,14 @@ mkEqBox co = ASSERT2( typeKind ty2 `eqKind` k, ppr co $$ ppr ty1 $$ ppr ty2 $$ p
Representational -> coercibleDataCon
Phantom -> pprPanic "mkEqBox does not support boxing phantom coercions"
(ppr co)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Tuple constructors}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
-- $big_tuples
-- #big_tuples#
@@ -361,8 +356,7 @@ chunkify xs
split [] = []
split xs = take mAX_TUPLE_SIZE xs : split (drop mAX_TUPLE_SIZE xs)
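A standalone sketch of the chunkify/mkChunkified idea just above: split an oversized list into chunks of at most maxChunk and keep re-combining until a single value is left, so a too-big tuple becomes a nest of smaller ones. Here maxChunk stands in for mAX_TUPLE_SIZE and is made tiny so the nesting is visible; the names are invented for the sketch.

maxChunk :: Int
maxChunk = 3

chunkifySketch :: [a] -> [[a]]
chunkifySketch xs
  | length xs <= maxChunk = [xs]
  | otherwise             = split xs
  where
    split [] = []
    split ys = take maxChunk ys : split (drop maxChunk ys)

mkChunkifiedSketch :: ([a] -> a) -> [a] -> a
mkChunkifiedSketch combine xs
  = case chunkifySketch xs of
      [chunk] -> combine chunk
      chunks  -> mkChunkifiedSketch combine (map combine chunks)

-- mkChunkifiedSketch sum [1..10] == 55
--   [1..10] is chunked as [[1,2,3],[4,5,6],[7,8,9],[10]], each chunk is
--   summed, and the four partial sums are combined the same way again.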
-\end{code}
-
+{-
Creating tuples and their types for Core expressions
@mkBigCoreVarTup@ builds a tuple; the inverse to @mkTupleSelector@.
@@ -371,8 +365,7 @@ Creating tuples and their types for Core expressions
* If there are more elements than a big tuple can have, it nests
the tuples.
-
-\begin{code}
+-}
-- | Build a small tuple holding the specified variables
mkCoreVarTup :: [Id] -> CoreExpr
@@ -404,16 +397,15 @@ mkBigCoreTup = mkChunkified mkCoreTup
-- | Build the type of a big tuple that holds the specified type of thing
mkBigCoreTupTy :: [Type] -> Type
mkBigCoreTupTy = mkChunkified mkBoxedTupleTy
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Floats
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data FloatBind
= FloatLet CoreBind
| FloatCase CoreExpr Id AltCon [Var]
@@ -428,15 +420,15 @@ instance Outputable FloatBind where
wrapFloat :: FloatBind -> CoreExpr -> CoreExpr
wrapFloat (FloatLet defns) body = Let defns body
wrapFloat (FloatCase e b con bs) body = Case e b (exprType body) [(con, bs, body)]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Tuple destructors}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Builds a selector which scrutinises the given
-- expression and extracts the one name from the list given.
-- If you want the no-shadowing rule to apply, the caller
@@ -475,9 +467,7 @@ mkTupleSelector vars the_var scrut_var scrut
tpl_vs = mkTemplateLocals tpl_tys
[(tpl_v, group)] = [(tpl,gp) | (tpl,gp) <- zipEqual "mkTupleSelector" tpl_vs vars_s,
the_var `elem` gp ]
-\end{code}
-\begin{code}
-- | Like 'mkTupleSelector' but for tuples that are guaranteed
-- never to be \"big\".
--
@@ -495,9 +485,7 @@ mkSmallTupleSelector vars the_var scrut_var scrut
= ASSERT( notNull vars )
Case scrut scrut_var (idType the_var)
[(DataAlt (tupleCon BoxedTuple (length vars)), vars, Var the_var)]
-\end{code}
-\begin{code}
-- | A generalization of 'mkTupleSelector', allowing the body
-- of the case to be an arbitrary expression.
--
@@ -535,9 +523,7 @@ mkTupleCase uniqs vars body scrut_var scrut
(mkBoxedTupleTy (map idType chunk_vars))
body' = mkSmallTupleCase chunk_vars body scrut_var (Var scrut_var)
in (us', scrut_var:vs, body')
-\end{code}
-\begin{code}
-- | As 'mkTupleCase', but for a tuple that is small enough to be guaranteed
-- not to need nesting.
mkSmallTupleCase
@@ -552,18 +538,18 @@ mkSmallTupleCase [var] body _scrut_var scrut
mkSmallTupleCase vars body scrut_var scrut
-- One branch no refinement?
= Case scrut scrut_var (exprType body) [(DataAlt (tupleCon BoxedTuple (length vars)), vars, body)]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Common list manipulation expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
Call the constructor Ids when building explicit lists, so that they
interact well with rules.
+-}
-\begin{code}
-- | Makes a list @[]@ for lists of the specified type
mkNilExpr :: Type -> CoreExpr
mkNilExpr ty = mkConApp nilDataCon [Type ty]
@@ -613,16 +599,15 @@ mkBuildExpr elt_ty mk_build_inside = do
newTyVars tyvar_tmpls = do
uniqs <- getUniquesM
return (zipWith setTyVarUnique tyvar_tmpls uniqs)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Error expressions
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkRuntimeErrorApp
:: Id -- Should be of type (forall a. Addr# -> a)
-- where Addr# points to a UTF8 encoded string
@@ -638,13 +623,13 @@ mkRuntimeErrorApp err_id res_ty err_msg
mkImpossibleExpr :: Type -> CoreExpr
mkImpossibleExpr res_ty
= mkRuntimeErrorApp rUNTIME_ERROR_ID res_ty "Impossible case alternative"
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Error Ids
-%* *
-%************************************************************************
+* *
+************************************************************************
GHC randomly injects these into the code.
@@ -660,8 +645,8 @@ crash).
@parError@ is a special version of @error@ which the compiler does
not know to be a bottoming Id. It is used in the @_par_@ and @_seq_@
templates, but we don't ever expect to generate code for it.
+-}
-\begin{code}
errorIds :: [Id]
errorIds
= [ eRROR_ID, -- This one isn't used anywhere else in the compiler
@@ -719,9 +704,7 @@ mkRuntimeErrorId name = pc_bottoming_Id1 name runtimeErrorTy
runtimeErrorTy :: Type
-- The runtime error Ids take a UTF8-encoded string as argument
runtimeErrorTy = mkSigmaTy [openAlphaTyVar] [] (mkFunTy addrPrimTy openAlphaTy)
-\end{code}
-\begin{code}
errorName :: Name
errorName = mkWiredInIdName gHC_ERR (fsLit "error") errorIdKey eRROR_ID
@@ -739,8 +722,8 @@ uNDEFINED_ID = pc_bottoming_Id0 undefinedName undefinedTy
undefinedTy :: Type -- See Note [Error and friends have an "open-tyvar" forall]
undefinedTy = mkSigmaTy [openAlphaTyVar] [] openAlphaTy
-\end{code}
+{-
Note [Error and friends have an "open-tyvar" forall]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'error' and 'undefined' have types
@@ -754,13 +737,13 @@ This is OK because it never returns, so the return type is irrelevant.
See Note [OpenTypeKind accepts foralls] in TcUnify.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Utilities}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
pc_bottoming_Id1 :: Name -> Type -> Id
-- Function of arity 1, which diverges after being given one argument
pc_bottoming_Id1 name ty
@@ -789,4 +772,3 @@ pc_bottoming_Id0 name ty
where
bottoming_info = vanillaIdInfo `setStrictnessInfo` strict_sig
strict_sig = mkClosedStrictSig [] botRes
-\end{code}
diff --git a/compiler/coreSyn/PprCore.lhs b/compiler/coreSyn/PprCore.hs
index 593c670cae..acc6c79fa1 100644
--- a/compiler/coreSyn/PprCore.lhs
+++ b/compiler/coreSyn/PprCore.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The AQUA Project, Glasgow University, 1996-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The AQUA Project, Glasgow University, 1996-1998
+
Printing of Core syntax
+-}
-\begin{code}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module PprCore (
pprCoreExpr, pprParendExpr,
@@ -29,17 +29,17 @@ import BasicTypes
import Util
import Outputable
import FastString
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Public interfaces for Core printing (excluding instances)}
-%* *
-%************************************************************************
+* *
+************************************************************************
@pprParendCoreExpr@ puts parens around non-atomic Core expressions.
+-}
-\begin{code}
pprCoreBindings :: OutputableBndr b => [Bind b] -> SDoc
pprCoreBinding :: OutputableBndr b => Bind b -> SDoc
pprCoreExpr :: OutputableBndr b => Expr b -> SDoc
@@ -53,16 +53,15 @@ instance OutputableBndr b => Outputable (Bind b) where
instance OutputableBndr b => Outputable (Expr b) where
ppr expr = pprCoreExpr expr
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The guts}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
pprTopBinds :: OutputableBndr a => [Bind a] -> SDoc
pprTopBinds binds = vcat (map pprTopBind binds)
@@ -78,9 +77,7 @@ pprTopBind (Rec (b:bs))
vcat [blankLine $$ ppr_binding b | b <- bs],
ptext (sLit "end Rec }"),
blankLine]
-\end{code}
-\begin{code}
ppr_bind :: OutputableBndr b => Bind b -> SDoc
ppr_bind (NonRec val_bdr expr) = ppr_binding (val_bdr, expr)
@@ -92,17 +89,13 @@ ppr_binding :: OutputableBndr b => (b, Expr b) -> SDoc
ppr_binding (val_bdr, expr)
= pprBndr LetBind val_bdr $$
hang (ppr val_bdr <+> equals) 2 (pprCoreExpr expr)
-\end{code}
-\begin{code}
pprParendExpr expr = ppr_expr parens expr
pprCoreExpr expr = ppr_expr noParens expr
noParens :: SDoc -> SDoc
noParens pp = pp
-\end{code}
-\begin{code}
ppr_expr :: OutputableBndr b => (SDoc -> SDoc) -> Expr b -> SDoc
-- The function adds parens in context that need
-- an atomic value (e.g. function args)
@@ -158,7 +151,7 @@ ppr_expr add_par (Case expr var ty [(con,args,rhs)])
= sdocWithDynFlags $ \dflags ->
if gopt Opt_PprCaseAsLet dflags
then add_par $ -- See Note [Print case as let]
- sep [ sep [ ptext (sLit "let! {")
+ sep [ sep [ ptext (sLit "let! {")
<+> ppr_case_pat con args
<+> ptext (sLit "~")
<+> ppr_bndr var
@@ -252,23 +245,23 @@ pprArg (Type ty)
else ptext (sLit "@") <+> pprParendType ty
pprArg (Coercion co) = ptext (sLit "@~") <+> pprParendCo co
pprArg expr = pprParendExpr expr
-\end{code}
+{-
Note [Print case as let]
~~~~~~~~~~~~~~~~~~~~~~~~
Single-branch case expressions are very common:
- case x of y { I# x' ->
+ case x of y { I# x' ->
case p of q { I# p' -> ... } }
These are, in effect, just strict lets, with pattern matching.
With -dppr-case-as-let we print them as such:
let! { I# x' ~ y <- x } in
let! { I# p' ~ q <- p } in ...
-
+
Other printing bits-and-bobs used with the general @pprCoreBinding@
and @pprCoreExpr@ functions.
+-}
-\begin{code}
instance OutputableBndr Var where
pprBndr = pprCoreBinder
pprInfixOcc = pprInfixName . varName
@@ -351,7 +344,7 @@ pprIdBndrInfo info
has_prag = not (isDefaultInlinePragma prag_info)
has_occ = not (isNoOcc occ_info)
- has_dmd = not $ isTopDmd dmd_info
+ has_dmd = not $ isTopDmd dmd_info
has_lbv = not (hasNoOneShotInfo lbv_info)
doc = showAttributes
@@ -360,14 +353,13 @@ pprIdBndrInfo info
, (has_dmd, ptext (sLit "Dmd=") <> ppr dmd_info)
, (has_lbv , ptext (sLit "OS=") <> ppr lbv_info)
]
-\end{code}
-
+{-
-----------------------------------------------------
-- IdDetails and IdInfo
-----------------------------------------------------
+-}
-\begin{code}
ppIdInfo :: Id -> IdInfo -> SDoc
ppIdInfo id info
= sdocWithDynFlags $ \dflags ->
@@ -412,13 +404,13 @@ showAttributes stuff
| otherwise = brackets (sep (punctuate comma docs))
where
docs = [d | (True,d) <- stuff]
-\end{code}
+{-
-----------------------------------------------------
-- Unfolding and UnfoldingGuidance
-----------------------------------------------------
+-}
-\begin{code}
instance Outputable UnfoldingGuidance where
ppr UnfNever = ptext (sLit "NEVER")
ppr (UnfWhen { ug_arity = arity, ug_unsat_ok = unsat_ok, ug_boring_ok = boring_ok })
@@ -441,7 +433,7 @@ instance Outputable Unfolding where
ppr NoUnfolding = ptext (sLit "No unfolding")
ppr (OtherCon cs) = ptext (sLit "OtherCon") <+> ppr cs
ppr (DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args })
- = hang (ptext (sLit "DFun:") <+> ptext (sLit "\\")
+ = hang (ptext (sLit "DFun:") <+> ptext (sLit "\\")
<+> sep (map (pprBndr LambdaBind) bndrs) <+> arrow)
2 (ppr con <+> sep (map ppr args))
ppr (CoreUnfolding { uf_src = src
@@ -463,13 +455,13 @@ instance Outputable Unfolding where
| otherwise = empty
-- Don't print the RHS or we get a quadratic
-- blowup in the size of the printout!
-\end{code}
+{-
-----------------------------------------------------
-- Rules
-----------------------------------------------------
+-}
-\begin{code}
instance Outputable CoreRule where
ppr = pprRule
@@ -489,13 +481,13 @@ pprRule (Rule { ru_name = name, ru_act = act, ru_fn = fn,
nest 2 (ppr fn <+> sep (map pprArg tpl_args)),
nest 2 (ptext (sLit "=") <+> pprCoreExpr rhs)
])
-\end{code}
+{-
-----------------------------------------------------
-- Tickish
-----------------------------------------------------
+-}
-\begin{code}
instance Outputable id => Outputable (Tickish id) where
ppr (HpcTick modl ix) =
hcat [ptext (sLit "tick<"),
@@ -514,13 +506,13 @@ instance Outputable id => Outputable (Tickish id) where
(True,True) -> hcat [ptext (sLit "scctick<"), ppr cc, char '>']
(True,False) -> hcat [ptext (sLit "tick<"), ppr cc, char '>']
_ -> hcat [ptext (sLit "scc<"), ppr cc, char '>']
-\end{code}
+{-
-----------------------------------------------------
-- Vectorisation declarations
-----------------------------------------------------
+-}
-\begin{code}
instance Outputable CoreVect where
ppr (Vect var e) = hang (ptext (sLit "VECTORISE") <+> ppr var <+> char '=')
4 (pprCoreExpr e)
@@ -533,4 +525,3 @@ instance Outputable CoreVect where
char '=' <+> ppr tc
ppr (VectClass tc) = ptext (sLit "VECTORISE class") <+> ppr tc
ppr (VectInst var) = ptext (sLit "VECTORISE SCALAR instance") <+> ppr var
-\end{code}
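
Note [Print case as let] above describes the -dppr-case-as-let rendering of single-alternative cases. As a rough illustration of the output shape only (not GHC's real pretty-printer; the Expr type and module name below are made up for the example), a standalone sketch:

-- A toy rendering of the single-alternative case described in
-- Note [Print case as let]; Expr here is far simpler than CoreExpr.
module CaseAsLet where

data Expr
  = Var String
  | Case Expr                        -- scrutinee
         String                      -- case binder
         (String, [String], Expr)    -- the single alternative: (con, args, rhs)

-- Print "case scrut of bndr { Con args -> rhs }" in the let!-style used
-- by -dppr-case-as-let:  let! { Con args ~ bndr <- scrut } in rhs
render :: Expr -> String
render (Var x) = x
render (Case scrut bndr (con, args, rhs)) =
  "let! { " ++ unwords (con : args) ++ " ~ " ++ bndr
            ++ " <- " ++ render scrut ++ " } in " ++ render rhs

main :: IO ()
main = putStrLn (render (Case (Var "x") "y" ("I#", ["x'"], Var "x'")))
-- prints:  let! { I# x' ~ y <- x } in x'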
diff --git a/compiler/coreSyn/TrieMap.lhs b/compiler/coreSyn/TrieMap.hs
index d552506b10..57f360e181 100644
--- a/compiler/coreSyn/TrieMap.lhs
+++ b/compiler/coreSyn/TrieMap.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
-\begin{code}
{-# LANGUAGE RankNTypes, TypeFamilies #-}
module TrieMap(
CoreMap, emptyCoreMap, extendCoreMap, lookupCoreMap, foldCoreMap,
@@ -34,8 +33,8 @@ import VarEnv
import NameEnv
import Outputable
import Control.Monad( (>=>) )
-\end{code}
+{-
This module implements TrieMaps, which are finite mappings
whose key is a structured value like a CoreExpr or Type.
@@ -43,13 +42,13 @@ The code is very regular and boilerplate-like, but there is
some neat handling of *binders*. In effect they are deBruijn
numbered on the fly.
-%************************************************************************
-%* *
+************************************************************************
+* *
The TrieMap class
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type XT a = Maybe a -> Maybe a -- How to alter a non-existent elt (Nothing)
-- or an existing elt (Just)
@@ -94,15 +93,15 @@ x |> f = f x
deMaybe :: TrieMap m => Maybe (m a) -> m a
deMaybe Nothing = emptyTM
deMaybe (Just m) = m
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
IntMaps
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance TrieMap IntMap.IntMap where
type Key IntMap.IntMap = Int
emptyTM = IntMap.empty
@@ -129,19 +128,18 @@ instance TrieMap UniqFM where
alterTM k f m = alterUFM f m k
foldTM k m z = foldUFM k z m
mapTM f m = mapUFM f m
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Lists
-%* *
-%************************************************************************
+* *
+************************************************************************
If m is a map from k -> val
then (MaybeMap m) is a map from (Maybe k) -> val
+-}
-\begin{code}
data MaybeMap m a = MM { mm_nothing :: Maybe a, mm_just :: m a }
instance TrieMap m => TrieMap (MaybeMap m) where
@@ -205,16 +203,15 @@ fdList k m = foldMaybe k (lm_nil m)
foldMaybe :: (a -> b -> b) -> Maybe a -> b -> b
foldMaybe _ Nothing b = b
foldMaybe k (Just a) b = k a b
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Basic maps
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
lkNamed :: NamedThing n => n -> NameEnv a -> Maybe a
lkNamed n env = lookupNameEnv env (getName n)
@@ -232,13 +229,13 @@ lkLit = lookupTM
xtLit :: Literal -> XT a -> LiteralMap a -> LiteralMap a
xtLit = alterTM
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
CoreMap
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Binders]
~~~~~~~~~~~~~~
@@ -268,8 +265,8 @@ is that it's unnecessary, so we have two fields (cm_case and cm_ecase)
for the two possibilities. Only cm_ecase looks at the type.
See also Note [Empty case alternatives] in CoreSyn.
+-}
-\begin{code}
data CoreMap a
= EmptyCM
| CM { cm_var :: VarMap a
@@ -449,15 +446,15 @@ fdA :: (a -> b -> b) -> AltMap a -> b -> b
fdA k m = foldTM k (am_deflt m)
. foldTM (foldTM k) (am_data m)
. foldTM (foldTM k) (am_lit m)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Coercions
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data CoercionMap a
= EmptyKM
| KM { km_refl :: RoleMap (TypeMap a)
@@ -586,10 +583,6 @@ fdC k m = foldTM (foldTM k) (km_refl m)
. foldTM k (km_sub m)
. foldTM (foldTM (foldTM k)) (km_axiom_rule m)
-\end{code}
-
-\begin{code}
-
newtype RoleMap a = RM { unRM :: (IntMap.IntMap a) }
instance TrieMap RoleMap where
@@ -616,16 +609,14 @@ fdR f (RM m) = foldTM f m
mapR :: (a -> b) -> RoleMap a -> RoleMap b
mapR f = RM . mapTM f . unRM
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Types
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data TypeMap a
= EmptyTM
| TM { tm_var :: VarMap a
@@ -764,16 +755,15 @@ xtTyLit l f m =
foldTyLit :: (a -> b -> b) -> TyLitMap a -> b -> b
foldTyLit l m = flip (Map.fold l) (tlm_string m)
. flip (Map.fold l) (tlm_number m)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Variables
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type BoundVar = Int -- Bound variables are deBruijn numbered
type BoundVarMap a = IntMap.IntMap a
@@ -837,4 +827,3 @@ lkFreeVar var env = lookupVarEnv env var
xtFreeVar :: Var -> XT a -> VarEnv a -> VarEnv a
xtFreeVar v f m = alterVarEnv f m v
-\end{code}
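
The TrieMap class above (emptyTM / lookupTM / alterTM, with XT saying how to change a possibly-absent entry) builds maps keyed on structured values by composing maps over the key's components. A minimal self-contained sketch of the same pattern, assuming only Data.Map from containers; it is a simplified illustration, not the real module, which also provides foldTM/mapTM and deBruijn-numbers binders:

{-# LANGUAGE TypeFamilies #-}
-- A small trie keyed on String, in the style of ListMap/MaybeMap above.
module TrieSketch where

import qualified Data.Map as Map

type XT a = Maybe a -> Maybe a   -- how to alter a missing/existing entry

class TrieMap m where
  type Key m
  emptyTM  :: m a
  lookupTM :: Key m -> m a -> Maybe a
  alterTM  :: Key m -> XT a -> m a -> m a

-- One slot for the empty key, and for a non-empty key a map from the head
-- character to a trie over the tail.
data ListMap a = LM { lm_nil  :: Maybe a
                    , lm_cons :: Map.Map Char (ListMap a) }

instance TrieMap ListMap where
  type Key ListMap = String
  emptyTM = LM Nothing Map.empty
  lookupTM []     m = lm_nil m
  lookupTM (c:cs) m = Map.lookup c (lm_cons m) >>= lookupTM cs
  alterTM []     f m = m { lm_nil = f (lm_nil m) }
  alterTM (c:cs) f m =
    m { lm_cons = Map.alter (Just . alterTM cs f . maybe emptyTM id)
                            c (lm_cons m) }

insertTM :: TrieMap m => Key m -> a -> m a -> m a
insertTM k v = alterTM k (const (Just v))

main :: IO ()
main = print (lookupTM "core" (insertTM "core" (1 :: Int) (emptyTM :: ListMap Int)))
-- prints:  Just 1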
diff --git a/compiler/hsSyn/Convert.lhs b/compiler/hsSyn/Convert.hs
index 56282db541..8dc60d6831 100644
--- a/compiler/hsSyn/Convert.lhs
+++ b/compiler/hsSyn/Convert.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
This module converts Template Haskell syntax into HsSyn
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module Convert( convertToHsExpr, convertToPat, convertToHsDecls,
@@ -1140,7 +1140,7 @@ okOcc ns str
| OccName.isVarNameSpace ns = okVarOcc str
| OccName.isDataConNameSpace ns = okConOcc str
| otherwise = okTcOcc str
-
+
-- Determine the name space of a name in a type
--
isVarName :: TH.Name -> Bool
@@ -1216,8 +1216,8 @@ mk_pkg pkg = stringToPackageKey (TH.pkgString pkg)
mk_uniq :: Int -> Unique
mk_uniq u = mkUniqueGrimily u
-\end{code}
+{-
Note [Binders in Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this TH term construction:
@@ -1270,3 +1270,4 @@ the way System Names are printed.
There's a small complication of course; see Note [Looking up Exact
RdrNames] in RnEnv.
+-}
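
Convert turns Template Haskell's own syntax tree back into HsSyn when a splice is run. A tiny round-trip example, assuming only the standard Language.Haskell.TH API (illustrative; it never calls Convert directly, the compiler does that when it evaluates the splice, and the module name is made up):

{-# LANGUAGE TemplateHaskell #-}
module ConvertDemo where

import Language.Haskell.TH

main :: IO ()
main = do
  e <- runQ [| \x -> x + 1 |]                   -- build TH syntax for (\x -> x + 1)
  print e                                        -- show the raw TH Exp
  print ($( [| (\x -> x + 1) (41 :: Int) |] ))   -- running the splice converts TH back to HsSyn; prints 42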
diff --git a/compiler/hsSyn/HsBinds.lhs b/compiler/hsSyn/HsBinds.hs
index cc68870ce5..555139ac12 100644
--- a/compiler/hsSyn/HsBinds.lhs
+++ b/compiler/hsSyn/HsBinds.hs
@@ -1,12 +1,12 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[HsBinds]{Abstract syntax: top-level bindings and signatures}
Datatype for: @BindGroup@, @Bind@, @Sig@, @Bind@.
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
@@ -50,17 +50,17 @@ import Control.Applicative hiding (empty)
#else
import Control.Applicative ((<$>))
#endif
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Bindings: @BindGroup@}
-%* *
-%************************************************************************
+* *
+************************************************************************
Global bindings (where clauses)
+-}
-\begin{code}
-- During renaming, we need bindings where the left-hand sides
-- have been renamed but the right-hand sides have not.
-- the ...LR datatypes are parametrized by two id types,
@@ -234,8 +234,7 @@ data PatSynBind idL idR
deriving instance (DataId idL, DataId idR )
=> Data (PatSynBind idL idR)
-\end{code}
-
+{-
Note [AbsBinds]
~~~~~~~~~~~~~~~
The AbsBinds constructor is used in the output of the type checker, to record
@@ -333,8 +332,8 @@ Specifically,
* Before renaming, and after typechecking, the field is unused;
it's just an error thunk
+-}
-\begin{code}
instance (OutputableBndr idL, OutputableBndr idR) => Outputable (HsLocalBindsLR idL idR) where
ppr (HsValBinds bs) = ppr bs
ppr (HsIPBinds bs) = ppr bs
@@ -427,8 +426,8 @@ getTypeSigNames (ValBindsOut _ sigs)
= mkNameSet [unLoc n | L _ (TypeSig names _ _) <- sigs, n <- names]
getTypeSigNames _
= panic "HsBinds.getTypeSigNames"
-\end{code}
+{-
What AbsBinds means
~~~~~~~~~~~~~~~~~~~
AbsBinds tvs
@@ -452,8 +451,8 @@ So the desugarer tries to do a better job:
tp = /\ [a,b] -> \ [d1,d2] -> letrec DBINDS and BIND
in (fm,gm)
+-}
-\begin{code}
instance (OutputableBndr idL, OutputableBndr idR) => Outputable (HsBindLR idL idR) where
ppr mbind = ppr_monobind mbind
@@ -507,10 +506,7 @@ instance (OutputableBndr idL, OutputableBndr idR) => Outputable (PatSynBind idL
ImplicitBidirectional -> ppr_simple equals
ExplicitBidirectional mg -> ppr_simple (ptext (sLit "<-")) <+> ptext (sLit "where") $$
(nest 2 $ pprFunBind psyn is_infix mg)
-\end{code}
-
-\begin{code}
pprTicks :: SDoc -> SDoc -> SDoc
-- Print stuff about ticks only when -dppr-debug is on, to avoid
-- them appearing in error messages (from the desugarer); see Trac # 3263
@@ -520,15 +516,15 @@ pprTicks pp_no_debug pp_when_debug
= getPprStyle (\ sty -> if debugStyle sty || dumpStyle sty
then pp_when_debug
else pp_no_debug)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Implicit parameter bindings
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data HsIPBinds id
= IPBinds
[LIPBind id]
@@ -565,21 +561,20 @@ instance (OutputableBndr id) => Outputable (IPBind id) where
where name = case lr of
Left ip -> pprBndr LetBind ip
Right id -> pprBndr LetBind id
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{@Sig@: type signatures and value-modifying user pragmas}
-%* *
-%************************************************************************
+* *
+************************************************************************
It is convenient to lump ``value-modifying'' user-pragmas (e.g.,
``specialise this function to these four types...'') in with type
signatures. Then all the machinery to move them into place, etc.,
serves for both.
+-}
-\begin{code}
type LSig name = Located (Sig name)
-- | Signatures and pragmas
@@ -769,13 +764,13 @@ hsSigDoc (InlineSig _ prag) = ppr (inlinePragmaSpec prag) <+> ptext (sLit "p
hsSigDoc (SpecInstSig {}) = ptext (sLit "SPECIALISE instance pragma")
hsSigDoc (FixSig {}) = ptext (sLit "fixity declaration")
hsSigDoc (MinimalSig {}) = ptext (sLit "MINIMAL pragma")
-\end{code}
+{-
Check if signatures overlap; this is used when checking for duplicate
signatures. Since some of the signatures contain a list of names, testing for
equality is not enough -- we have to check if they overlap.
+-}
-\begin{code}
instance (OutputableBndr name) => Outputable (Sig name) where
ppr sig = ppr_sig sig
@@ -835,15 +830,15 @@ instance Outputable TcSpecPrag where
pprMinimalSig :: OutputableBndr name => BooleanFormula (Located name) -> SDoc
pprMinimalSig bf = ptext (sLit "MINIMAL") <+> ppr (fmap unLoc bf)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[PatSynBind]{A pattern synonym definition}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data HsPatSynDetails a
= InfixPatSyn a a
| PrefixPatSyn [a]
@@ -885,4 +880,3 @@ data HsPatSynDir id
| ExplicitBidirectional (MatchGroup id (LHsExpr id))
deriving (Typeable)
deriving instance (DataId id) => Data (HsPatSynDir id)
-\end{code}
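
The PatSynBind printer above distinguishes the three pattern-synonym directions (unidirectional, ImplicitBidirectional, ExplicitBidirectional). In source syntax, with PatternSynonyms enabled, the three shapes look roughly like this (illustrative only; the module name is made up):

{-# LANGUAGE PatternSynonyms #-}
module PatSynForms where

-- Unidirectional: usable only as a pattern (printed with "<-").
pattern Head x <- (x:_)

-- Implicitly bidirectional: one equation serves as both pattern and
-- builder (printed with "=").
pattern Single x = [x]

-- Explicitly bidirectional: pattern and builder given separately
-- (printed with "<-" followed by a "where" clause).
pattern Pair x y <- (x:y:_)
  where Pair x y = [x, y]

main :: IO ()
main = case (Pair 1 2 :: [Int]) of
         Head h -> print h          -- prints 1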
diff --git a/compiler/hsSyn/HsDecls.lhs b/compiler/hsSyn/HsDecls.hs
index f4e5a46bc5..f81d0a1ece 100644
--- a/compiler/hsSyn/HsDecls.lhs
+++ b/compiler/hsSyn/HsDecls.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable, DeriveFunctor, DeriveFoldable,
DeriveTraversable #-}
@@ -107,15 +106,15 @@ import Data.Foldable ( Foldable )
import Data.Traversable ( Traversable )
#endif
import Data.Maybe
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[HsDecl]{Declarations}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type LHsDecl id = Located (HsDecl id)
-- ^ When in a list this may have
--
@@ -246,9 +245,7 @@ appendGroups
hs_ruleds = rulds1 ++ rulds2,
hs_vects = vects1 ++ vects2,
hs_docs = docs1 ++ docs2 }
-\end{code}
-\begin{code}
instance OutputableBndr name => Outputable (HsDecl name) where
ppr (TyClD dcl) = ppr dcl
ppr (ValD binds) = ppr binds
@@ -315,14 +312,13 @@ deriving instance (DataId id) => Data (SpliceDecl id)
instance OutputableBndr name => Outputable (SpliceDecl name) where
ppr (SpliceDecl (L _ e) _) = pprUntypedSplice e
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[SynDecl]{@data@, @newtype@ or @type@ (synonym) type declaration}
-%* *
-%************************************************************************
+* *
+************************************************************************
--------------------------------
THE NAMING STORY
@@ -455,9 +451,8 @@ Interface file code:
- RnHsSyn.instDeclFVs treats the dict fun name as free in the decl, so that we
suck in the dfun binding
+-}
-
-\begin{code}
type LTyClDecl name = Located (TyClDecl name)
-- | A type or class declaration.
@@ -555,12 +550,11 @@ data FamilyInfo name
deriving( Typeable )
deriving instance (DataId name) => Data (FamilyInfo name)
-\end{code}
-
+{-
------------------------------
Simple classifiers
+-}
-\begin{code}
-- | @True@ <=> argument is a @data@\/@newtype@
-- declaration.
isDataDecl :: TyClDecl name -> Bool
@@ -605,11 +599,8 @@ isDataFamilyDecl :: TyClDecl name -> Bool
isDataFamilyDecl (FamDecl (FamilyDecl { fdInfo = DataFamily })) = True
isDataFamilyDecl _other = False
-\end{code}
-
-Dealing with names
+-- Dealing with names
-\begin{code}
tyFamInstDeclName :: OutputableBndr name
=> TyFamInstDecl name -> name
tyFamInstDeclName = unLoc . tyFamInstDeclLName
@@ -630,9 +621,7 @@ tcdName = unLoc . tyClDeclLName
tyClDeclTyVars :: OutputableBndr name => TyClDecl name -> LHsTyVarBndrs name
tyClDeclTyVars (FamDecl { tcdFam = FamilyDecl { fdTyVars = tvs } }) = tvs
tyClDeclTyVars d = tcdTyVars d
-\end{code}
-\begin{code}
countTyClDecls :: [TyClDecl name] -> (Int, Int, Int, Int, Int)
-- class, synonym decls, data, newtype, family decls
countTyClDecls decls
@@ -669,8 +658,8 @@ famDeclHasCusk (FamilyDecl { fdInfo = ClosedTypeFamily _
, fdKindSig = m_sig })
= hsTvbAllKinded tyvars && isJust m_sig
famDeclHasCusk _ = True -- all open families have CUSKs!
-\end{code}
+{-
Note [Complete user-supplied kind signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We kind-check declarations differently if they have a complete, user-supplied
@@ -693,8 +682,8 @@ RHS are annotated with kinds.
variables and its return type are annotated.
- An open type family always has a CUSK -- unannotated type variables (and return type) default to *.
+-}
-\begin{code}
instance OutputableBndr name
=> Outputable (TyClDecl name) where
@@ -777,15 +766,14 @@ pprTyClDeclFlavour (FamDecl { tcdFam = FamilyDecl { fdInfo = info }})
= pprFlavour info
pprTyClDeclFlavour (DataDecl { tcdDataDefn = HsDataDefn { dd_ND = nd } })
= ppr nd
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[ConDecl]{A data-constructor declaration}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
data HsDataDefn name -- The payload of a data type defn
-- Used *both* for vanilla data declarations,
@@ -920,10 +908,7 @@ instance Outputable ty => Outputable (ResType ty) where
-- Debugging only
ppr ResTyH98 = ptext (sLit "ResTyH98")
ppr (ResTyGADT ty) = ptext (sLit "ResTyGADT") <+> ppr ty
-\end{code}
-
-\begin{code}
pp_data_defn :: OutputableBndr name
=> (HsContext name -> SDoc) -- Printing the header
-> HsDataDefn name
@@ -1005,13 +990,13 @@ instance (Outputable name) => OutputableBndr [Located name] where
pprInfixOcc [x] = ppr x
pprInfixOcc xs = cat $ punctuate comma (map ppr xs)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Instance declarations
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Type family instance declarations in HsSyn]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1029,8 +1014,8 @@ It is parameterised over its tfe_pats field:
type T a b
type T a b = a -> b -- The default instance
It is represented by a TyFamDefltEqn, with *type variables* in the tfe_pats field.
+-}
-\begin{code}
----------------- Type synonym family instances -------------
type LTyFamInstEqn name = Located (TyFamInstEqn name)
-- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi'
@@ -1125,8 +1110,8 @@ data InstDecl name -- Both class and family instances
{ tfid_inst :: TyFamInstDecl name }
deriving (Typeable)
deriving instance (DataId id) => Data (InstDecl id)
-\end{code}
+{-
Note [Family instance declaration binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A {Ty|Data}FamInstDecl is a data/type family instance declaration
@@ -1145,8 +1130,8 @@ tvs are fv(pat_tys), *including* ones that are already in scope
type F (a8,b9) x10 = x10->a8
so that we can compare the type pattern in the 'instance' decl and
in the associated 'type' decl
+-}
-\begin{code}
instance (OutputableBndr name) => Outputable (TyFamInstDecl name) where
ppr = pprTyFamInstDecl TopLevel
@@ -1231,15 +1216,15 @@ instDeclDataFamInsts inst_decls
= map unLoc fam_insts
do_one (L _ (DataFamInstD { dfid_inst = fam_inst })) = [fam_inst]
do_one (L _ (TyFamInstD {})) = []
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[DerivDecl]{A stand-alone instance deriving declaration}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type LDerivDecl name = Located (DerivDecl name)
data DerivDecl name = DerivDecl
@@ -1256,19 +1241,19 @@ deriving instance (DataId name) => Data (DerivDecl name)
instance (OutputableBndr name) => Outputable (DerivDecl name) where
ppr (DerivDecl ty o)
= hsep [ptext (sLit "deriving instance"), ppOverlapPragma o, ppr ty]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[DefaultDecl]{A @default@ declaration}
-%* *
-%************************************************************************
+* *
+************************************************************************
There can only be one default declaration per module, but it is hard
for the parser to check that; we pass them all through in the abstract
syntax, and that restriction must be checked in the front end.
+-}
-\begin{code}
type LDefaultDecl name = Located (DefaultDecl name)
data DefaultDecl name
@@ -1284,15 +1269,14 @@ instance (OutputableBndr name)
ppr (DefaultDecl tys)
= ptext (sLit "default") <+> parens (interpp'SP tys)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Foreign function interface declaration}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
-- foreign declarations are distinguished as to whether they define or use a
-- Haskell name
@@ -1408,16 +1392,15 @@ instance Outputable ForeignImport where
instance Outputable ForeignExport where
ppr (CExport (L _ (CExportStatic lbl cconv)) _) =
ppr cconv <+> char '"' <> ppr lbl <> char '"'
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Transformation rules}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type LRuleDecl name = Located (RuleDecl name)
data RuleDecl name
@@ -1464,14 +1447,13 @@ instance OutputableBndr name => Outputable (RuleDecl name) where
instance OutputableBndr name => Outputable (RuleBndr name) where
ppr (RuleBndr name) = ppr name
ppr (RuleBndrSig name ty) = ppr name <> dcolon <> ppr ty
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Vectorisation declarations}
-%* *
-%************************************************************************
+* *
+************************************************************************
A vectorisation pragma, one of
@@ -1481,8 +1463,8 @@ A vectorisation pragma, one of
{-# VECTORISE type T = ty #-}
{-# VECTORISE SCALAR type T #-}
+-}
-\begin{code}
type LVectDecl name = Located (VectDecl name)
data VectDecl name
@@ -1565,15 +1547,14 @@ instance OutputableBndr name => Outputable (VectDecl name) where
= sep [text "{-# VECTORISE SCALAR instance" <+> ppr ty <+> text "#-}" ]
ppr (HsVectInstOut i)
= sep [text "{-# VECTORISE SCALAR instance" <+> ppr i <+> text "#-}" ]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[DocDecl]{Document comments}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
type LDocDecl = Located (DocDecl)
@@ -1594,17 +1575,16 @@ docDeclDoc (DocCommentPrev d) = d
docDeclDoc (DocCommentNamed _ d) = d
docDeclDoc (DocGroup _ d) = d
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[DeprecDecl]{Deprecations}
-%* *
-%************************************************************************
+* *
+************************************************************************
We use exported entities for things to deprecate.
+-}
-\begin{code}
type LWarnDecl name = Located (WarnDecl name)
data WarnDecl name = Warning name WarningTxt
@@ -1613,15 +1593,15 @@ data WarnDecl name = Warning name WarningTxt
instance OutputableBndr name => Outputable (WarnDecl name) where
ppr (Warning thing txt)
= hsep [text "{-# DEPRECATED", ppr thing, doubleQuotes (ppr txt), text "#-}"]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[AnnDecl]{Annotations}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type LAnnDecl name = Located (AnnDecl name)
data AnnDecl name = HsAnnotation (AnnProvenance name) (Located (HsExpr name))
@@ -1651,15 +1631,15 @@ pprAnnProvenance :: OutputableBndr name => AnnProvenance name -> SDoc
pprAnnProvenance ModuleAnnProvenance = ptext (sLit "ANN module")
pprAnnProvenance (ValueAnnProvenance name) = ptext (sLit "ANN") <+> ppr name
pprAnnProvenance (TypeAnnProvenance name) = ptext (sLit "ANN type") <+> ppr name
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[RoleAnnot]{Role annotations}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type LRoleAnnotDecl name = Located (RoleAnnotDecl name)
-- See #8185 for more info about why role annotations are
@@ -1681,5 +1661,3 @@ instance OutputableBndr name => Outputable (RoleAnnotDecl name) where
roleAnnotDeclName :: RoleAnnotDecl name -> name
roleAnnotDeclName (RoleAnnotDecl (L _ name) _) = name
-
-\end{code}
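
The fragment of Note [Complete user-supplied kind signatures] above says declarations are kind-checked differently when every type variable (and, for a closed family, the result kind) is annotated. A few illustrative declarations, assuming only standard KindSignatures/TypeFamilies syntax (the module name is made up):

{-# LANGUAGE KindSignatures, TypeFamilies #-}
module CuskExamples where

-- No CUSK: the kinds of f and a must be inferred from the RHS.
data T1 f a = MkT1 (f a)

-- CUSK: every variable is annotated, giving T2 :: (* -> *) -> * -> * up front.
data T2 (f :: * -> *) (a :: *) = MkT2 (f a)

-- Closed type family with a CUSK: argument kinds and return kind annotated.
type family Apply (f :: * -> *) (a :: *) :: * where
  Apply f a = f a

-- Open type family: always treated as having a CUSK; the unannotated
-- variable (and the return kind) default to *.
type family Elem c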
diff --git a/compiler/hsSyn/HsExpr.lhs b/compiler/hsSyn/HsExpr.hs
index 82098e2b9f..1861811570 100644
--- a/compiler/hsSyn/HsExpr.lhs
+++ b/compiler/hsSyn/HsExpr.hs
@@ -1,8 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
-\begin{code}
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
+
{-# LANGUAGE CPP, DeriveDataTypeable, ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
@@ -40,15 +40,15 @@ import Type
-- libraries:
import Data.Data hiding (Fixity)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Expressions proper}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- * Expressions proper
type LHsExpr id = Located (HsExpr id)
@@ -87,8 +87,7 @@ noSyntaxExpr = HsLit (HsString "" (fsLit "noSyntaxExpr"))
type CmdSyntaxTable id = [(Name, SyntaxExpr id)]
-- See Note [CmdSyntaxTable]
-\end{code}
-
+{-
Note [CmdSyntaxTable]
~~~~~~~~~~~~~~~~~~~~~
Used only for arrow-syntax stuff (HsCmdTop), the CmdSyntaxTable keeps
@@ -122,9 +121,8 @@ is Less Cool because
than the rest of rebindable syntax, where the type is less
pre-ordained. (And this flexibility is useful; for example we can
typecheck do-notation with (>>=) :: m1 a -> (a -> m2 b) -> m2 b.)
+-}
-
-\begin{code}
-- | A Haskell expression.
data HsExpr id
= HsVar id -- ^ Variable
@@ -296,7 +294,7 @@ data HsExpr id
(ArithSeqInfo id)
-- | Arithmetic sequence for parallel array
- | PArrSeq
+ | PArrSeq
PostTcExpr -- [:e1..e2:] or [:e1, e2..e3:]
(ArithSeqInfo id)
@@ -442,8 +440,8 @@ deriving instance (DataId id) => Data (HsTupArg id)
tupArgPresent :: LHsTupArg id -> Bool
tupArgPresent (L _ (Present {})) = True
tupArgPresent (L _ (Missing {})) = False
-\end{code}
+{-
Note [Parens in HsSyn]
~~~~~~~~~~~~~~~~~~~~~~
HsPar (and ParPat in patterns, HsParTy in types) is used as follows
whereas that would not be possible using a call to a polymorphic function
(because you can't call a polymorphic function at an unboxed type).
So we use Nothing to mean "use the old built-in typing rule".
+-}
-\begin{code}
instance OutputableBndr id => Outputable (HsExpr id) where
ppr expr = pprExpr expr
-\end{code}
-\begin{code}
-----------------------
-- pprExpr, pprLExpr, pprBinds call pprDeeper;
-- the underscore versions do not
@@ -696,8 +692,7 @@ ppr_expr (HsArrForm op _ args)
ppr_expr (HsUnboundVar nm)
= ppr nm
-\end{code}
-
+{-
HsSyn records exactly where the user put parens, with HsPar.
So generally speaking we print without adding any parens.
However, some code is internally generated, and in some places
@@ -707,8 +702,8 @@ pprParendExpr (but don't print double parens of course).
For operator applications we don't add parens, because the operator
fixities should do the job, except in debug mode (-dppr-debug) so we
can see the structure of the parse tree.
+-}
-\begin{code}
pprDebugParendExpr :: OutputableBndr id => LHsExpr id -> SDoc
pprDebugParendExpr expr
= getPprStyle (\sty ->
@@ -754,17 +749,17 @@ isAtomicHsExpr (HsUnboundVar {}) = True
isAtomicHsExpr (HsWrap _ e) = isAtomicHsExpr e
isAtomicHsExpr (HsPar e) = isAtomicHsExpr (unLoc e)
isAtomicHsExpr _ = False
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Commands (in arrow abstractions)}
-%* *
-%************************************************************************
+* *
+************************************************************************
We re-use HsExpr to represent these.
+-}
-\begin{code}
type LHsCmd id = Located (HsCmd id)
data HsCmd id
@@ -816,13 +811,12 @@ deriving instance (DataId id) => Data (HsCmd id)
data HsArrAppType = HsHigherOrderApp | HsFirstOrderApp
deriving (Data, Typeable)
-\end{code}
-
+{-
Top-level command, introducing a new arrow.
This may occur inside a proc (where the stack is empty) or as an
argument of a command-forming operator.
+-}
-\begin{code}
type LHsCmdTop id = Located (HsCmdTop id)
data HsCmdTop id
@@ -832,10 +826,7 @@ data HsCmdTop id
(CmdSyntaxTable id) -- See Note [CmdSyntaxTable]
deriving (Typeable)
deriving instance (DataId id) => Data (HsCmdTop id)
-\end{code}
-
-\begin{code}
instance OutputableBndr id => Outputable (HsCmd id) where
ppr cmd = pprCmd cmd
@@ -923,24 +914,22 @@ pprCmdArg (HsCmdTop cmd _ _ _)
instance OutputableBndr id => Outputable (HsCmdTop id) where
ppr = pprCmdArg
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Record binds}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type HsRecordBinds id = HsRecFields id (LHsExpr id)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{@Match@, @GRHSs@, and @GRHS@ datatypes}
-%* *
-%************************************************************************
+* *
+************************************************************************
@Match@es are sets of pattern bindings and right hand sides for
functions, patterns or case branches. For example, if a function @g@
@@ -955,8 +944,8 @@ It is always the case that each element of an @[Match]@ list has the
same number of @pats@s inside it. This corresponds to saying that
a function defined by pattern matching must have the same number of
patterns in each equation.
+-}
-\begin{code}
data MatchGroup id body
= MG { mg_alts :: [LMatch id body] -- The alternatives
, mg_arg_tys :: [PostTc id Type] -- Types of the arguments, t1..tn
@@ -1013,11 +1002,9 @@ data GRHS id body = GRHS [GuardLStmt id] -- Guards
body -- Right hand side
deriving (Typeable)
deriving instance (Data body,DataId id) => Data (GRHS id body)
-\end{code}
-We know the list must have at least one @Match@ in it.
+-- We know the list must have at least one @Match@ in it.
-\begin{code}
pprMatches :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> HsMatchContext idL -> MatchGroup idR body -> SDoc
pprMatches ctxt (MG { mg_alts = matches })
@@ -1087,15 +1074,15 @@ pprGRHS ctxt (GRHS guards body)
pp_rhs :: Outputable body => HsMatchContext idL -> body -> SDoc
pp_rhs ctxt rhs = matchSeparator ctxt <+> pprDeeper (ppr rhs)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Do stmts and list comprehensions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type LStmt id body = Located (StmtLR id id body)
type LStmtLR idL idR body = Located (StmtLR idL idR body)
@@ -1223,8 +1210,8 @@ data ParStmtBlock idL idR
(SyntaxExpr idR) -- The return operator
deriving( Typeable )
deriving instance (DataId idL, DataId idR) => Data (ParStmtBlock idL idR)
-\end{code}
+{-
Note [The type of bind in Stmts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some Stmts, notably BindStmt, keep the (>>=) bind operator.
@@ -1359,9 +1346,8 @@ Parallel statements require the 'Control.Monad.Zip.mzip' function:
In any other context than 'MonadComp', the fields for most of these
'SyntaxExpr's stay bottom.
+-}
-
-\begin{code}
instance (OutputableBndr idL, OutputableBndr idR)
=> Outputable (ParStmtBlock idL idR) where
ppr (ParStmtBlock stmts _ _) = interpp'SP stmts
@@ -1436,15 +1422,15 @@ pprQuals :: (OutputableBndr id, Outputable body)
=> [LStmt id body] -> SDoc
-- Show list comprehension qualifiers separated by commas
pprQuals quals = interpp'SP quals
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Template Haskell quotation brackets
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data HsSplice id
= HsSplice -- $z or $(f 4)
id -- A unique name to identify this splice point
@@ -1471,8 +1457,8 @@ type PendingTcSplice = PendingSplice Id
deriving instance (DataId id) => Data (HsSplice id)
deriving instance (DataId id) => Data (PendingSplice id)
-\end{code}
+{-
Note [Pending Splices]
~~~~~~~~~~~~~~~~~~~~~~
When we rename an untyped bracket, we name and lift out all the nested
@@ -1529,8 +1515,8 @@ e.g., in a type error message, we *do not* want to print out the pending
splices. In contrast, when pretty printing the output of the type checker, we
*do* want to print the pending splices. So splitting them up seems to make
sense, although I hate to add another constructor to HsExpr.
+-}
-\begin{code}
instance OutputableBndr id => Outputable (HsSplice id) where
ppr (HsSplice n e) = angleBrackets (ppr n <> comma <+> ppr e)
@@ -1599,15 +1585,15 @@ instance Outputable PendingRnSplice where
ppr (PendingRnTypeSplice s) = ppr s
ppr (PendingRnDeclSplice s) = ppr s
ppr (PendingRnCrossStageSplice name) = ppr name
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Enumerations and list comprehensions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data ArithSeqInfo id
= From (LHsExpr id)
| FromThen (LHsExpr id)
@@ -1619,9 +1605,7 @@ data ArithSeqInfo id
(LHsExpr id)
deriving (Typeable)
deriving instance (DataId id) => Data (ArithSeqInfo id)
-\end{code}
-\begin{code}
instance OutputableBndr id => Outputable (ArithSeqInfo id) where
ppr (From e1) = hcat [ppr e1, pp_dotdot]
ppr (FromThen e1 e2) = hcat [ppr e1, comma, space, ppr e2, pp_dotdot]
@@ -1631,16 +1615,15 @@ instance OutputableBndr id => Outputable (ArithSeqInfo id) where
pp_dotdot :: SDoc
pp_dotdot = ptext (sLit " .. ")
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{HsMatchCtxt}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data HsMatchContext id -- Context of a Match
= FunRhs id Bool -- Function binding for f; True <=> written infix
| LambdaExpr -- Patterns of a lambda
@@ -1675,9 +1658,7 @@ data HsStmtContext id
| ParStmtCtxt (HsStmtContext id) -- A branch of a parallel stmt
| TransStmtCtxt (HsStmtContext id) -- A branch of a transform stmt
deriving (Data, Typeable)
-\end{code}
-\begin{code}
isListCompExpr :: HsStmtContext id -> Bool
-- Uses syntax [ e | quals ]
isListCompExpr ListComp = True
@@ -1692,9 +1673,7 @@ isMonadCompExpr MonadComp = True
isMonadCompExpr (ParStmtCtxt ctxt) = isMonadCompExpr ctxt
isMonadCompExpr (TransStmtCtxt ctxt) = isMonadCompExpr ctxt
isMonadCompExpr _ = False
-\end{code}
-\begin{code}
matchSeparator :: HsMatchContext id -> SDoc
matchSeparator (FunRhs {}) = ptext (sLit "=")
matchSeparator CaseAlt = ptext (sLit "->")
@@ -1707,9 +1686,7 @@ matchSeparator RecUpd = panic "unused"
matchSeparator ThPatSplice = panic "unused"
matchSeparator ThPatQuote = panic "unused"
matchSeparator PatSyn = panic "unused"
-\end{code}
-\begin{code}
pprMatchContext :: Outputable id => HsMatchContext id -> SDoc
pprMatchContext ctxt
| want_an ctxt = ptext (sLit "an") <+> pprMatchContextNoun ctxt
@@ -1792,9 +1769,7 @@ matchContextErrString (StmtCtxt MDoExpr) = ptext (sLit "'mdo' block")
matchContextErrString (StmtCtxt ListComp) = ptext (sLit "list comprehension")
matchContextErrString (StmtCtxt MonadComp) = ptext (sLit "monad comprehension")
matchContextErrString (StmtCtxt PArrComp) = ptext (sLit "array comprehension")
-\end{code}
-\begin{code}
pprMatchInCtxt :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> HsMatchContext idL -> Match idR body -> SDoc
pprMatchInCtxt ctxt match = hang (ptext (sLit "In") <+> pprMatchContext ctxt <> colon)
@@ -1814,4 +1789,3 @@ pprStmtInCtxt ctxt stmt
ppr_stmt (TransStmt { trS_by = by, trS_using = using
, trS_form = form }) = pprTransStmt by using form
ppr_stmt stmt = pprStmt stmt
-\end{code}
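
ArithSeqInfo (its From and FromThen constructors are visible above, printed with the " .. " separator) records arithmetic sequences, and is reused for the parallel-array forms. A quick, purely illustrative reference for the ordinary list syntax it describes; the two bounded variants are labelled "bounded" here because their constructors are not shown in the hunk above, and the module name is made up:

module ArithSeqForms where

main :: IO ()
main = do
  print (take 3 [1 ..]    :: [Int])   -- From:     [e1 ..]        -> [1,2,3]
  print (take 3 [1, 3 ..] :: [Int])   -- FromThen: [e1, e2 ..]    -> [1,3,5]
  print ([1 .. 5]         :: [Int])   -- bounded:  [e1 .. e3]     -> [1,2,3,4,5]
  print ([1, 3 .. 9]      :: [Int])   -- bounded:  [e1, e2 .. e3] -> [1,3,5,7,9]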
diff --git a/compiler/hsSyn/HsExpr.lhs-boot b/compiler/hsSyn/HsExpr.hs-boot
index 387a83ebb7..51cbd29505 100644
--- a/compiler/hsSyn/HsExpr.lhs-boot
+++ b/compiler/hsSyn/HsExpr.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
{-# LANGUAGE CPP, KindSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- Note [Pass sensitive types]
@@ -54,18 +53,17 @@ instance OutputableBndr id => Outputable (HsCmd id)
type LHsExpr a = Located (HsExpr a)
type SyntaxExpr a = HsExpr a
-pprLExpr :: (OutputableBndr i) =>
+pprLExpr :: (OutputableBndr i) =>
LHsExpr i -> SDoc
-pprExpr :: (OutputableBndr i) =>
+pprExpr :: (OutputableBndr i) =>
HsExpr i -> SDoc
-pprUntypedSplice :: (OutputableBndr i) =>
+pprUntypedSplice :: (OutputableBndr i) =>
HsSplice i -> SDoc
pprPatBind :: (OutputableBndr bndr, OutputableBndr id, Outputable body)
=> LPat bndr -> GRHSs id body -> SDoc
-pprFunBind :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
+pprFunBind :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> idL -> Bool -> MatchGroup idR body -> SDoc
-\end{code}
diff --git a/compiler/hsSyn/HsImpExp.lhs b/compiler/hsSyn/HsImpExp.hs
index d627591cd7..166dddc10e 100644
--- a/compiler/hsSyn/HsImpExp.lhs
+++ b/compiler/hsSyn/HsImpExp.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
HsImpExp: Abstract syntax: imports, exports, interfaces
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
module HsImpExp where
@@ -19,16 +19,17 @@ import FastString
import SrcLoc
import Data.Data
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Import and export declaration lists}
-%* *
-%************************************************************************
+* *
+************************************************************************
One per \tr{import} declaration in a module.
-\begin{code}
+-}
+
type LImportDecl name = Located (ImportDecl name)
-- ^ When in a list this may have
--
@@ -76,9 +77,7 @@ simpleImportDecl mn = ImportDecl {
ideclAs = Nothing,
ideclHiding = Nothing
}
-\end{code}
-\begin{code}
instance (OutputableBndr name, HasOccName name) => Outputable (ImportDecl name) where
ppr (ImportDecl { ideclName = mod', ideclPkgQual = pkg
, ideclSource = from, ideclSafe = safe
@@ -112,15 +111,15 @@ instance (OutputableBndr name, HasOccName name) => Outputable (ImportDecl name)
ppr_ies [] = ptext (sLit "()")
ppr_ies ies = char '(' <+> interpp'SP ies <+> char ')'
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Imported and exported entities}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type LIE name = Located (IE name)
-- ^ When in a list this may have
--
@@ -154,9 +153,7 @@ data IE name
| IEDoc HsDocString -- ^ Some documentation
| IEDocNamed String -- ^ Reference to named doc
deriving (Eq, Data, Typeable)
-\end{code}
-\begin{code}
ieName :: IE name -> name
ieName (IEVar (L _ n)) = n
ieName (IEThingAbs n) = n
@@ -173,9 +170,6 @@ ieNames (IEModuleContents _ ) = []
ieNames (IEGroup _ _ ) = []
ieNames (IEDoc _ ) = []
ieNames (IEDocNamed _ ) = []
-\end{code}
-
-\begin{code}
pprImpExp :: (HasOccName name, OutputableBndr name) => name -> SDoc
pprImpExp name = type_pref <+> pprPrefixOcc name
@@ -196,4 +190,3 @@ instance (HasOccName name, OutputableBndr name) => Outputable (IE name) where
ppr (IEGroup n _) = text ("<IEGroup: " ++ (show n) ++ ">")
ppr (IEDoc doc) = ppr doc
ppr (IEDocNamed string) = text ("<IEDocNamed: " ++ string ++ ">")
-\end{code}
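
The ImportDecl fields printed above correspond to pieces of the surface import syntax. A compiling sketch of the common forms, using real base/containers modules; the comments name only fields that appear above, and the safe / {-# SOURCE #-} forms are merely mentioned since they need extra setup:

{-# LANGUAGE PackageImports #-}
module ImportForms where

import Data.Char                            -- no import list (ideclHiding = Nothing)
import qualified Data.Map as M              -- qualified, with an alias (ideclAs)
import Data.List hiding (sort)              -- a hiding list (ideclHiding)
import Data.Maybe (fromMaybe)               -- an explicit import list (ideclHiding)
import "containers" Data.IntMap ()          -- package-qualified (ideclPkgQual)

-- Not shown compiling here: 'import safe ...' (ideclSafe, needs Safe Haskell)
-- and 'import {-# SOURCE #-} ...' (ideclSource, needs an hs-boot file).

main :: IO ()
main = print (fromMaybe (0 :: Int) (M.lookup 'a' (M.fromList [('a', 1)])))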
diff --git a/compiler/hsSyn/HsLit.lhs b/compiler/hsSyn/HsLit.hs
index 2bde0cdc29..5e673ad1f4 100644
--- a/compiler/hsSyn/HsLit.lhs
+++ b/compiler/hsSyn/HsLit.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[HsLit]{Abstract syntax: source-language literals}
+-}
-\begin{code}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
@@ -28,20 +28,15 @@ import Lexer ( SourceText )
import Data.ByteString (ByteString)
import Data.Data hiding ( Fixity )
-\end{code}
-
-
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[HsLit]{Literals}
-%* *
-%************************************************************************
-
+* *
+************************************************************************
+-}
-\begin{code}
-- Note [literal source text] for SourceText fields in the following
data HsLit
= HsChar SourceText Char -- Character
@@ -98,8 +93,8 @@ data OverLitVal
overLitType :: HsOverLit a -> PostTc a Type
overLitType = ol_type
-\end{code}
+{-
Note [literal source text]
~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -157,8 +152,8 @@ calls, which wouldn't be possible if the desugarer made the application.
The PostTcType in each branch records the type the overload literal is
found to have.
+-}
-\begin{code}
-- Comparison operations are needed when grouping literals
-- for compiling pattern-matching (module MatchLit)
instance Eq (HsOverLit id) where
@@ -183,9 +178,7 @@ instance Ord OverLitVal where
compare (HsIsString _ s1) (HsIsString _ s2) = s1 `compare` s2
compare (HsIsString _ _) (HsIntegral _ _) = GT
compare (HsIsString _ _) (HsFractional _) = GT
-\end{code}
-\begin{code}
instance Outputable HsLit where
-- Use "show" because it puts in appropriate escapes
ppr (HsChar _ c) = pprHsChar c
@@ -211,4 +204,3 @@ instance Outputable OverLitVal where
ppr (HsIntegral _ i) = integer i
ppr (HsFractional f) = ppr f
ppr (HsIsString _ s) = pprHsString s
-\end{code}
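
HsOverLit/OverLitVal above model overloaded literals, whose concrete type is fixed only by context (and recorded via ol_type after type checking). At the source level that is the usual elaboration through the numeric and string classes; a small compiling example (module and binding names are made up):

{-# LANGUAGE OverloadedStrings #-}
module OverLits where

import Data.String (IsString (..))

newtype Name = Name String deriving Show

instance IsString Name where
  fromString = Name

anInt :: Int
anInt = 42          -- HsIntegral, via fromInteger

aDouble :: Double
aDouble = 4.2       -- HsFractional, via fromRational

aName :: Name
aName = "Simon"     -- HsIsString, via fromString (OverloadedStrings)

main :: IO ()
main = do
  print anInt
  print aDouble
  print aName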
diff --git a/compiler/hsSyn/HsPat.lhs b/compiler/hsSyn/HsPat.hs
index 48c707b51f..f38665f209 100644
--- a/compiler/hsSyn/HsPat.lhs
+++ b/compiler/hsSyn/HsPat.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[PatSyntax]{Abstract Haskell syntax---patterns}
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE StandaloneDeriving #-}
@@ -52,10 +52,7 @@ import FastString
-- libraries:
import Data.Data hiding (TyCon,Fixity)
import Data.Maybe
-\end{code}
-
-\begin{code}
type InPat id = LPat id -- No 'Out' constructors
type OutPat id = LPat id -- No 'In' constructors
@@ -114,7 +111,7 @@ data Pat id
pat_con :: Located ConLike,
                    pat_arg_tys :: [Type], -- The universal arg types, 1-1 with the universal
-- tyvars of the constructor/pattern synonym
- -- Use (conLikeResTy pat_con pat_arg_tys) to get
+ -- Use (conLikeResTy pat_con pat_arg_tys) to get
-- the type of the pattern
pat_tvs :: [TyVar], -- Existentially bound type variables (tyvars only)
@@ -173,11 +170,9 @@ data Pat id
-- the scrutinee, followed by a match on 'pat'
deriving (Typeable)
deriving instance (DataId id) => Data (Pat id)
-\end{code}
-HsConDetails is used for patterns/expressions *and* for data type declarations
+-- HsConDetails is used for patterns/expressions *and* for data type declarations
-\begin{code}
data HsConDetails arg rec
= PrefixCon [arg] -- C p1 p2 p3
| RecCon rec -- C { x = p1, y = p2 }
@@ -190,12 +185,12 @@ hsConPatArgs :: HsConPatDetails id -> [LPat id]
hsConPatArgs (PrefixCon ps) = ps
hsConPatArgs (RecCon fs) = map (hsRecFieldArg . unLoc) (rec_flds fs)
hsConPatArgs (InfixCon p1 p2) = [p1,p2]
-\end{code}
+{-
However HsRecFields is used only for patterns and expressions
(not data type declarations)
+-}
-\begin{code}
data HsRecFields id arg -- A bunch of record fields
-- { x = 3, y = True }
-- Used for both expressions and patterns
@@ -239,15 +234,15 @@ data HsRecField id arg = HsRecField {
hsRecFields :: HsRecFields id arg -> [id]
hsRecFields rbinds = map (unLoc . hsRecFieldId . unLoc) (rec_flds rbinds)
-\end{code}
-%************************************************************************
-%* *
-%* Printing patterns
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* Printing patterns
+* *
+************************************************************************
+-}
-\begin{code}
instance (OutputableBndr name) => Outputable (Pat name) where
ppr = pprPat
@@ -324,16 +319,15 @@ instance (OutputableBndr id, Outputable arg)
ppr (HsRecField { hsRecFieldId = f, hsRecFieldArg = arg,
hsRecPun = pun })
= ppr f <+> (ppUnless pun $ equals <+> ppr arg)
-\end{code}
-
-%************************************************************************
-%* *
-%* Building patterns
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* Building patterns
+* *
+************************************************************************
+-}
-\begin{code}
mkPrefixConPat :: DataCon -> [OutPat id] -> [Type] -> OutPat id
-- Make a vanilla Prefix constructor pattern
mkPrefixConPat dc pats tys
@@ -347,14 +341,13 @@ mkNilPat ty = mkPrefixConPat nilDataCon [] [ty]
mkCharLitPat :: String -> Char -> OutPat id
mkCharLitPat src c = mkPrefixConPat charDataCon
[noLoc $ LitPat (HsCharPrim src c)] []
-\end{code}
-
-%************************************************************************
-%* *
-%* Predicates for checking things about pattern-lists in EquationInfo *
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* Predicates for checking things about pattern-lists in EquationInfo *
+* *
+************************************************************************
\subsection[Pat-list-predicates]{Look for interesting things in patterns}
@@ -379,7 +372,8 @@ A pattern is in {\em exactly one} of the above three categories; `as'
patterns are treated specially, of course.
The 1.3 report defines what ``irrefutable'' and ``failure-free'' patterns are.
-\begin{code}
+-}
+
isStrictLPat :: LPat id -> Bool
isStrictLPat (L _ (ParPat p)) = isStrictLPat p
isStrictLPat (L _ (BangPat {})) = True
@@ -394,7 +388,7 @@ isStrictHsBind _ = False
looksLazyPatBind :: HsBind id -> Bool
-- Returns True of anything *except*
--- a StrictHsBind (as above) or
+-- a StrictHsBind (as above) or
-- a VarPat
-- In particular, returns True of a pattern binding with a compound pattern, like (I# x)
looksLazyPatBind (PatBind { pat_lhs = p }) = looksLazyLPat p
@@ -452,7 +446,7 @@ isIrrefutableHsPat pat
-- Both should be gotten rid of by renamer before
-- isIrrefutablePat is called
- go1 (SplicePat {}) = urk pat
+ go1 (SplicePat {}) = urk pat
go1 (QuasiQuotePat {}) = urk pat
urk pat = pprPanic "isIrrefutableHsPat:" (ppr pat)
@@ -483,4 +477,3 @@ conPatNeedsParens :: HsConDetails a b -> Bool
conPatNeedsParens (PrefixCon args) = not (null args)
conPatNeedsParens (InfixCon {}) = True
conPatNeedsParens (RecCon {}) = True
-\end{code}
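
The predicates above sort patterns into strict, lazy-looking, and (ir)refutable. A few ordinary bindings showing roughly where the common pattern forms land (illustrative; the comments paraphrase the predicates rather than quote them, and the module name is made up):

{-# LANGUAGE BangPatterns #-}
module PatKinds where

pair :: (Int, Int)
pair = (1, 2)

-- A bang pattern is strict (isStrictLPat = True).
f :: Int -> Int
f !n = n + 1

-- A lazy (~) pattern is irrefutable by construction.
g :: (Int, Int) -> Int
g ~(a, _) = a

-- A pattern binding over a compound pattern "looks lazy" in the
-- looksLazyPatBind sense, even though a tuple of variable patterns is
-- itself irrefutable.
(x, y) = pair

-- A constructor pattern that can fail to match is refutable.
h :: Maybe Int -> Int
h (Just n) = n
h Nothing  = 0

main :: IO ()
main = print (f x + g pair + h (Just y))   -- prints 5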
diff --git a/compiler/hsSyn/HsPat.lhs-boot b/compiler/hsSyn/HsPat.hs-boot
index cb8cb0a5bc..114425b526 100644
--- a/compiler/hsSyn/HsPat.lhs-boot
+++ b/compiler/hsSyn/HsPat.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
{-# LANGUAGE CPP, KindSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- Note [Pass sensitive types]
@@ -29,4 +28,3 @@ instance Typeable1 Pat
instance (DataId id) => Data (Pat id)
instance (OutputableBndr name) => Outputable (Pat name)
-\end{code}
diff --git a/compiler/hsSyn/HsSyn.lhs b/compiler/hsSyn/HsSyn.hs
index fe31bd57e1..e75939ea2f 100644
--- a/compiler/hsSyn/HsSyn.lhs
+++ b/compiler/hsSyn/HsSyn.hs
@@ -1,14 +1,14 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section{Haskell abstract syntax definition}
This module glues together the pieces of the Haskell abstract syntax,
which is declared in the various \tr{Hs*} modules. This module,
therefore, is almost nothing but re-exporting.
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
@@ -54,9 +54,7 @@ import FastString
-- libraries:
import Data.Data hiding ( Fixity )
-\end{code}
-\begin{code}
-- | All we actually declare here is the top-level structure for a module.
data HsModule name
= HsModule {
@@ -105,10 +103,7 @@ data HsModule name
--
deriving (Typeable)
deriving instance (DataId name) => Data (HsModule name)
-\end{code}
-
-\begin{code}
instance (OutputableBndr name, HasOccName name)
=> Outputable (HsModule name) where
@@ -143,4 +138,3 @@ pp_mb Nothing = empty
pp_nonnull :: Outputable t => [t] -> SDoc
pp_nonnull [] = empty
pp_nonnull xs = vcat (map ppr xs)
-\end{code}
diff --git a/compiler/hsSyn/HsTypes.lhs b/compiler/hsSyn/HsTypes.hs
index 37aaa56039..5d368b33be 100644
--- a/compiler/hsSyn/HsTypes.lhs
+++ b/compiler/hsSyn/HsTypes.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
HsTypes: Abstract syntax: user-defined types
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
@@ -18,7 +18,7 @@ HsTypes: Abstract syntax: user-defined types
module HsTypes (
HsType(..), LHsType, HsKind, LHsKind,
HsTyOp,LHsTyOp,
- HsTyVarBndr(..), LHsTyVarBndr,
+ HsTyVarBndr(..), LHsTyVarBndr,
LHsTyVarBndrs(..),
HsWithBndrs(..),
HsTupleSort(..), HsExplicitFlag(..),
@@ -28,11 +28,11 @@ module HsTypes (
HsTyLit(..),
HsIPName(..), hsIPNameFS,
- LBangType, BangType, HsBang(..),
- getBangType, getBangStrictness,
+ LBangType, BangType, HsBang(..),
+ getBangType, getBangStrictness,
ConDeclField(..), LConDeclField, pprConDeclFields,
-
+
mkHsQTvs, hsQTvBndrs, isHsKindedTyVar, hsTvbAllKinded,
mkExplicitHsForAllTy, mkImplicitHsForAllTy, mkQualifiedHsForAllTy,
hsExplicitTvs,
@@ -68,17 +68,16 @@ import Maybes( isJust )
import Data.Data hiding ( Fixity )
import Data.Maybe ( fromMaybe )
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Quasi quotes; used in types and elsewhere
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-data HsQuasiQuote id = HsQuasiQuote
+data HsQuasiQuote id = HsQuasiQuote
id -- The quasi-quoter
SrcSpan -- The span of the enclosed string
FastString -- The enclosed string
@@ -91,16 +90,15 @@ ppr_qq :: OutputableBndr id => HsQuasiQuote id -> SDoc
ppr_qq (HsQuasiQuote quoter _ quote) =
char '[' <> ppr quoter <> ptext (sLit "|") <>
ppr quote <> ptext (sLit "|]")
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Bang annotations}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type LBangType name = Located (BangType name)
type BangType name = HsType name -- Bangs are in the HsType data type
@@ -111,20 +109,19 @@ getBangType ty = ty
getBangStrictness :: LHsType a -> HsBang
getBangStrictness (L _ (HsBangTy s _)) = s
getBangStrictness _ = HsNoBang
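
A concrete picture of what these two functions see (illustrative only, not part of this patch; T and MkT are invented names):

    data T = MkT !Int
    -- The field's type is parsed as HsBangTy <bang> Int, so
    -- getBangStrictness on it returns that bang (and HsNoBang on a plain
    -- type), while getBangType strips the bang off, leaving Int.
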
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Data types}
-%* *
-%************************************************************************
+* *
+************************************************************************
This is the syntax for types as seen in type signatures.
Note [HsBSig binder lists]
~~~~~~~~~~~~~~~~~~~~~~~~~~
-Consider a binder (or pattern) decoarated with a type or kind,
+Consider a binder (or pattern) decorated with a type or kind,

\ (x :: a -> a). blah
forall (a :: k -> *) (b :: k). blah
Then we use a LHsBndrSig on the binder, so that the
@@ -132,8 +129,8 @@ renamer can decorate it with the variables bound
by the pattern ('a' in the first example, 'k' in the second),
assuming that neither of them is in scope already
See also Note [Kind and type-variable binders] in RnTypes
+-}
-\begin{code}
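
A user-level sketch of the two cases the note describes, assuming ScopedTypeVariables and PolyKinds as in GHC of this era; the module and names (BndrSigDemo, f, P, MkP) are invented for illustration:

    {-# LANGUAGE ScopedTypeVariables, KindSignatures, PolyKinds #-}
    module BndrSigDemo where

    -- 'a' is not otherwise in scope, so the pattern signature itself binds
    -- it: the renamer records 'a' on the binder's signature.
    f = \(x :: a) -> x

    -- 'k' is never written as a binder; it is bound implicitly by the kind
    -- signatures on 'a' and 'b', and again recorded by the renamer.
    data P (a :: k -> *) (b :: k) = MkP
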
type LHsContext name = Located (HsContext name)
type HsContext name = [LHsType name]
@@ -274,16 +271,16 @@ data HsType name
| HsQuasiQuoteTy (HsQuasiQuote name)
- | HsSpliceTy (HsSplice name)
+ | HsSpliceTy (HsSplice name)
(PostTc name Kind)
| HsDocTy (LHsType name) LHsDocString -- A documented type
- | HsBangTy HsBang (LHsType name) -- Bang-style type annotations
+ | HsBangTy HsBang (LHsType name) -- Bang-style type annotations
| HsRecTy [LConDeclField name] -- Only in data type declarations
- | HsCoreTy Type -- An escape hatch for tunnelling a *closed*
- -- Core Type through HsSyn.
+ | HsCoreTy Type -- An escape hatch for tunnelling a *closed*
+ -- Core Type through HsSyn.
| HsExplicitListTy -- A promoted explicit list
(PostTc name Kind) -- See Note [Promoted lists and tuples]
@@ -318,8 +315,8 @@ type HsTyOp name = (HsTyWrapper, name)
mkHsOpTy :: LHsType name -> Located name -> LHsType name -> HsType name
mkHsOpTy ty1 op ty2 = HsOpTy ty1 (WpKiApps [], op) ty2
-\end{code}
+{-
Note [HsForAllTy tyvar binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
After parsing:
@@ -347,8 +344,8 @@ Note [Unit tuples]
~~~~~~~~~~~~~~~~~~
Consider the type
type instance F Int = ()
-We want to parse that "()"
- as HsTupleTy HsBoxedOrConstraintTuple [],
+We want to parse that "()"
+ as HsTupleTy HsBoxedOrConstraintTuple [],
NOT as HsTyVar unitTyCon
Why? Because F might have kind (* -> Constraint), so when parsing we
@@ -378,18 +375,18 @@ Notice the difference between
HsListTy HsExplicitListTy
HsTupleTy HsExplicitListTupleTy
-E.g. f :: [Int] HsListTy
+E.g. f :: [Int] HsListTy
- g3 :: T '[] All these use
- g2 :: T '[True] HsExplicitListTy
- g1 :: T '[True,False]
+ g3 :: T '[] All these use
+ g2 :: T '[True] HsExplicitListTy
+ g1 :: T '[True,False]
g1a :: T [True,False] (can omit ' where unambiguous)
kind of T :: [Bool] -> * This kind uses HsListTy!
-E.g. h :: (Int,Bool) HsTupleTy; f is a pair
- k :: S '(True,False) HsExplicitTypleTy; S is indexed by
- a type-level pair of booleans
+E.g. h :: (Int,Bool) HsTupleTy; f is a pair
+ k :: S '(True,False) HsExplicitTupleTy; S is indexed by
+ a type-level pair of booleans
kind of S :: (Bool,Bool) -> * This kind uses HsExplicitTupleTy
Note [Distinguishing tuple kinds]
@@ -407,15 +404,15 @@ HsTupleTy, a HsTupleSort). We can tell if a tuple is unboxed while parsing,
because of the #. However, with -XConstraintKinds we can only distinguish
between constraint and boxed tuples during type checking, in general. Hence the
four constructors of HsTupleSort:
-
+
HsUnboxedTuple -> Produced by the parser
HsBoxedTuple -> Certainly a boxed tuple
HsConstraintTuple -> Certainly a constraint tuple
- HsBoxedOrConstraintTuple -> Could be a boxed or a constraint
+ HsBoxedOrConstraintTuple -> Could be a boxed or a constraint
tuple. Produced by the parser only,
disappears after type checking
+-}
-\begin{code}
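
A small illustration of why the parser cannot always commit to one constructor (ConstraintKinds assumed; the names swap, Both and showBoth are invented):

    {-# LANGUAGE ConstraintKinds #-}

    -- "(# Int, Bool #)" would be HsUnboxedTuple straight from the parser,
    -- thanks to the '#'.

    -- These two "( .. , .. )" forms are syntactically identical, but the
    -- first is a boxed tuple and the second a constraint tuple; only the
    -- type checker can tell, hence HsBoxedOrConstraintTuple.
    swap :: (Int, Bool) -> (Bool, Int)
    swap (i, b) = (b, i)

    type Both a = (Show a, Eq a)

    showBoth :: Both a => a -> a -> String
    showBoth x y = show x ++ (if x == y then " = " else " /= ") ++ show y
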
data HsTupleSort = HsUnboxedTuple
| HsBoxedTuple
| HsConstraintTuple
@@ -436,7 +433,7 @@ data ConDeclField name -- Record fields have Haddoc docs on them
deriving instance (DataId name) => Data (ConDeclField name)
-----------------------
--- Combine adjacent for-alls.
+-- Combine adjacent for-alls.
-- The following awkward situation can happen otherwise:
-- f :: forall a. ((Num a) => Int)
-- might generate HsForAll (Just [a]) [] (HsForAll Nothing [Num a] t)
@@ -474,7 +471,7 @@ mk_forall_ty exp tvs (L _ (HsParTy ty)) = mk_forall_ty exp tvs ty
mk_forall_ty exp tvs ty = HsForAllTy exp Nothing (mkHsQTvs tvs) (noLoc []) ty
-- Even if tvs is empty, we still make a HsForAll!
-- In the Implicit case, this signals the place to do implicit quantification
- -- In the Explicit case, it prevents implicit quantification
+ -- In the Explicit case, it prevents implicit quantification
-- (see the sigtype production in Parser.y)
-- so that (forall. ty) isn't implicitly quantified
@@ -520,10 +517,7 @@ isWildcardTy _ = False
isNamedWildcardTy :: HsType a -> Bool
isNamedWildcardTy (HsNamedWildcardTy _) = True
isNamedWildcardTy _ = False
-\end{code}
-
-\begin{code}
splitHsAppTys :: LHsType n -> [LHsType n] -> (LHsType n, [LHsType n])
splitHsAppTys (L _ (HsAppTy f a)) as = splitHsAppTys f (a:as)
splitHsAppTys (L _ (HsParTy f)) as = splitHsAppTys f as
@@ -548,12 +542,12 @@ mkHsAppTys fun_ty [] = pprPanic "mkHsAppTys" (ppr fun_ty)
mkHsAppTys fun_ty (arg_ty:arg_tys)
= foldl mk_app (HsAppTy fun_ty arg_ty) arg_tys
where
- mk_app fun arg = HsAppTy (noLoc fun) arg
- -- Add noLocs for inner nodes of the application;
- -- they are never used
+ mk_app fun arg = HsAppTy (noLoc fun) arg
+ -- Add noLocs for inner nodes of the application;
+ -- they are never used
splitLHsInstDeclTy_maybe
- :: LHsType name
+ :: LHsType name
-> Maybe (LHsTyVarBndrs name, HsContext name, Located name, [LHsType name])
-- Split up an instance decl type, returning the pieces
splitLHsInstDeclTy_maybe inst_ty = do
@@ -562,7 +556,7 @@ splitLHsInstDeclTy_maybe inst_ty = do
return (tvs, cxt, cls, tys)
splitLHsForAllTy
- :: LHsType name
+ :: LHsType name
-> (LHsTyVarBndrs name, HsContext name, LHsType name)
splitLHsForAllTy poly_ty
= case unLoc poly_ty of
@@ -575,7 +569,7 @@ splitHsClassTy_maybe :: HsType name -> Maybe (name, [LHsType name])
splitHsClassTy_maybe ty = fmap (\(L _ n, tys) -> (n, tys)) $ splitLHsClassTy_maybe (noLoc ty)
splitLHsClassTy_maybe :: LHsType name -> Maybe (Located name, [LHsType name])
---- Watch out.. in ...deriving( Show )... we use this on
+--- Watch out.. in ...deriving( Show )... we use this on
--- the list of partially applied predicates in the deriving,
--- so there can be zero args.
@@ -593,19 +587,19 @@ splitLHsClassTy_maybe ty
_ -> Nothing
-- splitHsFunType decomposes a type (t1 -> t2 ... -> tn)
--- Breaks up any parens in the result type:
+-- Breaks up any parens in the result type:
-- splitHsFunType (a -> (b -> c)) = ([a,b], c)
-- Also deals with (->) t1 t2; that is why it only works on LHsType Name
-- (see Trac #9096)
splitHsFunType :: LHsType Name -> ([LHsType Name], LHsType Name)
-splitHsFunType (L _ (HsParTy ty))
+splitHsFunType (L _ (HsParTy ty))
= splitHsFunType ty
splitHsFunType (L _ (HsFunTy x y))
| (args, res) <- splitHsFunType y
= (x:args, res)
-splitHsFunType orig_ty@(L _ (HsAppTy t1 t2))
+splitHsFunType orig_ty@(L _ (HsAppTy t1 t2))
= go t1 [t2]
where -- Look for (->) t1 t2, possibly with parenthesisation
go (L _ (HsTyVar fn)) tys | fn == funTyConName
@@ -617,16 +611,15 @@ splitHsFunType orig_ty@(L _ (HsAppTy t1 t2))
go _ _ = ([], orig_ty) -- Failure to match
splitHsFunType other = ([], other)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Pretty printing}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance (OutputableBndr name) => Outputable (HsType name) where
ppr ty = pprHsType ty
@@ -634,7 +627,7 @@ instance Outputable HsTyLit where
ppr = ppr_tylit
instance (OutputableBndr name) => Outputable (LHsTyVarBndrs name) where
- ppr (HsQTvs { hsq_kvs = kvs, hsq_tvs = tvs })
+ ppr (HsQTvs { hsq_kvs = kvs, hsq_tvs = tvs })
= sep [ ifPprDebug $ braces (interppSP kvs), interppSP tvs ]
instance (OutputableBndr name) => Outputable (HsTyVarBndr name) where
@@ -693,8 +686,8 @@ pprConDeclFields fields = braces (sep (punctuate comma (map ppr_fld fields)))
= ppr_names ns <+> dcolon <+> ppr ty <+> ppr_mbDoc doc
ppr_names [n] = ppr n
ppr_names ns = sep (punctuate comma (map ppr ns))
-\end{code}
+{-
Note [Printing KindedTyVars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Trac #3830 reminded me that we should really only print the kind
@@ -705,8 +698,8 @@ rather than converting to KindedTyVars as before.
(As it happens, the message in #3830 comes out a different way now,
and the problem doesn't show up; but having the flag on a KindedTyVar
seems like the Right Thing anyway.)
+-}
-\begin{code}
-- Printing works more-or-less as for Types
pprHsType, pprParendHsType :: (OutputableBndr name) => HsType name -> SDoc
@@ -786,7 +779,7 @@ ppr_mono_ty _ (HsParTy ty)
-- But we still use the precedence stuff to add parens because
-- toHsType doesn't put in any HsParTys, so we may still need them
-ppr_mono_ty ctxt_prec (HsDocTy ty doc)
+ppr_mono_ty ctxt_prec (HsDocTy ty doc)
= maybeParen ctxt_prec TyOpPrec $
ppr_mono_lty TyOpPrec ty <+> ppr (unLoc doc)
-- we pretty print Haddock comments on types as if they were
@@ -805,6 +798,3 @@ ppr_fun_ty ctxt_prec ty1 ty2
ppr_tylit :: HsTyLit -> SDoc
ppr_tylit (HsNumTy i) = integer i
ppr_tylit (HsStrTy s) = text (show s)
-\end{code}
-
-
diff --git a/compiler/hsSyn/HsUtils.lhs b/compiler/hsSyn/HsUtils.hs
index ed78964a63..57109fbb33 100644
--- a/compiler/hsSyn/HsUtils.lhs
+++ b/compiler/hsSyn/HsUtils.hs
@@ -1,7 +1,6 @@
+{-
+(c) The University of Glasgow, 1992-2006
-%
-% (c) The University of Glasgow, 1992-2006
-%
Here we collect a variety of helper functions that construct or
analyse HsSyn. All these functions deal with generic HsSyn; functions
@@ -12,8 +11,8 @@ which deal with the instantiated versions are located elsewhere:
RdrName parser/RdrHsSyn
Name rename/RnHsSyn
Id typecheck/TcHsSyn
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
@@ -103,20 +102,19 @@ import Outputable
import Data.Either
import Data.Function
import Data.List
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Some useful helpers for constructing syntax
-%* *
-%************************************************************************
+* *
+************************************************************************
These functions attempt to construct a not-completely-useless SrcSpan
from their components, compared with the nl* functions below which
just attach noSrcSpan to everything.
+-}
-\begin{code}
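
A tiny sketch of the contrast being described, assuming the module layout of this tree (HsExpr, HsUtils, RdrName); NoLocDemo and mkApp are invented names:

    module NoLocDemo where

    import HsExpr  (LHsExpr)
    import HsUtils (nlHsApp, nlHsVar)
    import RdrName (RdrName)

    -- Builds the application "f x" for generated code: every node gets
    -- noSrcSpan, which is fine because no diagnostic will ever point "at"
    -- it. The mkHs* variants defined below instead combine the SrcSpans of
    -- their arguments so that error locations stay useful.
    mkApp :: RdrName -> RdrName -> LHsExpr RdrName
    mkApp f x = nlHsApp (nlHsVar f) (nlHsVar x)
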
mkHsPar :: LHsExpr id -> LHsExpr id
mkHsPar e = L (getLoc e) (HsPar e)
@@ -312,16 +310,15 @@ mkHsString s = HsString s (mkFastString s)
userHsTyVarBndrs :: SrcSpan -> [name] -> [Located (HsTyVarBndr name)]
-- Caller sets location
userHsTyVarBndrs loc bndrs = [ L loc (UserTyVar v) | v <- bndrs ]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Constructing syntax with no location info
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
nlHsVar :: id -> LHsExpr id
nlHsVar n = noLoc (HsVar n)
@@ -407,12 +404,12 @@ nlHsFunTy a b = noLoc (HsFunTy a b)
nlHsTyConApp :: name -> [LHsType name] -> LHsType name
nlHsTyConApp tycon tys = foldl nlHsAppTy (nlHsTyVar tycon) tys
-\end{code}
+{-
Tuples. All these functions are *pre-typechecker* because they lack
types on the tuple.
+-}
-\begin{code}
mkLHsTupleExpr :: [LHsExpr a] -> LHsExpr a
-- Makes a pre-typechecker boxed tuple, deals with 1 case
mkLHsTupleExpr [e] = e
@@ -426,18 +423,17 @@ nlTuplePat pats box = noLoc (TuplePat pats box [])
missingTupArg :: HsTupArg RdrName
missingTupArg = Missing placeHolderType
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Converting a Type to an HsType RdrName
-%* *
-%************************************************************************
+* *
+************************************************************************
This is needed to implement GeneralizedNewtypeDeriving.
+-}
-\begin{code}
toHsType :: Type -> LHsType RdrName
toHsType ty
| [] <- tvs_only
@@ -471,9 +467,6 @@ toHsType ty
toHsKind :: Kind -> LHsKind RdrName
toHsKind = toHsType
-\end{code}
-
-\begin{code}
--------- HsWrappers: type args, dict args, casts ---------
mkLHsWrap :: HsWrapper -> LHsExpr id -> LHsExpr id
mkLHsWrap co_fn (L loc e) = L loc (mkHsWrap co_fn e)
@@ -515,15 +508,16 @@ mkHsWrapPatCo co pat ty | isTcReflCo co = pat
mkHsDictLet :: TcEvBinds -> LHsExpr Id -> LHsExpr Id
mkHsDictLet ev_binds expr = mkLHsWrap (mkWpLet ev_binds) expr
-\end{code}
+
+{-
l
-%************************************************************************
-%* *
+************************************************************************
+* *
Bindings; with a location at the top
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkFunBind :: Located RdrName -> [LMatch RdrName (LHsExpr RdrName)]
-> HsBind RdrName
-- Not infix, with place holders for coercion and free vars
@@ -574,14 +568,13 @@ mkMatch pats expr binds
where
paren lp@(L l p) | hsPatNeedsParens p = L l (ParPat lp)
| otherwise = lp
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Collecting binders
-%* *
-%************************************************************************
+* *
+************************************************************************
Get all the binders in some HsBindGroups, IN THE ORDER OF APPEARANCE. eg.
@@ -599,8 +592,8 @@ These functions should only be used on HsSyn *after* the renamer,
to return a [Name] or [Id]. Before renaming the record punning
and wild-card mechanism makes it hard to know what is bound.
So these functions should not be applied to (HsSyn RdrName)
+-}
-\begin{code}
----------------- Bindings --------------------------
collectLocalBinders :: HsLocalBindsLR idL idR -> [idL]
collectLocalBinders (HsValBinds val_binds) = collectHsValBinders val_binds
@@ -703,8 +696,8 @@ collect_lpat (L _ pat) bndrs
go (SplicePat _) = bndrs
go (QuasiQuotePat _) = bndrs
go (CoPat _ pat _) = go pat
-\end{code}
+{-
Note [Dictionary binders in ConPatOut] See also same Note in DsArrows
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Do *not* gather (a) dictionary and (b) dictionary bindings as binders
@@ -730,8 +723,8 @@ Here, the pattern (C (n+1)) binds a hidden dictionary (d::Num a),
and *also* uses that dictionary to match the (n+1) pattern. Yet, the
variables bound by the lazy pattern are n,m, *not* the dictionary d.
So in mkSelectorBinds in DsUtils, we want just m,n as the variables bound.
+-}
-\begin{code}
hsGroupBinders :: HsGroup Name -> [Name]
hsGroupBinders (HsGroup { hs_valds = val_decls, hs_tyclds = tycl_decls,
hs_instds = inst_decls, hs_fords = foreign_decls })
@@ -817,8 +810,7 @@ hsConDeclsBinders cons = go id cons
L loc (ConDecl { con_names = names }) ->
(map (L loc . unLoc) names) ++ go remSeen rs
-\end{code}
-
+{-
Note [Binders in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a type or data family instance declaration, the type
@@ -827,19 +819,19 @@ constructor is an *occurrence* not a binding site
data instance S Bool = S1 | S2 -- Binders are S1,S2
-%************************************************************************
-%* *
+************************************************************************
+* *
Collecting binders the user did not write
-%* *
-%************************************************************************
+* *
+************************************************************************
The job of this family of functions is to run through binding sites and find the set of all Names
that were defined "implicitly", without being explicitly written by the user.
The main purpose is to find names introduced by record wildcards so that we can avoid
warning the user when they don't use those names (#4404)
+-}
-\begin{code}
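
A minimal example of the record-wildcard situation behind #4404; Config, host, port and describe are invented names:

    {-# LANGUAGE RecordWildCards #-}

    data Config = Config { host :: String, port :: Int }

    -- The ".." implicitly binds both 'host' and 'port'. Only 'host' is
    -- used, and the functions below exist precisely so that the unused,
    -- implicitly-bound 'port' does not trigger an unused-binding warning.
    describe :: Config -> String
    describe Config{..} = "server at " ++ host
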
lStmtsImplicits :: [LStmtLR Name idR (Located (body idR))] -> NameSet
lStmtsImplicits = hs_lstmts
where
@@ -903,4 +895,3 @@ lPatImplicits = hs_lpat
(unLoc fld)
pat_explicit = maybe True (i<) (rec_dotdot fs)]
details (InfixCon p1 p2) = hs_lpat p1 `unionNameSet` hs_lpat p2
-\end{code}
diff --git a/compiler/iface/BuildTyCl.lhs b/compiler/iface/BuildTyCl.hs
index 460dc2b603..33be51ff7f 100644
--- a/compiler/iface/BuildTyCl.lhs
+++ b/compiler/iface/BuildTyCl.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module BuildTyCl (
@@ -41,10 +40,7 @@ import TcRnMonad
import UniqSupply
import Util
import Outputable
-\end{code}
-
-\begin{code}
------------------------------------------------------
buildSynonymTyCon :: Name -> [TyVar] -> [Role]
-> Type
@@ -213,11 +209,9 @@ buildPatSyn src_name declared_infix matcher@(matcher_id,_) builder
([pat_ty', cont_sigma, _], _) = tcSplitFunTys tau
(ex_tvs', prov_theta', cont_tau) = tcSplitSigmaTy cont_sigma
(arg_tys', _) = tcSplitFunTys cont_tau
-\end{code}
+-- ------------------------------------------------------
-------------------------------------------------------
-\begin{code}
type TcMethInfo = (Name, DefMethSpec, Type)
-- A temporary intermediate, to communicate between
-- tcClassSigs and buildClass.
@@ -319,8 +313,8 @@ buildClass tycon_name tvs roles sc_theta fds at_items sig_stuff mindef tc_isrec
VanillaDM -> do { dm_name <- newImplicitBinder op_name mkDefaultMethodOcc
; return (DefMeth dm_name) }
; return (mkDictSelId op_name rec_clas, dm_info) }
-\end{code}
+{-
Note [Class newtypes and equality predicates]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
@@ -336,3 +330,4 @@ Moreover,
Here we can't use a newtype either, even though there is only
one field, because equality predicates are unboxed, and classes
are boxed.
+-}
diff --git a/compiler/iface/IfaceEnv.lhs b/compiler/iface/IfaceEnv.hs
index 6c93f50456..efd4956b70 100644
--- a/compiler/iface/IfaceEnv.lhs
+++ b/compiler/iface/IfaceEnv.hs
@@ -1,6 +1,5 @@
-(c) The University of Glasgow 2002-2006
+-- (c) The University of Glasgow 2002-2006
-\begin{code}
{-# LANGUAGE CPP, RankNTypes #-}
module IfaceEnv (
@@ -38,14 +37,13 @@ import Outputable
import Exception ( evaluate )
import Data.IORef ( atomicModifyIORef, readIORef )
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
Allocating new Names in the Name Cache
-%* *
-%*********************************************************
+* *
+*********************************************************
Note [The Name Cache]
~~~~~~~~~~~~~~~~~~~~~
@@ -61,9 +59,8 @@ External Name "M.x" has one, and only one globally-agreed Unique.
The functions newGlobalBinder, allocateGlobalBinder do the main work.
When you make an External name, you should probably be calling one
of them.
+-}
-
-\begin{code}
newGlobalBinder :: Module -> OccName -> SrcSpan -> TcRnIf a b Name
-- Used for source code and interface files, to make the
-- Name for a thing, given its Module and OccName
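
A sketch of the intended calling pattern, assuming the imports already available in this module (Module, OccName, SrcLoc, the TcRnIf monad); mkFooName and the OccName "foo" are invented:

    -- Minting the one true Name for M.foo goes through the name cache, so a
    -- later lookupOrig of the same (Module, OccName) pair yields a Name
    -- carrying the same globally-agreed Unique.
    mkFooName :: Module -> TcRnIf gbl lcl Name
    mkFooName mod = newGlobalBinder mod (mkVarOcc "foo") noSrcSpan
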
@@ -165,13 +162,13 @@ lookupOrig mod occ
new_cache = extendNameCache (nsNames name_cache) mod occ name
in (name_cache{ nsUniqs = us, nsNames = new_cache }, name)
}}}
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Name cache access
-%* *
-%************************************************************************
+* *
+************************************************************************
See Note [The Name Cache] above.
@@ -192,8 +189,8 @@ However, there are two reasons why we might look up an Orig RdrName:
(DsMeta.globalVar), and parses a NameG into an Orig RdrName
(Convert.thRdrName). So, eg $(do { reify '(,); ... }) will
go this route (Trac #8954).
+-}
-\begin{code}
lookupOrigNameCache :: OrigNameCache -> Module -> OccName -> Maybe Name
lookupOrigNameCache nc mod occ
| Just name <- isBuiltInOcc_maybe occ
@@ -240,10 +237,7 @@ mkNameCacheUpdater = do
_ <- evaluate =<< readIORef nc_var
return r
return (NCU update_nc)
-\end{code}
-
-\begin{code}
initNameCache :: UniqSupply -> [Name] -> NameCache
initNameCache us names
= NameCache { nsUniqs = us,
@@ -251,17 +245,15 @@ initNameCache us names
initOrigNames :: [Name] -> OrigNameCache
initOrigNames names = foldl extendOrigNameCache emptyModuleEnv names
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Type variables and local Ids
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tcIfaceLclId :: FastString -> IfL Id
tcIfaceLclId occ
= do { lcl <- getLclEnv
@@ -297,16 +289,15 @@ extendIfaceTyVarEnv tyvars thing_inside
; let { tv_env' = addListToUFM (if_tv_env env) pairs
; pairs = [(occNameFS (getOccName tv), tv) | tv <- tyvars] }
; setLclEnv (env { if_tv_env = tv_env' }) thing_inside }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Getting from RdrNames to Names
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
lookupIfaceTop :: OccName -> IfL Name
-- Look up a top-level name from the current Iface module
lookupIfaceTop occ
@@ -322,4 +313,3 @@ newIfaceNames occs
= do { uniqs <- newUniqueSupply
; return [ mkInternalName uniq occ noSrcSpan
| (occ,uniq) <- occs `zip` uniqsFromSupply uniqs] }
-\end{code}
diff --git a/compiler/iface/IfaceSyn.lhs b/compiler/iface/IfaceSyn.hs
index 98bfae9f81..6bb34838c5 100644
--- a/compiler/iface/IfaceSyn.lhs
+++ b/compiler/iface/IfaceSyn.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module IfaceSyn (
@@ -63,16 +62,15 @@ import System.IO.Unsafe
import Data.Maybe (isJust)
infixl 3 &&&
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Declarations
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type IfaceTopBndr = OccName
-- It's convenient to have an OccName in the IfaceSyn, although in each
-- case the namespace is implied by the context. However, having an
@@ -214,7 +212,7 @@ data IfaceClsInst
ifInstTys :: [Maybe IfaceTyCon], -- the defn of ClsInst
ifDFun :: IfExtName, -- The dfun
ifOFlag :: OverlapFlag, -- Overlap flag
- ifInstOrph :: IsOrphan } -- See Note [Orphans]
+ ifInstOrph :: IsOrphan } -- See Note [Orphans] in InstEnv
-- There's always a separate IfaceDecl for the DFun, which gives
-- its IdInfo with its full type and version number.
-- The instance declarations taken together have a version number,
@@ -228,7 +226,7 @@ data IfaceFamInst
= IfaceFamInst { ifFamInstFam :: IfExtName -- Family name
, ifFamInstTys :: [Maybe IfaceTyCon] -- See above
, ifFamInstAxiom :: IfExtName -- The axiom
- , ifFamInstOrph :: IsOrphan -- Just like IfaceClsInst
+ , ifFamInstOrph :: IsOrphan -- Just like IfaceClsInst
}
data IfaceRule
@@ -300,100 +298,26 @@ data IfaceIdDetails
= IfVanillaId
| IfRecSelId IfaceTyCon Bool
| IfDFunId Int -- Number of silent args
-\end{code}
-
-
-Note [Orphans]: the ifInstOrph and ifRuleOrph fields
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Class instances, rules, and family instances are divided into orphans
-and non-orphans. Roughly speaking, an instance/rule is an orphan if
-its left hand side mentions nothing defined in this module. Orphan-hood
-has two major consequences
-
- * A non-orphan is not finger-printed separately. Instead, for
- fingerprinting purposes it is treated as part of the entity it
- mentions on the LHS. For example
- data T = T1 | T2
- instance Eq T where ....
- The instance (Eq T) is incorprated as part of T's fingerprint.
-
- In constrast, orphans are all fingerprinted together in the
- mi_orph_hash field of the ModIface.
-
- See MkIface.addFingerprints.
-
- * A module that contains orphans is called an "orphan module". If
- the module being compiled depends (transitively) on an oprhan
- module M, then M.hi is read in regardless of whether M is oherwise
- needed. This is to ensure that we don't miss any instance decls in
- M. But it's painful, because it means we need to keep track of all
- the orphan modules below us.
-
-Orphan-hood is computed when we generate an IfaceInst, IfaceRule, or
-IfaceFamInst respectively:
-
- - If an instance is an orphan its ifInstOprh field is Nothing
- Otherwise ifInstOrph is (Just n) where n is the Name of a
- local class or tycon that witnesses its non-orphan-hood.
- This computation is done by MkIface.instanceToIfaceInst
-
- - Similarly for ifRuleOrph
- The computation is done by MkIface.coreRuleToIfaceRule
-
-Note [When exactly is an instance decl an orphan?]
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- (see MkIface.instanceToIfaceInst, which implements this)
-Roughly speaking, an instance is an orphan if its head (after the =>)
-mentions nothing defined in this module.
-
-Functional dependencies complicate the situation though. Consider
-
- module M where { class C a b | a -> b }
-
-and suppose we are compiling module X:
-
- module X where
- import M
- data T = ...
- instance C Int T where ...
-
-This instance is an orphan, because when compiling a third module Y we
-might get a constraint (C Int v), and we'd want to improve v to T. So
-we must make sure X's instances are loaded, even if we do not directly
-use anything from X.
-
-More precisely, an instance is an orphan iff
-
- If there are no fundeps, then at least of the names in
- the instance head is locally defined.
-
- If there are fundeps, then for every fundep, at least one of the
- names free in a *non-determined* part of the instance head is
- defined in this module.
-
-(Note that these conditions hold trivially if the class is locally
-defined.)
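
For readers following the orphan discussion (now kept with Note [Orphans] in InstEnv), a minimal picture of the classic orphan case; the modules M and N, the class C with its method, and the type T are all invented:

    module X where

    import M (C (..))   -- class C defined in module M
    import N (T)        -- data type T defined in module N

    -- Neither C nor T is defined in X, so the instance head mentions
    -- nothing local: it is an orphan, X is an orphan module, and X.hi must
    -- be read by anything that transitively depends on X.
    instance C T where
      method _ = 0
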
+{-
Note [Versioning of instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See [http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/RecompilationAvoidance#Instances]
-%************************************************************************
-%* *
+************************************************************************
+* *
Functions over declarations
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
visibleIfConDecls :: IfaceConDecls -> [IfaceConDecl]
visibleIfConDecls (IfAbstractTyCon {}) = []
visibleIfConDecls IfDataFamTyCon = []
visibleIfConDecls (IfDataTyCon cs) = cs
visibleIfConDecls (IfNewTyCon c) = [c]
-\end{code}
-\begin{code}
ifaceDeclImplicitBndrs :: IfaceDecl -> [OccName]
-- *Excludes* the 'main' name, but *includes* the implicitly-bound names
-- Deeply revolting, because it has to predict what gets bound,
@@ -474,15 +398,15 @@ ifaceDeclFingerprints hash decl
computeFingerprint' =
unsafeDupablePerformIO
. computeFingerprint (panic "ifaceDeclFingerprints")
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Expressions
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data IfaceExpr
= IfaceLcl IfLclName
| IfaceExt IfExtName
@@ -521,8 +445,8 @@ data IfaceBinding
-- It's used for *non-top-level* let/rec binders
-- See Note [IdInfo on nested let-bindings]
data IfaceLetBndr = IfLetBndr IfLclName IfaceType IfaceIdInfo
-\end{code}
+{-
Note [Empty case alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In IfaceSyn an IfaceCase does not record the types of the alternatives,
@@ -547,13 +471,13 @@ In general we retain all info that is left by CoreTidy.tidyLetBndr, since
that is what is seen by importing module with --make
-%************************************************************************
-%* *
+************************************************************************
+* *
Printing IfaceDecl
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
pprAxBranch :: SDoc -> IfaceAxBranch -> SDoc
-- The TyCon might be local (just an OccName), or this might
-- be a branch for an imported TyCon, so it would be an ExtName
@@ -615,8 +539,8 @@ showSub :: HasOccName n => ShowSub -> n -> Bool
showSub (ShowSub { ss_how_much = ShowHeader }) _ = False
showSub (ShowSub { ss_how_much = ShowSome (n:_) }) thing = n == occName thing
showSub (ShowSub { ss_how_much = _ }) _ = True
-\end{code}
+{-
Note [Printing IfaceDecl binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The binders in an IfaceDecl are just OccNames, so we don't know what module they
@@ -627,8 +551,8 @@ binders.
When printing an interface file (--show-iface), we want to print
everything unqualified, so we can just print the OccName directly.
+-}
-\begin{code}
ppr_trim :: [Maybe SDoc] -> [SDoc]
-- Collapse a group of Nothings to a single "..."
ppr_trim xs
@@ -931,8 +855,8 @@ instance Outputable IfaceFamInst where
ppr_rough :: Maybe IfaceTyCon -> SDoc
ppr_rough Nothing = dot
ppr_rough (Just tc) = ppr tc
-\end{code}
+{-
Note [Result type of a data family GADT]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
@@ -955,8 +879,8 @@ Remember that in IfaceSyn, the TyCon and DataCon share the same
universal type variables.
----------------------------- Printing IfaceExpr ------------------------------------
+-}
-\begin{code}
instance Outputable IfaceExpr where
ppr e = pprIfaceExpr noParens e
@@ -1092,13 +1016,13 @@ instance Outputable IfaceUnfolding where
pprParendIfaceExpr e]
ppr (IfDFunUnfold bs es) = hang (ptext (sLit "DFun:") <+> sep (map ppr bs) <> dot)
2 (sep (map pprParendIfaceExpr es))
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Finding the Names in IfaceSyn
-%* *
-%************************************************************************
+* *
+************************************************************************
This is used for dependency analysis in MkIface, so that we
fingerprint a declaration before the things that depend on it. It
@@ -1106,8 +1030,8 @@ is specific to interface-file fingerprinting in the sense that we
don't collect *all* Names: for example, the DFun of an instance is
recorded textually rather than by its fingerprint when
fingerprinting the instance, so DFuns are not dependencies.
+-}
-\begin{code}
freeNamesIfDecl :: IfaceDecl -> NameSet
freeNamesIfDecl (IfaceId _s t d i) =
freeNamesIfType t &&&
@@ -1340,8 +1264,8 @@ freeNamesIfaceTyConParent (IfDataInstance ax tc tys)
fnList :: (a -> NameSet) -> [a] -> NameSet
fnList f = foldr (&&&) emptyNameSet . map f
-\end{code}
+{-
Note [Tracking data constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a case expression
@@ -1368,13 +1292,13 @@ on the *locally-defined* type PackageState is not visible. We need
to take account of the use of the data constructor PS in the pattern match.
-%************************************************************************
-%* *
+************************************************************************
+* *
Binary instances
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Binary IfaceDecl where
put_ bh (IfaceId name ty details idinfo) = do
putByte bh 0
@@ -1910,4 +1834,3 @@ instance Binary IfaceTyConParent where
pr <- get bh
ty <- get bh
return $ IfDataInstance ax pr ty
-\end{code}
diff --git a/compiler/iface/IfaceType.lhs b/compiler/iface/IfaceType.hs
index 223a25b8b4..534545372f 100644
--- a/compiler/iface/IfaceType.lhs
+++ b/compiler/iface/IfaceType.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
+
This module defines interface types and binders
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module IfaceType (
IfExtName, IfLclName,
@@ -65,15 +65,15 @@ import Outputable
import FastString
import UniqSet
import Data.Maybe( fromMaybe )
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Local (nested) binders
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type IfLclName = FastString -- A local name in iface syntax
type IfExtName = Name -- An External or WiredIn Name can appear in IfaceSyn
@@ -150,17 +150,14 @@ data IfaceCoercion
| IfaceSubCo IfaceCoercion
| IfaceAxiomRuleCo IfLclName [IfaceType] [IfaceCoercion]
-
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Functions over IFaceTypes
-%* *
-%************************************************************************
-
+* *
+************************************************************************
+-}
-\begin{code}
splitIfaceSigmaTy :: IfaceType -> ([IfaceTvBndr], [IfacePredType], IfaceType)
-- Mainly for printing purposes
splitIfaceSigmaTy ty
@@ -219,13 +216,13 @@ ifTyVarsOfArgs args = argv emptyUniqSet args
argv vs (ITC_Type t ts) = argv (vs `unionUniqSets` (ifTyVarsOfType t)) ts
argv vs (ITC_Kind k ks) = argv (vs `unionUniqSets` (ifTyVarsOfType k)) ks
argv vs ITC_Nil = vs
-\end{code}
+{-
Substitutions on IfaceType. This is only used during pretty-printing to construct
the result type of a GADT, and does not deal with binders (eg IfaceForAll), so
it doesn't need fancy capture stuff.
+-}
-\begin{code}
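
A small source-level example of the "result type of a GADT" being referred to; T and MkT are invented names:

    {-# LANGUAGE GADTs #-}

    data T a b where
      MkT :: Char -> T Bool Int

    -- When pretty-printing T's interface, the result type "T Bool Int" of
    -- MkT is rebuilt by substituting a := Bool, b := Int into "T a b";
    -- no binders are involved, so a simple capture-free substitution
    -- (mkIfaceTySubst / substIfaceType) suffices.
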
type IfaceTySubst = FastStringEnv IfaceType
mkIfaceTySubst :: [IfaceTvBndr] -> [IfaceType] -> IfaceTySubst
@@ -255,16 +252,15 @@ substIfaceTyVar :: IfaceTySubst -> IfLclName -> IfaceType
substIfaceTyVar env tv
| Just ty <- lookupFsEnv env tv = ty
| otherwise = IfaceTyVar tv
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Functions over IFaceTcArgs
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-
-\begin{code}
stripKindArgs :: DynFlags -> IfaceTcArgs -> IfaceTcArgs
stripKindArgs dflags tys
| gopt Opt_PrintExplicitKinds dflags = tys
@@ -290,8 +286,8 @@ tcArgsIfaceTypes :: IfaceTcArgs -> [IfaceType]
tcArgsIfaceTypes ITC_Nil = []
tcArgsIfaceTypes (ITC_Kind t ts) = t : tcArgsIfaceTypes ts
tcArgsIfaceTypes (ITC_Type t ts) = t : tcArgsIfaceTypes ts
-\end{code}
+{-
Note [Suppressing kinds]
~~~~~~~~~~~~~~~~~~~~~~~~
We use the IfaceTcArgs to specify which of the arguments to a type
@@ -306,24 +302,25 @@ we want
'Just * prints as Just *
-%************************************************************************
-%* *
+************************************************************************
+* *
Functions over IFaceTyCon
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
--isPromotedIfaceTyCon :: IfaceTyCon -> Bool
--isPromotedIfaceTyCon (IfacePromotedTyCon _) = True
--isPromotedIfaceTyCon _ = False
-\end{code}
-%************************************************************************
-%* *
+
+{-
+************************************************************************
+* *
Pretty-printing
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
pprIfaceInfixApp :: (TyPrec -> a -> SDoc) -> TyPrec -> SDoc -> a -> a -> SDoc
pprIfaceInfixApp pp p pp_tc ty1 ty2
= maybeParen p FunPrec $
@@ -334,12 +331,9 @@ pprIfacePrefixApp p pp_fun pp_tys
| null pp_tys = pp_fun
| otherwise = maybeParen p TyConPrec $
hang pp_fun 2 (sep pp_tys)
-\end{code}
+-- ----------------------------- Printing binders ------------------------------------
------------------------------ Printing binders ------------------------------------
-
-\begin{code}
instance Outputable IfaceBndr where
ppr (IfaceIdBndr bndr) = pprIfaceIdBndr bndr
ppr (IfaceTvBndr bndr) = char '@' <+> pprIfaceTvBndr bndr
@@ -387,11 +381,9 @@ instance Binary IfaceOneShot where
case h of
0 -> do return IfaceNoOneShot
_ -> do return IfaceOneShot
-\end{code}
------------------------------ Printing IfaceType ------------------------------------
+-- ----------------------------- Printing IfaceType ------------------------------------
-\begin{code}
---------------------------------
instance Outputable IfaceType where
ppr ty = pprIfaceType ty
@@ -881,15 +873,14 @@ instance Binary IfaceCoercion where
return $ IfaceAxiomRuleCo a b c
_ -> panic ("get IfaceCoercion " ++ show tag)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Conversion from Type to IfaceType
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
----------------
toIfaceTvBndr :: TyVar -> (IfLclName, IfaceType)
toIfaceTvBndr tyvar = (occNameFS (getOccName tyvar), toIfaceKind (tyVarKind tyvar))
@@ -978,4 +969,3 @@ toIfaceCoercion (AxiomRuleCo co ts cs) = IfaceAxiomRuleCo
(coaxrName co)
(map toIfaceType ts)
(map toIfaceCoercion cs)
-\end{code}
diff --git a/compiler/iface/LoadIface.lhs b/compiler/iface/LoadIface.hs
index 250ef2f182..34ae3d507f 100644
--- a/compiler/iface/LoadIface.lhs
+++ b/compiler/iface/LoadIface.hs
@@ -1,21 +1,21 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
Loading interface files
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module LoadIface (
-- RnM/TcM functions
- loadModuleInterface, loadModuleInterfaces,
- loadSrcInterface, loadSrcInterface_maybe,
+ loadModuleInterface, loadModuleInterfaces,
+ loadSrcInterface, loadSrcInterface_maybe,
loadInterfaceForName, loadInterfaceForModule,
-- IfM functions
- loadInterface, loadWiredInHomeIface,
+ loadInterface, loadWiredInHomeIface,
loadSysInterface, loadUserInterface, loadPluginInterface,
findAndReadIface, readIface, -- Used when reading the module's old interface
loadDecls, -- Should move to TcIface and be renamed
@@ -26,7 +26,7 @@ module LoadIface (
#include "HsVersions.h"
-import {-# SOURCE #-} TcIface( tcIfaceDecl, tcIfaceRules, tcIfaceInst,
+import {-# SOURCE #-} TcIface( tcIfaceDecl, tcIfaceRules, tcIfaceInst,
tcIfaceFamInst, tcIfaceVectInfo, tcIfaceAnnotations )
import DynFlags
@@ -66,18 +66,17 @@ import Hooks
import Control.Monad
import Data.IORef
import System.FilePath
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
loadSrcInterface, loadOrphanModules, loadInterfaceForName
- These three are called from TcM-land
-%* *
-%************************************************************************
+ These three are called from TcM-land
+* *
+************************************************************************
+-}
-\begin{code}
-- Note [Un-ambiguous multiple interfaces]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- When a user writes an import statement, this usually causes a *single*
@@ -110,7 +109,7 @@ import System.FilePath
-- two signatures are the same (a condition which is checked by 'Packages'.)
--- | Load the interface corresponding to an @import@ directive in
+-- | Load the interface corresponding to an @import@ directive in
-- source code. On a failure, fail in the monad with an error message.
-- See Note [Un-ambiguous multiple interfaces] for why the return type
-- is @[ModIface]@
@@ -167,13 +166,13 @@ loadModuleInterfaces doc mods
-- | Loads the interface for a given Name.
loadInterfaceForName :: SDoc -> Name -> TcRn ModIface
loadInterfaceForName doc name
- = do {
+ = do {
when debugIsOn $ do
-- Should not be called with a name from the module being compiled
{ this_mod <- getModule
; MASSERT2( not (nameIsLocalOrFrom this_mod name), ppr name <+> parens doc )
}
- ; ASSERT2( isExternalName name, ppr name )
+ ; ASSERT2( isExternalName name, ppr name )
initIfaceTcRn $ loadSysInterface doc (nameModule name)
}
@@ -186,20 +185,19 @@ loadInterfaceForModule doc m
this_mod <- getModule
MASSERT2( this_mod /= m, ppr m <+> parens doc )
initIfaceTcRn $ loadSysInterface doc m
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
loadInterface
The main function to load an interface
for an imported module, and put it in
the External Package State
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
-- | An 'IfM' function to load the home interface for a wired-in thing,
-- so that we're sure that we see its instance declarations and rules
-- See Note [Loading instances for wired-in things] in TcIface
@@ -219,7 +217,7 @@ loadSysInterface doc mod_name = loadInterfaceWithException doc mod_name ImportBy
-- | Loads a user interface and throws an exception if it fails. The first parameter indicates
-- whether we should import the boot variant of the module
loadUserInterface :: Bool -> SDoc -> Module -> IfM lcl ModIface
-loadUserInterface is_boot doc mod_name
+loadUserInterface is_boot doc mod_name
= loadInterfaceWithException doc mod_name (ImportByUser is_boot)
loadPluginInterface :: SDoc -> Module -> IfM lcl ModIface
@@ -232,7 +230,7 @@ loadInterfaceWithException :: SDoc -> Module -> WhereFrom -> IfM lcl ModIface
loadInterfaceWithException doc mod_name where_from
= do { mb_iface <- loadInterface doc mod_name where_from
; dflags <- getDynFlags
- ; case mb_iface of
+ ; case mb_iface of
Failed err -> liftIO $ throwGhcExceptionIO (ProgramError (showSDoc dflags err))
Succeeded iface -> return iface }
@@ -241,7 +239,7 @@ loadInterface :: SDoc -> Module -> WhereFrom
-> IfM lcl (MaybeErr MsgDoc ModIface)
-- loadInterface looks in both the HPT and PIT for the required interface
--- If not found, it loads it, and puts it in the PIT (always).
+-- If not found, it loads it, and puts it in the PIT (always).
-- If it can't find a suitable interface file, we
-- a) modify the PackageIfaceTable to have an empty entry
@@ -249,7 +247,7 @@ loadInterface :: SDoc -> Module -> WhereFrom
-- b) return (Left message)
--
-- It's not necessarily an error for there not to be an interface
--- file -- perhaps the module has changed, and that interface
+-- file -- perhaps the module has changed, and that interface
-- is no longer used
loadInterface doc_str mod from
@@ -261,7 +259,7 @@ loadInterface doc_str mod from
-- Check whether we have the interface already
; dflags <- getDynFlags
; case lookupIfaceByModule dflags hpt (eps_PIT eps) mod of {
- Just iface
+ Just iface
-> return (Succeeded iface) ; -- Already loaded
-- The (src_imp == mi_boot iface) test checks that the already-loaded
-- interface isn't a boot iface. This can conceivably happen,
@@ -278,9 +276,9 @@ loadInterface doc_str mod from
; updateEps_ $ \eps ->
eps { eps_PIT = extendModuleEnv (eps_PIT eps) (mi_module fake_iface) fake_iface }
- -- Not found, so add an empty iface to
+ -- Not found, so add an empty iface to
-- the EPS map so that we don't look again
-
+
; return (Failed err) } ;
-- Found and parsed!
@@ -294,14 +292,14 @@ loadInterface doc_str mod from
-- Template Haskell original-name).
Succeeded (iface, file_path) ->
- let
+ let
loc_doc = text file_path
- in
+ in
initIfaceLcl mod loc_doc $ do
-- Load the new ModIface into the External Package State
- -- Even home-package interfaces loaded by loadInterface
- -- (which only happens in OneShot mode; in Batch/Interactive
+ -- Even home-package interfaces loaded by loadInterface
+ -- (which only happens in OneShot mode; in Batch/Interactive
-- mode, home-package modules are loaded one by one into the HPT)
-- are put in the EPS.
--
@@ -323,7 +321,7 @@ loadInterface doc_str mod from
; new_eps_anns <- tcIfaceAnnotations (mi_anns iface)
; new_eps_vect_info <- tcIfaceVectInfo mod (mkNameEnv new_eps_decls) (mi_vect_info iface)
- ; let { final_iface = iface {
+ ; let { final_iface = iface {
mi_decls = panic "No mi_decls in PIT",
mi_insts = panic "No mi_insts in PIT",
mi_fam_insts = panic "No mi_fam_insts in PIT",
@@ -332,7 +330,7 @@ loadInterface doc_str mod from
}
}
- ; updateEps_ $ \ eps ->
+ ; updateEps_ $ \ eps ->
if elemModuleEnv mod (eps_PIT eps) then eps else
case from of -- See Note [Care with plugin imports]
ImportByPlugin -> eps {
@@ -341,26 +339,26 @@ loadInterface doc_str mod from
_ -> eps {
eps_PIT = extendModuleEnv (eps_PIT eps) mod final_iface,
eps_PTE = addDeclsToPTE (eps_PTE eps) new_eps_decls,
- eps_rule_base = extendRuleBaseList (eps_rule_base eps)
+ eps_rule_base = extendRuleBaseList (eps_rule_base eps)
new_eps_rules,
- eps_inst_env = extendInstEnvList (eps_inst_env eps)
+ eps_inst_env = extendInstEnvList (eps_inst_env eps)
new_eps_insts,
eps_fam_inst_env = extendFamInstEnvList (eps_fam_inst_env eps)
new_eps_fam_insts,
- eps_vect_info = plusVectInfo (eps_vect_info eps)
+ eps_vect_info = plusVectInfo (eps_vect_info eps)
new_eps_vect_info,
eps_ann_env = extendAnnEnvList (eps_ann_env eps)
new_eps_anns,
eps_mod_fam_inst_env
= let
- fam_inst_env =
+ fam_inst_env =
extendFamInstEnvList emptyFamInstEnv
new_eps_fam_insts
in
extendModuleEnv (eps_mod_fam_inst_env eps)
mod
fam_inst_env,
- eps_stats = addEpsInStats (eps_stats eps)
+ eps_stats = addEpsInStats (eps_stats eps)
(length new_eps_decls)
(length new_eps_insts)
(length new_eps_rules) }
@@ -373,7 +371,7 @@ wantHiBootFile :: DynFlags -> ExternalPackageState -> Module -> WhereFrom
-- Figure out whether we want Foo.hi or Foo.hi-boot
wantHiBootFile dflags eps mod from
= case from of
- ImportByUser usr_boot
+ ImportByUser usr_boot
| usr_boot && not this_package
-> Failed (badSourceImport mod)
| otherwise -> Succeeded usr_boot
@@ -384,14 +382,14 @@ wantHiBootFile dflags eps mod from
ImportBySystem
| not this_package -- If the module to be imported is not from this package
-> Succeeded False -- don't look it up in eps_is_boot, because that is keyed
- -- on the ModuleName of *home-package* modules only.
+ -- on the ModuleName of *home-package* modules only.
-- We never import boot modules from other packages!
| otherwise
-> case lookupUFM (eps_is_boot eps) (moduleName mod) of
Just (_, is_boot) -> Succeeded is_boot
Nothing -> Succeeded False
- -- The boot-ness of the requested interface,
+ -- The boot-ness of the requested interface,
-- based on the dependencies in directly-imported modules
where
this_package = thisPackage dflags == modulePackageKey mod
@@ -401,13 +399,13 @@ badSourceImport mod
= hang (ptext (sLit "You cannot {-# SOURCE #-} import a module from another package"))
2 (ptext (sLit "but") <+> quotes (ppr mod) <+> ptext (sLit "is from package")
<+> quotes (ppr (modulePackageKey mod)))
-\end{code}
+{-
Note [Care with plugin imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When dynamically loading a plugin (via loadPluginInterface) we
populate the same External Package State (EPS), even though plugin
-modules are to link with the compiler itself, and not with the
+modules are to link with the compiler itself, and not with the
compiled program. That's fine: mostly the EPS is just a cache for
the interface files on disk.
@@ -421,9 +419,8 @@ Solution: when loading plugins, do not extend the rule and instance
environments. We are only interested in the type environment, so that
we can check that the plugin exports a function with the type that the
compiler expects.
+-}
-
-\begin{code}
-----------------------------------------------------
-- Loading type/class/value decls
-- We pass the full Module name here, replete with
@@ -455,13 +452,13 @@ loadDecl :: Bool -- Don't load pragmas into the decl pool
-> IfL [(Name,TyThing)] -- The list can be poked eagerly, but the
-- TyThings are forkM'd thunks
loadDecl ignore_prags mod (_version, decl)
- = do { -- Populate the name cache with final versions of all
+ = do { -- Populate the name cache with final versions of all
-- the names associated with the decl
main_name <- lookupOrig mod (ifName decl)
-- Typecheck the thing, lazily
-- NB. Firstly, the laziness is there in case we never need the
- -- declaration (in one-shot mode), and secondly it is there so that
+ -- declaration (in one-shot mode), and secondly it is there so that
-- we don't look up the occurrence of a name before calling mk_new_bndr
-- on the binder. This is important because we must get the right name
-- which includes its nameParent.
@@ -470,7 +467,7 @@ loadDecl ignore_prags mod (_version, decl)
; tcIfaceDecl ignore_prags decl }
-- Populate the type environment with the implicitTyThings too.
- --
+ --
-- Note [Tricky iface loop]
-- ~~~~~~~~~~~~~~~~~~~~~~~~
-- Summary: The delicate point here is that 'mini-env' must be
@@ -481,8 +478,8 @@ loadDecl ignore_prags mod (_version, decl)
-- data T a = MkT { x :: T a }
-- The implicitTyThings of T are: [ <datacon MkT>, <selector x>]
-- (plus their workers, wrappers, coercions etc etc)
- --
- -- We want to return an environment
+ --
+ -- We want to return an environment
-- [ "MkT" -> <datacon MkT>, "x" -> <selector x>, ... ]
-- (where the "MkT" is the *Name* associated with MkT, etc.)
--
@@ -498,7 +495,7 @@ loadDecl ignore_prags mod (_version, decl)
--
-- However, there is a subtlety: due to how type checking needs
-- to be staged, we can't poke on the forkM'd thunks inside the
- -- implicitTyThings while building this mini-env.
+ -- implicitTyThings while building this mini-env.
-- If we poke these thunks too early, two problems could happen:
-- (1) When processing mutually recursive modules across
-- hs-boot boundaries, poking too early will do the
@@ -506,7 +503,7 @@ loadDecl ignore_prags mod (_version, decl)
-- so things will be type-checked in the wrong
-- environment, and necessary variables won't be in
-- scope.
- --
+ --
-- (2) Looking up one OccName in the mini_env will cause
-- others to be looked up, which might cause that
-- original one to be looked up again, and hence loop.
@@ -527,7 +524,7 @@ loadDecl ignore_prags mod (_version, decl)
; let mini_env = mkOccEnv [(getOccName t, t) | t <- implicitTyThings thing]
lookup n = case lookupOccEnv mini_env (getOccName n) of
Just thing -> thing
- Nothing ->
+ Nothing ->
pprPanic "loadDecl" (ppr main_name <+> ppr n $$ ppr (decl))
; implicit_names <- mapM (lookupOrig mod) (ifaceDeclImplicitBndrs decl)
@@ -547,14 +544,13 @@ bumpDeclStats name
; updateEps_ (\eps -> let stats = eps_stats eps
in eps { eps_stats = stats { n_decls_out = n_decls_out stats + 1 } })
}
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Reading an interface file}
-%* *
-%*********************************************************
+* *
+*********************************************************
Note [Home module load error]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -566,32 +562,32 @@ in one-shot mode; see notes with hsc_HPT decl in HscTypes).
It is possible (though hard) to get this error through user behaviour.
* Suppose package P (modules P1, P2) depends on package Q (modules Q1,
Q2, with Q2 importing Q1)
- * We compile both packages.
+ * We compile both packages.
* Now we edit package Q so that it somehow depends on P
- * Now recompile Q with --make (without recompiling P).
+ * Now recompile Q with --make (without recompiling P).
* Then Q1 imports, say, P1, which in turn depends on Q2. So Q2
is a home-package module which is not yet in the HPT! Disaster.
This actually happened with P=base, Q=ghc-prim, via the AMP warnings.
See Trac #8320.
+-}
-\begin{code}
findAndReadIface :: SDoc -> Module
-> IsBootInterface -- True <=> Look for a .hi-boot file
-- False <=> Look for .hi file
-> TcRnIf gbl lcl (MaybeErr MsgDoc (ModIface, FilePath))
-- Nothing <=> file not found, or unreadable, or illegible
- -- Just x <=> successfully found and parsed
+ -- Just x <=> successfully found and parsed
- -- It *doesn't* add an error to the monad, because
+ -- It *doesn't* add an error to the monad, because
-- sometimes it's ok to fail... see notes with loadInterface
findAndReadIface doc_str mod hi_boot_file
- = do traceIf (sep [hsep [ptext (sLit "Reading"),
- if hi_boot_file
- then ptext (sLit "[boot]")
+ = do traceIf (sep [hsep [ptext (sLit "Reading"),
+ if hi_boot_file
+ then ptext (sLit "[boot]")
else Outputable.empty,
- ptext (sLit "interface for"),
+ ptext (sLit "interface for"),
ppr mod <> semi],
nest 4 (ptext (sLit "reason:") <+> doc_str)])
@@ -607,7 +603,7 @@ findAndReadIface doc_str mod hi_boot_file
hsc_env <- getTopEnv
mb_found <- liftIO (findExactModule hsc_env mod)
case mb_found of
- Found loc mod -> do
+ Found loc mod -> do
-- Found file, so read it
let file_path = addBootSuffix_maybe hi_boot_file
@@ -623,14 +619,14 @@ findAndReadIface doc_str mod hi_boot_file
err -> do
traceIf (ptext (sLit "...not found"))
dflags <- getDynFlags
- return (Failed (cannotFindInterface dflags
+ return (Failed (cannotFindInterface dflags
(moduleName mod) err))
where read_file file_path = do
traceIf (ptext (sLit "readIFace") <+> text file_path)
read_result <- readIface mod file_path
case read_result of
Failed err -> return (Failed (badIfaceFile file_path err))
- Succeeded iface
+ Succeeded iface
| mi_module iface /= mod ->
return (Failed (wrongIfaceModErr iface mod file_path))
| otherwise ->
@@ -654,21 +650,19 @@ findAndReadIface doc_str mod hi_boot_file
do traceIf (text "Failed to load dynamic interface file:" $$ err)
liftIO $ writeIORef ref False
checkBuildDynamicToo _ = return ()
-\end{code}
-@readIface@ tries just the one file.
+-- @readIface@ tries just the one file.
-\begin{code}
readIface :: Module -> FilePath
-> TcRnIf gbl lcl (MaybeErr MsgDoc ModIface)
-- Failed err <=> file not found, or unreadable, or illegible
- -- Succeeded iface <=> successfully found and parsed
+ -- Succeeded iface <=> successfully found and parsed
readIface wanted_mod file_path
= do { res <- tryMostM $
readBinIface CheckHiWay QuietBinIFaceReading file_path
; case res of
- Right iface
+ Right iface
| wanted_mod == actual_mod -> return (Succeeded iface)
| otherwise -> return (Failed err)
where
@@ -677,19 +671,18 @@ readIface wanted_mod file_path
Left exn -> return (Failed (text (showException exn)))
}
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
Wired-in interface for GHC.Prim
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
initExternalPackageState :: ExternalPackageState
initExternalPackageState
- = EPS {
+ = EPS {
eps_is_boot = emptyUFM,
eps_PIT = emptyPackageIfaceTable,
eps_PTE = emptyTypeEnv,
@@ -705,16 +698,15 @@ initExternalPackageState
, n_insts_in = 0, n_insts_out = 0
, n_rules_in = length builtinRules, n_rules_out = 0 }
}
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
Wired-in interface for GHC.Prim
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
ghcPrimIface :: ModIface
ghcPrimIface
= (emptyModIface gHC_PRIM) {
@@ -722,44 +714,43 @@ ghcPrimIface
mi_decls = [],
mi_fixities = fixities,
mi_fix_fn = mkIfaceFixCache fixities
- }
+ }
where
fixities = (getOccName seqId, Fixity 0 InfixR) -- seq is infixr 0
: mapMaybe mkFixity allThePrimOps
mkFixity op = (,) (primOpOcc op) <$> primOpFixity op
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Statistics}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
ifaceStats :: ExternalPackageState -> SDoc
-ifaceStats eps
+ifaceStats eps
= hcat [text "Renamer stats: ", msg]
where
stats = eps_stats eps
- msg = vcat
+ msg = vcat
[int (n_ifaces_in stats) <+> text "interfaces read",
- hsep [ int (n_decls_out stats), text "type/class/variable imported, out of",
+ hsep [ int (n_decls_out stats), text "type/class/variable imported, out of",
int (n_decls_in stats), text "read"],
- hsep [ int (n_insts_out stats), text "instance decls imported, out of",
+ hsep [ int (n_insts_out stats), text "instance decls imported, out of",
int (n_insts_in stats), text "read"],
- hsep [ int (n_rules_out stats), text "rule decls imported, out of",
+ hsep [ int (n_rules_out stats), text "rule decls imported, out of",
int (n_rules_in stats), text "read"]
]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Printing interfaces
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Read binary interface, and print it out
showIface :: HscEnv -> FilePath -> IO ()
showIface hsc_env filename = do
@@ -769,9 +760,7 @@ showIface hsc_env filename = do
readBinIface IgnoreHiWay TraceBinIFaceReading filename
let dflags = hsc_dflags hsc_env
log_action dflags dflags SevDump noSrcSpan defaultDumpStyle (pprModIface iface)
-\end{code}
-\begin{code}
pprModIface :: ModIface -> SDoc
-- Show a ModIface
pprModIface iface
@@ -807,21 +796,21 @@ pprModIface iface
where
pp_boot | mi_boot iface = ptext (sLit "[boot]")
| otherwise = Outputable.empty
-\end{code}
+{-
When printing export lists, we print like this:
Avail f f
AvailTC C [C, x, y] C(x,y)
AvailTC C [x, y] C!(x,y) -- Exporting x, y but not C
+-}
-\begin{code}
pprExport :: IfaceExport -> SDoc
pprExport (Avail n) = ppr n
pprExport (AvailTC _ []) = Outputable.empty
-pprExport (AvailTC n (n':ns))
+pprExport (AvailTC n (n':ns))
| n==n' = ppr n <> pp_export ns
| otherwise = ppr n <> char '|' <> pp_export (n':ns)
- where
+ where
pp_export [] = Outputable.empty
pp_export names = braces (hsep (map ppr names))
@@ -865,7 +854,7 @@ pprFixities :: [(OccName, Fixity)] -> SDoc
pprFixities [] = Outputable.empty
pprFixities fixes = ptext (sLit "fixities") <+> pprWithCommas pprFix fixes
where
- pprFix (occ,fix) = ppr fix <+> ppr occ
+ pprFix (occ,fix) = ppr fix <+> ppr occ
pprVectInfo :: IfaceVectInfo -> SDoc
pprVectInfo (IfaceVectInfo { ifaceVectInfoVar = vars
@@ -873,8 +862,8 @@ pprVectInfo (IfaceVectInfo { ifaceVectInfoVar = vars
, ifaceVectInfoTyConReuse = tyconsReuse
, ifaceVectInfoParallelVars = parallelVars
, ifaceVectInfoParallelTyCons = parallelTyCons
- }) =
- vcat
+ }) =
+ vcat
[ ptext (sLit "vectorised variables:") <+> hsep (map ppr vars)
, ptext (sLit "vectorised tycons:") <+> hsep (map ppr tycons)
, ptext (sLit "vectorised reused tycons:") <+> hsep (map ppr tyconsReuse)
@@ -901,23 +890,22 @@ pprWarns (WarnSome prs) = ptext (sLit "Warnings")
pprIfaceAnnotation :: IfaceAnnotation -> SDoc
pprIfaceAnnotation (IfaceAnnotation { ifAnnotatedTarget = target, ifAnnotatedValue = serialized })
= ppr target <+> ptext (sLit "annotated by") <+> ppr serialized
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Errors}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
badIfaceFile :: String -> SDoc -> SDoc
badIfaceFile file err
- = vcat [ptext (sLit "Bad interface file:") <+> text file,
+ = vcat [ptext (sLit "Bad interface file:") <+> text file,
nest 4 err]
hiModuleNameMismatchWarn :: Module -> Module -> MsgDoc
-hiModuleNameMismatchWarn requested_mod read_mod =
+hiModuleNameMismatchWarn requested_mod read_mod =
-- ToDo: This will fail to have enough qualification when the package IDs
-- are the same
withPprStyle (mkUserStyle alwaysQualify AllTheWay) $
@@ -930,7 +918,7 @@ hiModuleNameMismatchWarn requested_mod read_mod =
]
wrongIfaceModErr :: ModIface -> Module -> String -> SDoc
-wrongIfaceModErr iface mod_name file_path
+wrongIfaceModErr iface mod_name file_path
= sep [ptext (sLit "Interface file") <+> iface_file,
ptext (sLit "contains module") <+> quotes (ppr (mi_module iface)) <> comma,
ptext (sLit "but we were expecting module") <+> quotes (ppr mod_name),
@@ -949,5 +937,3 @@ homeModError mod location
Just file -> space <> parens (text file)
Nothing -> Outputable.empty)
<+> ptext (sLit "which is not loaded")
-\end{code}
-
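
For readers following the interface-loading changes above: findAndReadIface and readIface report success or failure through GHC's MaybeErr type rather than by recording an error in the monad. The standalone sketch below restates that convention with a simplified MaybeErr and an invented lookup table; it is an illustration, not code from LoadIface.

    import qualified Data.Map as Map

    -- Simplified stand-in for GHC's MaybeErr (Maybes module): Failed carries
    -- an error message, Succeeded carries the result.
    data MaybeErr err val = Failed err | Succeeded val

    -- Invented stand-in for readIface: look a path up in a table of interfaces.
    readIfaceSketch :: Map.Map FilePath String -> FilePath -> MaybeErr String String
    readIfaceSketch ifaces path =
      case Map.lookup path ifaces of
        Nothing       -> Failed ("Bad interface file: " ++ path)
        Just contents -> Succeeded contents

    -- The caller decides what a failure means, mirroring the comment above
    -- that findAndReadIface "*doesn't* add an error to the monad".
    main :: IO ()
    main = case readIfaceSketch (Map.fromList [("M.hi", "iface for M")]) "M.hi" of
             Succeeded iface -> putStrLn iface
             Failed err      -> putStrLn ("not found: " ++ err)
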
diff --git a/compiler/iface/MkIface.lhs b/compiler/iface/MkIface.hs
index 8b5dac58e7..b3321c19de 100644
--- a/compiler/iface/MkIface.lhs
+++ b/compiler/iface/MkIface.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006-2008
-% (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The University of Glasgow 2006-2008
+(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
+-}
-\begin{code}
{-# LANGUAGE CPP, NondecreasingIndentation #-}
-- | Module for constructing @ModIface@ values (interface files),
@@ -25,8 +24,8 @@ module MkIface (
tyThingToIfaceDecl -- Converting things to their Iface equivalents
) where
-\end{code}
+{-
-----------------------------------------------
Recompilation checking
-----------------------------------------------
@@ -56,8 +55,8 @@ Basic idea:
* In checkOldIface we compare the mi_usages for the module with
the actual fingerprint for all each thing recorded in mi_usages
+-}
-\begin{code}
#include "HsVersions.h"
import IfaceSyn
@@ -123,17 +122,15 @@ import Data.Ord
import Data.IORef
import System.Directory
import System.FilePath
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Completing an interface}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkIface :: HscEnv
-> Maybe Fingerprint -- The old fingerprint, if we have it
-> ModDetails -- The trimmed, tidied interface
@@ -669,10 +666,7 @@ sortDependencies d
dep_pkgs = sortBy (stablePackageKeyCmp `on` fst) (dep_pkgs d),
dep_orphs = sortBy stableModuleCmp (dep_orphs d),
dep_finsts = sortBy stableModuleCmp (dep_finsts d) }
-\end{code}
-
-\begin{code}
-- | Creates cached lookup for the 'mi_anns' field of ModIface
-- Hackily, we use "module" as the OccName for any module-level annotations
mkIfaceAnnCache :: [IfaceAnnotation] -> OccName -> [AnnPayload]
@@ -686,8 +680,8 @@ mkIfaceAnnCache anns
, [value])
-- flipping (++), so the first argument is always short
env = mkOccEnv_C (flip (++)) (map pair anns)
-\end{code}
+{-
Note [Orphans and auto-generated rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we specialise an INLINEABLE function, or when we have
@@ -707,11 +701,11 @@ module M will be used in other modules only if M.hi has been read for
some other reason, which is actually pretty likely.
-%************************************************************************
-%* *
+************************************************************************
+* *
The ABI of an IfaceDecl
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [The ABI of an IfaceDecl]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -734,8 +728,8 @@ Items (c)-(f) are not stored in the IfaceDecl, but instead appear
elsewhere in the interface file. But they are *fingerprinted* with
the declaration itself. This is done by grouping (c)-(f) in IfaceDeclExtras,
and fingerprinting that as part of the declaration.
+-}
-\begin{code}
type IfaceDeclABI = (Module, IfaceDecl, IfaceDeclExtras)
data IfaceDeclExtras
@@ -744,7 +738,7 @@ data IfaceDeclExtras
| IfaceDataExtras
Fixity -- Fixity of the tycon itself
[IfaceInstABI] -- Local class and family instances of this tycon
- -- See Note [Orphans] in IfaceSyn
+ -- See Note [Orphans] in InstEnv
[AnnPayload] -- Annotations of the type itself
[IfaceIdExtras] -- For each constructor: fixity, RULES and annotations
@@ -752,7 +746,7 @@ data IfaceDeclExtras
Fixity -- Fixity of the class itself
[IfaceInstABI] -- Local instances of this class *or*
-- of its associated data types
- -- See Note [Orphans] in IfaceSyn
+ -- See Note [Orphans] in InstEnv
[AnnPayload] -- Annotations of the type itself
[IfaceIdExtras] -- For each class method: fixity, RULES and annotations
@@ -946,16 +940,15 @@ mkOrphMap get_key decls
| NotOrphan occ <- get_key d
= (extendOccEnv_Acc (:) singleton non_orphs occ d, orphs)
| otherwise = (non_orphs, d:orphs)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Keeping track of what we've slurped, and fingerprints
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkUsageInfo :: HscEnv -> Module -> ImportedMods -> NameSet -> [FilePath] -> IO [Usage]
mkUsageInfo hsc_env this_mod dir_imp_mods used_names dependent_files
= do
@@ -1093,18 +1086,14 @@ mk_mod_usage_info pit hsc_env this_mod direct_imports used_names
from generating many of these usages (at least in
one-shot mode), but that's even more bogus!
-}
-\end{code}
-\begin{code}
mkIfaceAnnotation :: Annotation -> IfaceAnnotation
mkIfaceAnnotation (Annotation { ann_target = target, ann_value = payload })
= IfaceAnnotation {
ifAnnotatedTarget = fmap nameOccName target,
ifAnnotatedValue = payload
}
-\end{code}
-\begin{code}
mkIfaceExports :: [AvailInfo] -> [IfaceExport] -- Sort to make canonical
mkIfaceExports exports
= sortBy stableAvailCmp (map sort_subs exports)
@@ -1116,8 +1105,8 @@ mkIfaceExports exports
| n==m = AvailTC n (m:sortBy stableNameCmp ms)
| otherwise = AvailTC n (sortBy stableNameCmp (m:ms))
-- Maintain the AvailTC Invariant
-\end{code}
+{-
Note [Orignal module]
~~~~~~~~~~~~~~~~~~~~~
Consider this:
@@ -1143,14 +1132,14 @@ Trac #5362 for an example. Such Names are always
- They are always System Names, hence the assert, just as a double check.
-%************************************************************************
-%* *
+************************************************************************
+* *
Load the old interface file for this module (unless
we have it already), and check whether it is up to date
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data RecompileRequired
= UpToDate
-- ^ everything is up to date, recompilation is not required
@@ -1500,15 +1489,15 @@ checkList (check:checks) = do recompile <- check
if recompileRequired recompile
then return recompile
else checkList checks
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Converting things to their Iface equivalents
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tyThingToIfaceDecl :: TyThing -> IfaceDecl
tyThingToIfaceDecl (AnId id) = idToIfaceDecl id
tyThingToIfaceDecl (ATyCon tycon) = snd (tyConToIfaceDecl emptyTidyEnv tycon)
@@ -1948,7 +1937,7 @@ coreRuleToIfaceRule mod rule@(Rule { ru_name = name, ru_fn = fn,
do_arg (Coercion co) = IfaceCo (toIfaceCoercion co)
do_arg arg = toIfaceExpr arg
- -- Compute orphanhood. See Note [Orphans] in IfaceSyn
+ -- Compute orphanhood. See Note [Orphans] in InstEnv
-- A rule is an orphan only if none of the variables
-- mentioned on its left-hand side are locally defined
lhs_names = nameSetElems (ruleLhsOrphNames rule)
@@ -2041,4 +2030,3 @@ toIfaceVar v
| isExternalName name = IfaceExt name
| otherwise = IfaceLcl (getFS name)
where name = idName v
-\end{code}
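
The "Recompilation checking" notes near the top of MkIface describe recording a fingerprint for every imported thing in mi_usages and recompiling as soon as any recorded fingerprint no longer matches. The sketch below captures only that comparison step, with plain Strings and Ints standing in for Names and Fingerprints; it is not MkIface's checkOldIface.

    import qualified Data.Map as Map

    type Fingerprint = Int

    data RecompReason = UpToDate | MustRecompile String
      deriving Show

    checkUsages :: Map.Map String Fingerprint   -- fingerprints recorded at last compile
                -> Map.Map String Fingerprint   -- fingerprints computed now
                -> RecompReason
    checkUsages recorded current =
      case [ n | (n, old) <- Map.toList recorded
               , Map.lookup n current /= Just old ] of
        []      -> UpToDate
        (n : _) -> MustRecompile ("fingerprint of " ++ n ++ " changed")
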
diff --git a/compiler/iface/TcIface.lhs b/compiler/iface/TcIface.hs
index 10984ece24..692bfad534 100644
--- a/compiler/iface/TcIface.lhs
+++ b/compiler/iface/TcIface.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
Type checking of type signatures in interface files
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module TcIface (
@@ -75,8 +75,8 @@ import qualified Data.Map as Map
#if __GLASGOW_HASKELL__ < 709
import Data.Traversable ( traverse )
#endif
-\end{code}
+{-
This module takes
IfaceDecl -> TyThing
@@ -96,12 +96,12 @@ Names before typechecking, because there should be no scope errors etc.
-- bound in this module (and hence not yet processed).
-- The discarding happens when forkM finds a type error.
-%************************************************************************
-%* *
-%* tcImportDecl is the key function for "faulting in" *
-%* imported things
-%* *
-%************************************************************************
+************************************************************************
+* *
+* tcImportDecl is the key function for "faulting in" *
+* imported things
+* *
+************************************************************************
The main idea is this. We are chugging along type-checking source code, and
find a reference to GHC.Base.map. We call tcLookupGlobal, which doesn't find
@@ -119,8 +119,8 @@ mutable variable. This is important in situations like
...$(e1)...$(e2)...
where the code that e1 expands to might import some defns that
also turn out to be needed by the code that e2 expands to.
+-}
-\begin{code}
tcLookupImported_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
-- Returns (Failed err) if we can't find the interface file for the thing
tcLookupImported_maybe name
@@ -167,13 +167,13 @@ importDecl name
pprNameSpace (occNameSpace (nameOccName name)) <+> ppr name)
2 (vcat [ptext (sLit "Probable cause: bug in .hi-boot file, or inconsistent .hi file"),
ptext (sLit "Use -ddump-if-trace to get an idea of which file caused the error")])
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Checks for wired-in things
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Loading instances for wired-in things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -200,9 +200,8 @@ for any module with an instance decl or RULE that we might want.
All of this is done by the type checker. The renamer plays no role.
(It used to, but no longer.)
+-}
-
-\begin{code}
checkWiredInTyCon :: TyCon -> TcM ()
-- Ensure that the home module of the TyCon (and hence its instances)
-- are loaded. See Note [Loading instances for wired-in things]
@@ -244,13 +243,13 @@ needWiredInHomeIface :: TyThing -> Bool
-- Only for TyCons; see Note [Loading instances for wired-in things]
needWiredInHomeIface (ATyCon {}) = True
needWiredInHomeIface _ = False
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Type-checking a complete interface
-%* *
-%************************************************************************
+* *
+************************************************************************
Suppose we discover we don't need to recompile. Then we must type
check the old interface file. This is a bit different to the
@@ -259,8 +258,8 @@ we do things similarly as when we are typechecking source decls: we
bring into scope the type envt for the interface all at once, using a
knot. Remember, the decls aren't necessarily in dependency order --
and even if they were, the type decls might be mutually recursive.
+-}
-\begin{code}
typecheckIface :: ModIface -- Get the decls from here
-> TcRnIf gbl lcl ModDetails
typecheckIface iface
@@ -306,16 +305,15 @@ typecheckIface iface
, md_exports = exports
}
}
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Type and class declarations
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tcHiBootIface :: HscSource -> Module -> TcRn ModDetails
-- Load the hi-boot iface for the module being compiled,
-- if it indeed exists in the transitive closure of imports
@@ -384,14 +382,13 @@ tcHiBootIface hsc_src mod
elaborate err = hang (ptext (sLit "Could not find hi-boot interface for") <+>
quotes (ppr mod) <> colon) 4 err
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Type and class declarations
-%* *
-%************************************************************************
+* *
+************************************************************************
When typechecking a data type decl, we *lazily* (via forkM) typecheck
the constructor argument types. This is in the hope that we may never
@@ -435,9 +432,8 @@ type envt by accident, because they look at it later.
What this means is that the implicitTyThings MUST NOT DEPEND on any of
the forkM stuff.
+-}
-
-\begin{code}
tcIfaceDecl :: Bool -- True <=> discard IdInfo on IfaceId bindings
-> IfaceDecl
-> IfL TyThing
@@ -706,8 +702,8 @@ tcIfaceEqSpec spec
do_item (occ, if_ty) = do { tv <- tcIfaceTyVar occ
; ty <- tcIfaceType if_ty
; return (tv,ty) }
-\end{code}
+{-
Note [Synonym kind loop]
~~~~~~~~~~~~~~~~~~~~~~~~
Notice that we eagerly grab the *kind* from the interface file, but
@@ -726,13 +722,13 @@ be defined, and we must not do that until we've finished with M.T.
Solution: record S's kind in the interface file; now we can safely
look at it.
-%************************************************************************
-%* *
+************************************************************************
+* *
Instances
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tcIfaceInst :: IfaceClsInst -> IfL ClsInst
tcIfaceInst (IfaceClsInst { ifDFun = dfun_occ, ifOFlag = oflag
, ifInstCls = cls, ifInstTys = mb_tcs
@@ -751,20 +747,19 @@ tcIfaceFamInst (IfaceFamInst { ifFamInstFam = fam, ifFamInstTys = mb_tcs
; let axiom'' = toUnbranchedAxiom axiom'
mb_tcs' = map (fmap ifaceTyConName) mb_tcs
; return (mkImportedFamInst fam mb_tcs' axiom'') }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Rules
-%* *
-%************************************************************************
+* *
+************************************************************************
We move a IfaceRule from eps_rules to eps_rule_base when all its LHS free vars
are in the type environment. However, remember that typechecking a Rule may
(as a side effect) augment the type envt, and so we may need to iterate the process.
+-}
-\begin{code}
tcIfaceRules :: Bool -- True <=> ignore rules
-> [IfaceRule]
-> IfL [CoreRule]
@@ -805,16 +800,15 @@ tcIfaceRule (IfaceRule {ifRuleName = name, ifActivation = act, ifRuleBndrs = bnd
ifTopFreeName (IfaceApp f _) = ifTopFreeName f
ifTopFreeName (IfaceExt n) = Just n
ifTopFreeName _ = Nothing
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Annotations
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tcIfaceAnnotations :: [IfaceAnnotation] -> IfL [Annotation]
tcIfaceAnnotations = mapM tcIfaceAnnotation
@@ -833,16 +827,14 @@ tcIfaceAnnTarget (NamedTarget occ) = do
tcIfaceAnnTarget (ModuleTarget mod) = do
return $ ModuleTarget mod
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Vectorisation information
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- We need access to the type environment as we need to look up information about type constructors
-- (i.e., their data constructors and whether they are class type constructors). If a vectorised
-- type constructor or class is defined in the same module as where it is vectorised, we cannot
@@ -962,15 +954,15 @@ tcIfaceVectInfo mod typeEnv (IfaceVectInfo
}
notATyConErr = pprPanic "TcIface.tcIfaceVectInfo: not a tycon" (ppr name)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Types
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tcIfaceType :: IfaceType -> IfL Type
tcIfaceType (IfaceTyVar n) = do { tv <- tcIfaceTyVar n; return (TyVarTy tv) }
tcIfaceType (IfaceAppTy t1 t2) = do { t1' <- tcIfaceType t1; t2' <- tcIfaceType t2; return (AppTy t1' t2') }
@@ -1015,16 +1007,15 @@ tcIfaceCtxt sts = mapM tcIfaceType sts
tcIfaceTyLit :: IfaceTyLit -> IfL TyLit
tcIfaceTyLit (IfaceNumTyLit n) = return (NumTyLit n)
tcIfaceTyLit (IfaceStrTyLit n) = return (StrTyLit n)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Coercions
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tcIfaceCo :: IfaceCoercion -> IfL Coercion
tcIfaceCo (IfaceReflCo r t) = mkReflCo r <$> tcIfaceType t
tcIfaceCo (IfaceFunCo r c1 c2) = mkFunCo r <$> tcIfaceCo c1 <*> tcIfaceCo c2
@@ -1061,16 +1052,15 @@ tcIfaceCoAxiomRule n =
case Map.lookup n typeNatCoAxiomRules of
Just ax -> return ax
_ -> pprPanic "tcIfaceCoAxiomRule" (ppr n)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Core
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tcIfaceExpr :: IfaceExpr -> IfL CoreExpr
tcIfaceExpr (IfaceType ty)
= Type <$> tcIfaceType ty
@@ -1247,16 +1237,15 @@ tcIfaceDataAlt con inst_tys arg_strs rhs
extendIfaceIdEnv arg_ids $
tcIfaceExpr rhs
; return (DataAlt con, ex_tvs ++ arg_ids, rhs') }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
IdInfo
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tcIdDetails :: Type -> IfaceIdDetails -> IfL IdDetails
tcIdDetails _ IfVanillaId = return VanillaId
tcIdDetails ty (IfDFunId ns)
@@ -1291,9 +1280,7 @@ tcIdInfo ignore_prags name ty info
; let info1 | lb = info `setOccInfo` strongLoopBreaker
| otherwise = info
; return (info1 `setUnfoldingInfoLazily` unf) }
-\end{code}
-\begin{code}
tcUnfolding :: Name -> Type -> IdInfo -> IfaceUnfolding -> IfL Unfolding
tcUnfolding name _ info (IfCoreUnfold stable if_expr)
= do { dflags <- getDynFlags
@@ -1333,12 +1320,12 @@ tcUnfolding name dfun_ty _ (IfDFunUnfold bs ops)
where
doc = text "Class ops for dfun" <+> ppr name
(_, _, cls, _) = tcSplitDFunTy dfun_ty
-\end{code}
+{-
For unfoldings we try to do the job lazily, so that we never type check
an unfolding that isn't going to be looked at.
+-}
-\begin{code}
tcPragExpr :: Name -> IfaceExpr -> IfL (Maybe CoreExpr)
tcPragExpr name expr
= forkM_maybe doc $ do
@@ -1370,17 +1357,15 @@ tcPragExpr name expr
; return (varEnvElts (if_tv_env lcl_env) ++
varEnvElts (if_id_env lcl_env) ++
rec_ids) }
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Getting from Names to TyThings
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tcIfaceGlobal :: Name -> IfL TyThing
tcIfaceGlobal name
| Just thing <- wiredInNameTyThing_maybe name
@@ -1461,15 +1446,15 @@ tcIfaceExtId name = do { thing <- tcIfaceGlobal name
; case thing of
AnId id -> return id
_ -> pprPanic "tcIfaceExtId" (ppr name$$ ppr thing) }
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Bindings
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
bindIfaceBndr :: IfaceBndr -> (CoreBndr -> IfL a) -> IfL a
bindIfaceBndr (IfaceIdBndr (fs, ty)) thing_inside
= do { name <- newIfaceName (mkVarOccFS fs)
@@ -1532,4 +1517,3 @@ bindIfaceTyVars_AT (b@(tv_occ,_) : bs) thing_inside
; bind_b $ \b' ->
bindIfaceTyVars_AT bs $ \bs' ->
thing_inside (b':bs') }
-\end{code}
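
TcIface's comment above tcPragExpr says that unfoldings are typechecked lazily, so the work happens only if the unfolding is ever looked at. The sketch below demonstrates the underlying technique with unsafeInterleaveIO; GHC's real mechanism is forkM/forkM_maybe, and the strings here are placeholders.

    import System.IO.Unsafe (unsafeInterleaveIO)

    -- Suspend an expensive reconstruction; it runs only when the result is
    -- actually demanded.  Illustrative only, not GHC's forkM.
    typecheckUnfoldingLazily :: String -> IO (Maybe String)
    typecheckUnfoldingLazily pragma =
      unsafeInterleaveIO $ do
        putStrLn ("typechecking unfolding: " ++ pragma)  -- printed only on demand
        return (Just (reverse pragma))                   -- stands in for the real work

    main :: IO ()
    main = do
      unf <- typecheckUnfoldingLazily "expensive"
      putStrLn "interface loaded"      -- no typechecking has happened yet
      case unf of                      -- forcing the thunk triggers the work
        Just e  -> putStrLn ("unfolding used: " ++ e)
        Nothing -> putStrLn "discarded"
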
diff --git a/compiler/iface/TcIface.lhs-boot b/compiler/iface/TcIface.hs-boot
index 591419a251..619e3efdbb 100644
--- a/compiler/iface/TcIface.lhs-boot
+++ b/compiler/iface/TcIface.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module TcIface where
import IfaceSyn ( IfaceDecl, IfaceClsInst, IfaceFamInst, IfaceRule, IfaceAnnotation )
@@ -17,5 +16,3 @@ tcIfaceVectInfo :: Module -> TypeEnv -> IfaceVectInfo -> IfL VectInfo
tcIfaceInst :: IfaceClsInst -> IfL ClsInst
tcIfaceFamInst :: IfaceFamInst -> IfL FamInst
tcIfaceAnnotations :: [IfaceAnnotation] -> IfL [Annotation]
-\end{code}
-
diff --git a/compiler/main/CodeOutput.lhs b/compiler/main/CodeOutput.hs
index 72803c0d6b..cdb81b7f9e 100644
--- a/compiler/main/CodeOutput.lhs
+++ b/compiler/main/CodeOutput.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
+
\section{Code output phase}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module CodeOutput( codeOutput, outputForeignStubs ) where
@@ -36,15 +36,15 @@ import Control.Exception
import System.Directory
import System.FilePath
import System.IO
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Steering}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
codeOutput :: DynFlags
-> Module
-> FilePath
@@ -56,7 +56,7 @@ codeOutput :: DynFlags
(Bool{-stub_h_exists-}, Maybe FilePath{-stub_c_exists-}))
codeOutput dflags this_mod filenm location foreign_stubs pkg_deps cmm_stream
- =
+ =
do {
-- Lint each CmmGroup as it goes past
; let linted_cmm_stream =
@@ -87,16 +87,15 @@ codeOutput dflags this_mod filenm location foreign_stubs pkg_deps cmm_stream
doOutput :: String -> (Handle -> IO a) -> IO a
doOutput filenm io_action = bracket (openFile filenm WriteMode) hClose io_action
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{C}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
outputC :: DynFlags
-> FilePath
-> Stream IO RawCmmGroup ()
@@ -104,7 +103,7 @@ outputC :: DynFlags
-> IO ()
outputC dflags filenm cmm_stream packages
- = do
+ = do
-- ToDo: make the C backend consume the C-- incrementally, by
-- pushing the cmm_stream inside (c.f. nativeCodeGen)
rawcmms <- Stream.collect cmm_stream
@@ -116,10 +115,10 @@ outputC dflags filenm cmm_stream packages
-- * the _stub.h file, if there is one.
--
let rts = getPackageDetails dflags rtsPackageKey
-
+
let cc_injects = unlines (map mk_include (includes rts))
- mk_include h_file =
- case h_file of
+ mk_include h_file =
+ case h_file of
'"':_{-"-} -> "#include "++h_file
'<':_ -> "#include "++h_file
_ -> "#include \""++h_file++"\""
@@ -130,16 +129,15 @@ outputC dflags filenm cmm_stream packages
hPutStr h ("/* GHC_PACKAGES " ++ unwords pkg_names ++ "\n*/\n")
hPutStr h cc_injects
writeCs dflags h rawcmms
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Assembler}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
outputAsm :: DynFlags -> Module -> FilePath -> Stream IO RawCmmGroup () -> IO ()
outputAsm dflags this_mod filenm cmm_stream
| cGhcWithNativeCodeGen == "YES"
@@ -154,16 +152,15 @@ outputAsm dflags this_mod filenm cmm_stream
| otherwise
= panic "This compiler was built without a native code generator"
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{LLVM}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
outputLlvm :: DynFlags -> FilePath -> Stream IO RawCmmGroup () -> IO ()
outputLlvm dflags filenm cmm_stream
= do ncg_uniqs <- mkSplitUniqSupply 'n'
@@ -171,16 +168,15 @@ outputLlvm dflags filenm cmm_stream
{-# SCC "llvm_output" #-} doOutput filenm $
\f -> {-# SCC "llvm_CodeGen" #-}
llvmCodeGen dflags f ncg_uniqs cmm_stream
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Foreign import/export}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
outputForeignStubs :: DynFlags -> Module -> ModLocation -> ForeignStubs
-> IO (Bool, -- Header file created
Maybe FilePath) -- C file created
@@ -197,7 +193,7 @@ outputForeignStubs dflags mod location stubs
let
stub_c_output_d = pprCode CStyle c_code
stub_c_output_w = showSDoc dflags stub_c_output_d
-
+
-- Header file protos for "foreign export"ed functions.
stub_h_output_d = pprCode CStyle h_code
stub_h_output_w = showSDoc dflags stub_h_output_d
@@ -208,7 +204,7 @@ outputForeignStubs dflags mod location stubs
"Foreign export header file" stub_h_output_d
-- we need the #includes from the rts package for the stub files
- let rts_includes =
+ let rts_includes =
let rts_pkg = getPackageDetails dflags rtsPackageKey in
concatMap mk_include (includes rts_pkg)
mk_include i = "#include \"" ++ i ++ "\"\n"
@@ -226,7 +222,7 @@ outputForeignStubs dflags mod location stubs
stub_c_file_exists
<- outputForeignStubs_help stub_c stub_c_output_w
- ("#define IN_STG_CODE 0\n" ++
+ ("#define IN_STG_CODE 0\n" ++
"#include \"Rts.h\"\n" ++
rts_includes ++
ffi_includes ++
@@ -252,4 +248,3 @@ outputForeignStubs_help _fname "" _header _footer = return False
outputForeignStubs_help fname doc_str header footer
= do writeFile fname (header ++ doc_str ++ '\n':footer ++ "\n")
return True
-\end{code}
diff --git a/compiler/main/Constants.lhs b/compiler/main/Constants.hs
index ee126f5b20..0054888df3 100644
--- a/compiler/main/Constants.lhs
+++ b/compiler/main/Constants.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[Constants]{Info about this compilation}
+-}
-\begin{code}
module Constants (module Constants) where
import Config
@@ -30,4 +30,3 @@ wORD64_SIZE = 8
tARGET_MAX_CHAR :: Int
tARGET_MAX_CHAR = 0x10ffff
-\end{code}
diff --git a/compiler/main/DriverPipeline.hs b/compiler/main/DriverPipeline.hs
index eefa0a6ba3..fdec73e1ce 100644
--- a/compiler/main/DriverPipeline.hs
+++ b/compiler/main/DriverPipeline.hs
@@ -240,7 +240,7 @@ compileOne' m_tc_result mHscMessage
_ ->
case ms_hsc_src summary of
- t | isHsBootOrSig t ->
+ HsBootFile ->
do (iface, changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
hscWriteIface dflags iface changed summary
touchObjectFile dflags object_filename
@@ -248,7 +248,23 @@ compileOne' m_tc_result mHscMessage
hm_iface = iface,
hm_linkable = maybe_old_linkable })
- _ -> do guts0 <- hscDesugar hsc_env summary tc_result
+ HsigFile ->
+ do (iface, changed, details) <-
+ hscSimpleIface hsc_env tc_result mb_old_hash
+ hscWriteIface dflags iface changed summary
+ compileEmptyStub dflags hsc_env basename location
+
+ -- Same as Hs
+ o_time <- getModificationUTCTime object_filename
+ let linkable =
+ LM o_time this_mod [DotO object_filename]
+
+ return (HomeModInfo{ hm_details = details,
+ hm_iface = iface,
+ hm_linkable = Just linkable })
+
+ HsSrcFile ->
+ do guts0 <- hscDesugar hsc_env summary tc_result
guts <- hscSimplify hsc_env guts0
(iface, changed, details, cgguts) <- hscNormalIface hsc_env guts mb_old_hash
hscWriteIface dflags iface changed summary
@@ -287,6 +303,21 @@ compileStub hsc_env stub_c = do
return stub_o
+compileEmptyStub :: DynFlags -> HscEnv -> FilePath -> ModLocation -> IO ()
+compileEmptyStub dflags hsc_env basename location = do
+ -- To maintain the invariant that every Haskell file
+ -- compiles to object code, we make an empty (but
+ -- valid) stub object file for signatures
+ empty_stub <- newTempName dflags "c"
+ writeFile empty_stub ""
+ _ <- runPipeline StopLn hsc_env
+ (empty_stub, Nothing)
+ (Just basename)
+ Persistent
+ (Just location)
+ Nothing
+ return ()
+
-- ---------------------------------------------------------------------------
-- Link
@@ -341,11 +372,7 @@ link' dflags batch_attempt_linking hpt
LinkStaticLib -> True
_ -> platformBinariesAreStaticLibs (targetPlatform dflags)
- -- Don't attempt to link hsigs; they don't actually produce objects.
- -- This is in contrast to hs-boot files, which will /eventually/
- -- get objects.
- home_mod_infos =
- filter ((==Nothing).mi_sig_of.hm_iface) (eltsUFM hpt)
+ home_mod_infos = eltsUFM hpt
-- the packages we depend on
pkg_deps = concatMap (map fst . dep_pkgs . mi_deps . hm_iface) home_mod_infos
@@ -981,6 +1008,14 @@ runPhase (HscOut src_flavour mod_name result) _ dflags = do
-- stamp file for the benefit of Make
liftIO $ touchObjectFile dflags o_file
return (RealPhase next_phase, o_file)
+ HscUpdateSig ->
+ do -- We need to create a REAL but empty .o file
+ -- because we are going to attempt to put it in a library
+ PipeState{hsc_env=hsc_env'} <- getPipeState
+ let input_fn = expectJust "runPhase" (ml_hs_file location)
+ basename = dropExtension input_fn
+ liftIO $ compileEmptyStub dflags hsc_env' basename location
+ return (RealPhase next_phase, o_file)
HscRecomp cgguts mod_summary
-> do output_fn <- phaseOutputFilename next_phase
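
The DriverPipeline changes above add compileEmptyStub so that an hsig (and the new HscUpdateSig phase result) still produces a real, if empty, object file that can later be put into a library. The sketch below reproduces just that idea outside GHC by writing an empty C file and invoking a C compiler directly; the real code runs the stub through GHC's own runPipeline, and the "cc" command and file names here are assumptions for illustration.

    import System.Process (callProcess)

    compileEmptyStubSketch :: FilePath -> IO FilePath
    compileEmptyStubSketch basename = do
      let stub_c = basename ++ "_empty_stub.c"
          stub_o = basename ++ ".o"
      writeFile stub_c ""                        -- empty but valid C source
      callProcess "cc" ["-c", stub_c, "-o", stub_o]
      return stub_o                              -- object file to archive later
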
diff --git a/compiler/main/ErrUtils.lhs b/compiler/main/ErrUtils.hs
index 61f433573b..59bc01b324 100644
--- a/compiler/main/ErrUtils.lhs
+++ b/compiler/main/ErrUtils.hs
@@ -1,13 +1,13 @@
-%
-% (c) The AQUA Project, Glasgow University, 1994-1998
-%
+{-
+(c) The AQUA Project, Glasgow University, 1994-1998
+
\section[ErrsUtils]{Utilities for error reporting}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module ErrUtils (
- MsgDoc,
+ MsgDoc,
Validity(..), andValid, allValid, isValid, getInvalids,
ErrMsg, WarnMsg, Severity(..),
@@ -130,7 +130,7 @@ mkLocMessage severity locn msg
where
sev_info = case severity of
SevWarning -> ptext (sLit "Warning:")
- _other -> empty
+ _other -> empty
-- For warnings, print Foo.hs:34: Warning:
-- <the warning message>
@@ -417,5 +417,3 @@ prettyPrintGhcErrors dflags
pprDebugAndThen dflags pgmError (text str) doc
_ ->
liftIO $ throwIO e
-\end{code}
-
diff --git a/compiler/main/ErrUtils.lhs-boot b/compiler/main/ErrUtils.hs-boot
index fc99c5afde..ac1673b367 100644
--- a/compiler/main/ErrUtils.lhs-boot
+++ b/compiler/main/ErrUtils.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module ErrUtils where
import Outputable (SDoc)
@@ -16,5 +15,3 @@ data Severity
type MsgDoc = SDoc
mkLocMessage :: Severity -> SrcSpan -> MsgDoc -> MsgDoc
-\end{code}
-
diff --git a/compiler/main/Finder.lhs b/compiler/main/Finder.hs
index 189ef50fb6..71b4e97b39 100644
--- a/compiler/main/Finder.lhs
+++ b/compiler/main/Finder.hs
@@ -1,9 +1,9 @@
-%
-% (c) The University of Glasgow, 2000-2006
-%
+{-
+(c) The University of Glasgow, 2000-2006
+
\section[Finder]{Module Finder}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module Finder (
@@ -258,7 +258,7 @@ uncacheModule hsc_env mod = do
findHomeModule :: HscEnv -> ModuleName -> IO FindResult
findHomeModule hsc_env mod_name =
homeSearchCache hsc_env mod_name $
- let
+ let
dflags = hsc_dflags hsc_env
home_path = importPaths dflags
hisuf = hiSuf dflags
@@ -691,4 +691,3 @@ cantFindErr cannot_find _ dflags mod_name find_result
= parens (ptext (sLit "needs flag -package-key")
<+> ppr (packageConfigId pkg))
| otherwise = Outputable.empty
-\end{code}
diff --git a/compiler/main/Hooks.lhs b/compiler/main/Hooks.hs
index 63aaafa2a7..44f340aed9 100644
--- a/compiler/main/Hooks.lhs
+++ b/compiler/main/Hooks.hs
@@ -1,6 +1,5 @@
-\section[Hooks]{Low level API hooks}
+-- \section[Hooks]{Low level API hooks}
-\begin{code}
module Hooks ( Hooks
, emptyHooks
, lookupHook
@@ -40,15 +39,14 @@ import Type
import SrcLoc
import Data.Maybe
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Hooks}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
-- | Hooks can be used by GHC API clients to replace parts of
-- the compiler pipeline. If a hook is not installed, GHC
@@ -78,6 +76,3 @@ getHooked hook def = fmap (lookupHook hook def) getDynFlags
lookupHook :: (Hooks -> Maybe a) -> a -> DynFlags -> a
lookupHook hook def = fromMaybe def . hook . hooks
-
-\end{code}
-
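
The Hooks module above keeps the pattern in which each hook is a Maybe field and lookupHook falls back to a default when no hook is installed. A cut-down standalone version of that pattern (without DynFlags, and with an invented runPhaseHook field) might look like this:

    import Data.Maybe (fromMaybe)

    data Hooks = Hooks { runPhaseHook :: Maybe (String -> String) }

    emptyHooks :: Hooks
    emptyHooks = Hooks { runPhaseHook = Nothing }

    -- Same shape as the real lookupHook, minus the DynFlags indirection.
    lookupHook :: (Hooks -> Maybe a) -> a -> Hooks -> a
    lookupHook hook def = fromMaybe def . hook

    -- With no hook installed the default is used; installing one overrides it.
    example :: (String, String)
    example = ( lookupHook runPhaseHook id emptyHooks "input"    -- "input"
              , lookupHook runPhaseHook id withHook   "input" )  -- "tupni"
      where withHook = emptyHooks { runPhaseHook = Just reverse }
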
diff --git a/compiler/main/Hooks.lhs-boot b/compiler/main/Hooks.hs-boot
index 71b7bf2a7d..280de32063 100644
--- a/compiler/main/Hooks.lhs-boot
+++ b/compiler/main/Hooks.hs-boot
@@ -1,9 +1,5 @@
-\begin{code}
module Hooks where
data Hooks
emptyHooks :: Hooks
-
-\end{code}
-
diff --git a/compiler/main/HscMain.hs b/compiler/main/HscMain.hs
index fcf0c48de0..8f8da0266b 100644
--- a/compiler/main/HscMain.hs
+++ b/compiler/main/HscMain.hs
@@ -647,7 +647,10 @@ hscCompileOneShot' hsc_env mod_summary src_changed
t | isHsBootOrSig t ->
do (iface, changed, _) <- hscSimpleIface' tc_result mb_old_hash
liftIO $ hscWriteIface dflags iface changed mod_summary
- return HscUpdateBoot
+ return (case t of
+ HsBootFile -> HscUpdateBoot
+ HsigFile -> HscUpdateSig
+ HsSrcFile -> panic "hscCompileOneShot Src")
_ ->
do guts <- hscSimplify' guts0
(iface, changed, _details, cgguts) <- hscNormalIface' guts mb_old_hash
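
In HscMain the one-shot path now distinguishes the two no-codegen cases, matching the HscUpdateSig constructor added to HscStatus in HscTypes below. A minimal standalone restatement of that dispatch, using cut-down copies of the two types, is:

    -- Cut-down stand-ins for GHC's HscSource and HscStatus, illustration only.
    data HscSource = HsSrcFile | HsBootFile | HsigFile deriving Show
    data HscStatus = HscUpdateBoot | HscUpdateSig | HscRecomp  deriving Show

    statusFor :: HscSource -> HscStatus
    statusFor HsBootFile = HscUpdateBoot   -- .hs-boot: write the interface only
    statusFor HsigFile   = HscUpdateSig    -- .hsig: interface plus an empty stub .o
    statusFor HsSrcFile  = HscRecomp       -- ordinary source: full code generation
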
diff --git a/compiler/main/HscTypes.lhs b/compiler/main/HscTypes.hs
index cf3db52c94..d3666f52e8 100644
--- a/compiler/main/HscTypes.lhs
+++ b/compiler/main/HscTypes.hs
@@ -1,9 +1,9 @@
-%
-% (c) The University of Glasgow, 2006
-%
+{-
+(c) The University of Glasgow, 2006
+
\section[HscTypes]{Types for the per-module compiler}
+-}
-\begin{code}
{-# LANGUAGE CPP, DeriveDataTypeable, ScopedTypeVariables #-}
-- | Types for the per-module compiler
@@ -195,6 +195,7 @@ data HscStatus
= HscNotGeneratingCode
| HscUpToDate
| HscUpdateBoot
+ | HscUpdateSig
| HscRecomp CgGuts ModSummary
-- -----------------------------------------------------------------------------
@@ -314,15 +315,14 @@ handleFlagWarnings dflags warns
| L loc warn <- warns ]
printOrThrowWarnings dflags bag
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{HscEnv}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
-- | Hscenv is like 'Session', except that some of the fields are immutable.
-- An HscEnv is used to compile a single module from plain Haskell source
@@ -435,15 +435,15 @@ pprTargetId (TargetFile f _) = text f
instance Outputable TargetId where
ppr = pprTargetId
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Package and Module Tables}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Helps us find information about modules in the home package
type HomePackageTable = ModuleNameEnv HomeModInfo
-- Domain = modules in the home package that have been fully compiled
@@ -590,15 +590,15 @@ hptSomeThingsBelowUs extract include_hi_boot hsc_env deps
hptObjs :: HomePackageTable -> [FilePath]
hptObjs hpt = concat (map (maybe [] linkableObjs . hm_linkable) (eltsUFM hpt))
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Dealing with Annotations}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Deal with gathering annotations in from all possible places
-- and combining them into a single 'AnnEnv'
prepareAnnotations :: HscEnv -> Maybe ModGuts -> IO AnnEnv
@@ -615,15 +615,15 @@ prepareAnnotations hsc_env mb_guts = do
Just home_pkg_anns,
Just other_pkg_anns]
return ann_env
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The Finder cache}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | The 'FinderCache' maps home module names to the result of
-- searching for that module. It records the results of searching for
-- modules along the search path. On @:load@, we flush the entire
@@ -664,15 +664,15 @@ data FindResult
-- home modules and package modules. On @:load@, only home modules are
-- purged from this cache.
type ModLocationCache = ModuleEnv ModLocation
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Symbol tables and Module details}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A 'ModIface' plus a 'ModDetails' summarises everything we know
-- about a compiled module. The 'ModIface' is the stuff *before* linking,
-- and can be written out to an interface file. The 'ModDetails is after
@@ -1100,13 +1100,13 @@ data ForeignStubs
appendStubC :: ForeignStubs -> SDoc -> ForeignStubs
appendStubC NoStubs c_code = ForeignStubs empty c_code
appendStubC (ForeignStubs h c) c_code = ForeignStubs h (c $$ c_code)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The interactive context}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [The interactive package]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1214,9 +1214,8 @@ It does *not* contain
* CoAxioms (ditto)
See also Note [Interactively-bound Ids in GHCi]
+-}
-
-\begin{code}
-- | Interactive context, recording information about the state of the
-- context in which statements are executed in a GHC session.
data InteractiveContext
@@ -1381,13 +1380,13 @@ substInteractiveContext ictxt@InteractiveContext{ ic_tythings = tts } subst
instance Outputable InteractiveImport where
ppr (IIModule m) = char '*' <> ppr m
ppr (IIDecl d) = ppr d
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Building a PrintUnqualified
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Printing original names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1433,8 +1432,8 @@ another scheme is to (recursively) say which dependencies are different.
NB: When we extend package keys to also have holes, we will have to disambiguate
those as well.
+-}
-\begin{code}
-- | Creates some functions that work out the best ways to format
-- names for the user according to a set of heuristics.
mkPrintUnqualified :: DynFlags -> GlobalRdrEnv -> PrintUnqualified
@@ -1515,14 +1514,12 @@ pkgQual dflags = alwaysQualify {
queryQualifyPackage = mkQualPackage dflags
}
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Implicit TyThings
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Implicit TyThings]
~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1547,8 +1544,8 @@ Examples:
* Axioms for newtypes are implicit (same as above), but axioms
for data/type family instances are *not* implicit (like DFunIds).
+-}
-\begin{code}
-- | Determine the 'TyThing's brought into scope by another 'TyThing'
-- /other/ than itself. For example, Id's don't have any implicit TyThings
-- as they just bring themselves into scope, but classes bring their
@@ -1676,15 +1673,15 @@ tyThingAvailInfo (ATyCon t)
dcs = tyConDataCons t
tyThingAvailInfo t
= Avail (getName t)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
TypeEnv
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A map from 'Name's to 'TyThing's, constructed by typechecking
-- local declarations or interface files
type TypeEnv = NameEnv TyThing
@@ -1740,9 +1737,7 @@ extendTypeEnvList env things = foldl extendTypeEnv env things
extendTypeEnvWithIds :: TypeEnv -> [Id] -> TypeEnv
extendTypeEnvWithIds env ids
= extendNameEnvList env [(getName id, AnId id) | id <- ids]
-\end{code}
-\begin{code}
-- | Find the 'TyThing' for the given 'Name' by using all the resources
-- at our disposal: the compiled modules in the 'HomePackageTable' and the
-- compiled modules in other packages that live in 'PackageTypeEnv'. Note
@@ -1773,9 +1768,7 @@ lookupTypeHscEnv hsc_env name = do
where
dflags = hsc_dflags hsc_env
hpt = hsc_HPT hsc_env
-\end{code}
-\begin{code}
-- | Get the 'TyCon' from a 'TyThing' if it is a type constructor thing. Panics otherwise
tyThingTyCon :: TyThing -> TyCon
tyThingTyCon (ATyCon tc) = tc
@@ -1796,15 +1789,15 @@ tyThingId :: TyThing -> Id
tyThingId (AnId id) = id
tyThingId (AConLike (RealDataCon dc)) = dataConWrapId dc
tyThingId other = pprPanic "tyThingId" (pprTyThing other)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{MonadThings and friends}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Class that abstracts out the common ability of the monads in GHC
-- to lookup a 'TyThing' in the monadic environment by 'Name'. Provides
-- a number of related convenience functions for accessing particular
@@ -1820,18 +1813,18 @@ class Monad m => MonadThings m where
lookupTyCon :: Name -> m TyCon
lookupTyCon = liftM tyThingTyCon . lookupThing
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Auxiliary types}
-%* *
-%************************************************************************
+* *
+************************************************************************
These types are defined here because they are mentioned in ModDetails,
but they are mostly elaborated elsewhere
+-}
-\begin{code}
------------------ Warnings -------------------------
-- | Warning information for a module
data Warnings
@@ -1894,9 +1887,7 @@ plusWarns NoWarnings d = d
plusWarns _ (WarnAll t) = WarnAll t
plusWarns (WarnAll t) _ = WarnAll t
plusWarns (WarnSome v1) (WarnSome v2) = WarnSome (v1 ++ v2)
-\end{code}
-\begin{code}
-- | Creates cached lookup for the 'mi_fix_fn' field of 'ModIface'
mkIfaceFixCache :: [(OccName, Fixity)] -> OccName -> Fixity
mkIfaceFixCache pairs
@@ -1924,15 +1915,15 @@ lookupFixity :: FixityEnv -> Name -> Fixity
lookupFixity env n = case lookupNameEnv env n of
Just (FixItem _ fix) -> fix
Nothing -> defaultFixity
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{WhatsImported}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Records whether a module has orphans. An \"orphan\" is one of:
--
-- * An instance declaration in a module other than the definition
@@ -2105,16 +2096,14 @@ instance Binary Usage where
return UsageFile { usg_file_path = fp, usg_file_hash = hash }
i -> error ("Binary.get(Usage): " ++ show i)
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
The External Package State
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type PackageTypeEnv = TypeEnv
type PackageRuleBase = RuleBase
type PackageInstEnv = InstEnv
@@ -2196,8 +2185,8 @@ addEpsInStats stats n_decls n_insts n_rules
, n_decls_in = n_decls_in stats + n_decls
, n_insts_in = n_insts_in stats + n_insts
, n_rules_in = n_rules_in stats + n_rules }
-\end{code}
+{-
Names in a NameCache are always stored as a Global, and have the SrcLoc
of their binding locations.
@@ -2205,8 +2194,8 @@ Actually that's not quite right. When we first encounter the original
name, we might not be at its binding site (e.g. we are reading an
interface file); so we give it 'noSrcLoc' then. Later, when we find
its binding site, we fix it up.
+-}
-\begin{code}
-- | The NameCache makes sure that there is just one Unique assigned for
-- each original name; i.e. (module-name, occ-name) pair and provides
-- something of a lookup mechanism for those names.
@@ -2219,10 +2208,7 @@ data NameCache
-- | Per-module cache of original 'OccName's given 'Name's
type OrigNameCache = ModuleEnv (OccEnv Name)
-\end{code}
-
-\begin{code}
mkSOName :: Platform -> FilePath -> FilePath
mkSOName platform root
= case platformOS platform of
@@ -2239,18 +2225,17 @@ soExt platform
OSDarwin -> "dylib"
OSMinGW32 -> "dll"
_ -> "so"
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
The module graph and ModSummary type
A ModSummary is a node in the compilation manager's
dependency graph, and it's also passed to hscMain
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A ModuleGraph contains all the nodes from the home package (only).
-- There will be a node for each source module, plus a node for each hi-boot
-- module.
@@ -2374,15 +2359,15 @@ hscSourceString' dflags mod HsigFile =
(("sig of "++).showPpr dflags)
(getSigOf dflags mod)) ++ "]"
-- NB: -sig-of could be missing if we're just typechecking
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Recompilation}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Indicates whether a given module's source has been modified since it
-- was last compiled.
data SourceModified
@@ -2399,15 +2384,15 @@ data SourceModified
-- reasons: (a) we can omit the version check in checkOldIface,
-- and (b) if the module used TH splices we don't need to force
-- recompilation.
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Hpc Support}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Information about a modules use of Haskell Program Coverage
data HpcInfo
= HpcInfo
@@ -2430,13 +2415,13 @@ emptyHpcInfo = NoHpcInfo
isHpcUsed :: HpcInfo -> AnyHpcUsage
isHpcUsed (HpcInfo {}) = True
isHpcUsed (NoHpcInfo { hpcUsed = used }) = used
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Vectorisation Support}
-%* *
-%************************************************************************
+* *
+************************************************************************
The following information is generated and consumed by the vectorisation
subsystem. It communicates the vectorisation status of declarations from one
@@ -2446,8 +2431,8 @@ Why do we need both f and f_v in the ModGuts/ModDetails/EPS version VectInfo
below? We need to know `f' when converting to IfaceVectInfo. However, during
vectorisation, we need to know `f_v', whose `Var' we cannot look up based
on just the OccName easily in a Core pass.
+-}
-\begin{code}
-- |Vectorisation information for 'ModGuts', 'ModDetails' and 'ExternalPackageState'; see also
-- documentation at 'Vectorise.Env.GlobalEnv'.
--
@@ -2543,18 +2528,18 @@ instance Binary IfaceVectInfo where
a4 <- get bh
a5 <- get bh
return (IfaceVectInfo a1 a2 a3 a4 a5)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Safe Haskell Support}
-%* *
-%************************************************************************
+* *
+************************************************************************
This stuff here is related to supporting the Safe Haskell extension,
primarily about storing under what trust type a module has been compiled.
+-}
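For orientation, here is a minimal source-level sketch of what this trust bookkeeping is about (module and entity names are made up for the example):

{-# LANGUAGE Safe #-}
module Example.SafeClient (lookupInt) where

-- 'import safe' insists that the imported module is trusted; that requirement
-- is exactly what the IsSafeImport flag below records.
import safe qualified Data.Map as Map

lookupInt :: Int -> Map.Map Int String -> Maybe String
lookupInt = Map.lookup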
-\begin{code}
-- | Is an import a safe import?
type IsSafeImport = Bool
@@ -2598,15 +2583,15 @@ instance Outputable IfaceTrustInfo where
instance Binary IfaceTrustInfo where
put_ bh iftrust = putByte bh $ trustInfoToNum iftrust
get bh = getByte bh >>= (return . numToTrustInfo)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Parser result}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data HsParsedModule = HsParsedModule {
hpm_module :: Located (HsModule RdrName),
hpm_src_files :: [FilePath],
@@ -2618,18 +2603,18 @@ data HsParsedModule = HsParsedModule {
hpm_annotations :: ApiAnns
-- See note [Api annotations] in ApiAnnotation.hs
}
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Linkable stuff}
-%* *
-%************************************************************************
+* *
+************************************************************************
This stuff is in here, rather than (say) in Linker.lhs, because the Linker.lhs
stuff is the *dynamic* linker, and isn't present in a stage-1 compiler
+-}
-\begin{code}
-- | Information we can use to dynamically link modules into the compiler
data Linkable = LM {
linkableTime :: UTCTime, -- ^ Time at which this linkable was built
@@ -2709,15 +2694,15 @@ nameOfObject other = pprPanic "nameOfObject" (ppr other)
byteCodeOfObject :: Unlinked -> CompiledByteCode
byteCodeOfObject (BCOs bc _) = bc
byteCodeOfObject other = pprPanic "byteCodeOfObject" (ppr other)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Breakpoint Support}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Breakpoint index
type BreakIndex = Int
@@ -2744,4 +2729,3 @@ emptyModBreaks = ModBreaks
, modBreaks_vars = array (0,-1) []
, modBreaks_decls = array (0,-1) []
}
-\end{code}
diff --git a/compiler/main/Packages.lhs b/compiler/main/Packages.hs
index 8fe169363f..0a875b2f13 100644
--- a/compiler/main/Packages.lhs
+++ b/compiler/main/Packages.hs
@@ -1,7 +1,5 @@
-%
-% (c) The University of Glasgow, 2006
-%
-\begin{code}
+-- (c) The University of Glasgow, 2006
+
{-# LANGUAGE CPP, ScopedTypeVariables #-}
-- | Package manipulation
@@ -1390,5 +1388,3 @@ pprModuleMap dflags =
fsPackageName :: PackageConfig -> FastString
fsPackageName = mkFastString . packageNameString
-
-\end{code}
diff --git a/compiler/main/Packages.lhs-boot b/compiler/main/Packages.hs-boot
index 3fd0fd5422..2f898f19d3 100644
--- a/compiler/main/Packages.lhs-boot
+++ b/compiler/main/Packages.hs-boot
@@ -1,8 +1,6 @@
-\begin{code}
module Packages where
-- Well, this is kind of stupid...
import {-# SOURCE #-} Module (PackageKey)
import {-# SOURCE #-} DynFlags (DynFlags)
data PackageState
packageKeyPackageIdString :: DynFlags -> PackageKey -> String
-\end{code}
diff --git a/compiler/main/SysTools.lhs b/compiler/main/SysTools.hs
index 4c7ab03664..375cf2e58c 100644
--- a/compiler/main/SysTools.lhs
+++ b/compiler/main/SysTools.hs
@@ -1,3 +1,4 @@
+{-
-----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 2001-2003
@@ -5,8 +6,8 @@
-- Access to system tools: gcc, cp, rm etc
--
-----------------------------------------------------------------------------
+-}
-\begin{code}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module SysTools (
@@ -96,8 +97,8 @@ import SrcLoc ( SrcLoc, mkSrcLoc, noSrcSpan, mkSrcSpan )
# error Unknown mingw32 arch
# endif
#endif
-\end{code}
+{-
How GHC finds its files
~~~~~~~~~~~~~~~~~~~~~~~
@@ -162,13 +163,13 @@ stuff.
End of NOTES
---------------------------------------------
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Initialisation}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
initSysTools :: Maybe String -- Maybe TopDir path (without the '-B' prefix)
-> IO Settings -- Set all the mutable variables above, holding
-- (a) the system programs
@@ -351,9 +352,7 @@ initSysTools mbMinusB
sOpt_lc = [],
sPlatformConstants = platformConstants
}
-\end{code}
-\begin{code}
-- returns a Unix-format path (relying on getBaseDir to do so too)
findTopDir :: Maybe String -- Maybe TopDir path (without the '-B' prefix).
-> IO String -- TopDir (in Unix format '/' separated)
@@ -365,17 +364,15 @@ findTopDir Nothing
-- "Just" on Windows, "Nothing" on unix
Nothing -> throwGhcExceptionIO (InstallationError "missing -B<dir> option")
Just dir -> return dir
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Running an external program}
-%* *
-%************************************************************************
-
+* *
+************************************************************************
+-}
-\begin{code}
runUnlit :: DynFlags -> [Option] -> IO ()
runUnlit dflags args = do
let prog = pgm_L dflags
@@ -932,7 +929,7 @@ runLibtool dflags args = do
linkargs <- neededLinkArgs `fmap` getLinkerInfo dflags
let args1 = map Option (getOpts dflags opt_l)
args2 = [Option "-static"] ++ args1 ++ args ++ linkargs
- libtool = pgm_libtool dflags
+ libtool = pgm_libtool dflags
mb_env <- getGccEnv args2
runSomethingFiltered dflags id "Linker" libtool args2 mb_env
@@ -1019,15 +1016,15 @@ readElfSection _dflags section exe = do
_ <- string "0]"
skipSpaces
munch (const True)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Managing temporary files}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
cleanTempDirs :: DynFlags -> IO ()
cleanTempDirs dflags
= unless (gopt Opt_KeepTmpFiles dflags)
@@ -1347,15 +1344,15 @@ traceCmd dflags phase_name cmd_line action
handle_exn _verb exn = do { debugTraceMsg dflags 2 (char '\n')
; debugTraceMsg dflags 2 (ptext (sLit "Failed:") <+> text cmd_line <+> text (show exn))
; throwGhcExceptionIO (PhaseFailed phase_name (ExitFailure 1)) }
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Support code}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-----------------------------------------------------------------------------
-- Define getBaseDir :: IO (Maybe String)
@@ -1371,7 +1368,7 @@ getBaseDir = try_size 2048 -- plenty, PATH_MAX is 512 under Win32.
0 -> return Nothing
_ | ret < size -> fmap (Just . rootDir) $ peekCWString buf
| otherwise -> try_size (size * 2)
-
+
rootDir s = case splitFileName $ normalise s of
(d, ghc_exe)
| lower ghc_exe `elem` ["ghc.exe",
@@ -1591,4 +1588,3 @@ linkDynLib dflags0 o_files dep_packages
++ map Option pkg_lib_path_opts
++ map Option pkg_link_opts
)
-\end{code}
diff --git a/compiler/main/TidyPgm.lhs b/compiler/main/TidyPgm.hs
index b7a867d718..ed37225219 100644
--- a/compiler/main/TidyPgm.lhs
+++ b/compiler/main/TidyPgm.hs
@@ -1,9 +1,9 @@
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
\section{Tidying up Core}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module TidyPgm (
@@ -63,9 +63,8 @@ import Control.Monad
import Data.Function
import Data.List ( sortBy )
import Data.IORef ( atomicModifyIORef )
-\end{code}
-
+{-
Constructing the TypeEnv, Instances, Rules, VectInfo from which the
ModIface is constructed, and which goes on to subsequent modules in
--make mode.
@@ -84,11 +83,11 @@ plus one for each DataCon; the interface file will contain just one
data type declaration, but it is de-serialised back into a collection
of TyThings.
-%************************************************************************
-%* *
+************************************************************************
+* *
Plan A: simpleTidyPgm
-%* *
-%************************************************************************
+* *
+************************************************************************
Plan A: mkBootModDetails: omit pragmas, make interfaces small
@@ -123,8 +122,8 @@ Plan A: mkBootModDetails: omit pragmas, make interfaces small
* If this is an hsig file, drop the instances altogether too (they'll
get pulled in by the implicit module import.)
+-}
-\begin{code}
-- This is Plan A: make a small type env when typechecking only,
-- or when compiling a hs-boot file, or simply when not using -O
--
@@ -200,14 +199,13 @@ globaliseAndTidyId id
= Id.setIdType (globaliseId id) tidy_type
where
tidy_type = tidyTopType (idType id)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Plan B: tidy bindings, make TypeEnv full of IdInfo
-%* *
-%************************************************************************
+* *
+************************************************************************
Plan B: include pragmas, make interfaces
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -297,8 +295,8 @@ binder
Finally, substitute these new top-level binders consistently
throughout, including in unfoldings. We also tidy binders in
RHSs, so that they print nicely in interfaces.
+-}
-\begin{code}
tidyProgram :: HscEnv -> ModGuts -> IO (CgGuts, ModDetails)
tidyProgram hsc_env (ModGuts { mg_module = mod
, mg_exports = exports
@@ -334,7 +332,7 @@ tidyProgram hsc_env (ModGuts { mg_module = mod
; (unfold_env, tidy_occ_env)
<- chooseExternalIds hsc_env mod omit_prags expose_all
binds implicit_binds imp_rules (vectInfoVar vect_info)
- ; let { (trimmed_binds, trimmed_rules)
+ ; let { (trimmed_binds, trimmed_rules)
= findExternalRules omit_prags binds imp_rules unfold_env }
; (tidy_env, tidy_binds)
@@ -422,10 +420,7 @@ lookup_aux_id type_env id
= case lookupTypeEnv type_env (idName id) of
Just (AnId id') -> id'
_other -> pprPanic "lookup_aux_id" (ppr id)
-\end{code}
-
-\begin{code}
tidyTypeEnv :: Bool -- Compiling without -O, so omit prags
-> TypeEnv -> TypeEnv
@@ -464,9 +459,7 @@ trimThing other_thing
extendTypeEnvWithPatSyns :: [PatSyn] -> TypeEnv -> TypeEnv
extendTypeEnvWithPatSyns tidy_patsyns type_env
= extendTypeEnvList type_env [AConLike (PatSynCon ps) | ps <- tidy_patsyns ]
-\end{code}
-\begin{code}
tidyVectInfo :: TidyEnv -> VectInfo -> VectInfo
tidyVectInfo (_, var_env) info@(VectInfo { vectInfoVar = vars
, vectInfoParallelVars = parallelVars
@@ -493,17 +486,17 @@ tidyVectInfo (_, var_env) info@(VectInfo { vectInfoVar = vars
]
lookup_var var = lookupWithDefaultVarEnv var_env var var
-
+
-- We need to make sure that all names getting into the iface version of 'VectInfo' are
-- external; otherwise, 'MkIface' will bomb out.
isExternalId = isExternalName . idName
-\end{code}
+{-
Note [Don't attempt to trim data types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For some time GHC tried to avoid exporting the data constructors
of a data type if it wasn't strictly necessary to do so; see Trac #835.
-But "strictly necessary" accumulated a longer and longer list
+But "strictly necessary" accumulated a longer and longer list
of exceptions, and finally I gave up the battle:
commit 9a20e540754fc2af74c2e7392f2786a81d8d5f11
@@ -511,27 +504,27 @@ of exceptions, and finally I gave up the battle:
Date: Thu Dec 6 16:03:16 2012 +0000
Stop attempting to "trim" data types in interface files
-
+
Without -O, we previously tried to make interface files smaller
by not including the data constructors of data types. But
there are a lot of exceptions, notably when Template Haskell is
involved or, more recently, DataKinds.
-
+
However Trac #7445 shows that even without TemplateHaskell, using
the Data class and invoking Language.Haskell.TH.Quote.dataToExpQ
is enough to require us to expose the data constructors.
-
+
So I've given up on this "optimisation" -- it's probably not
important anyway. Now I'm simply not attempting to trim off
the data constructors. The gain in simplicity is worth the
modest cost in interface file growth, which is limited to the
bits reqd to describe those data constructors.
-%************************************************************************
-%* *
+************************************************************************
+* *
Implicit bindings
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Injecting implicit bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -578,8 +571,8 @@ There is one sort of implicit binding that is injected still later,
namely those for data constructor workers. Reason (I think): it's
really just a code generation trick.... binding itself makes no sense.
See Note [Data constructor workers] in CorePrep.
+-}
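As a hedged reminder of what a data constructor wrapper does (the declaration and helper below are made up; the real wrappers are generated by GHC and collected by getTyConImplicitBinds just below):

data T = MkT Int   -- imagine the strict version: data T = MkT !Int

-- A hand-written stand-in for the wrapper GHC would generate for the strict
-- version: evaluate the field first, then apply the "worker" constructor.
mkT :: Int -> T
mkT x = x `seq` MkT x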
-\begin{code}
getTyConImplicitBinds :: TyCon -> [CoreBind]
getTyConImplicitBinds tc = map get_defn (mapMaybe dataConWrapId_maybe (tyConDataCons tc))
@@ -590,18 +583,17 @@ getClassImplicitBinds cls
get_defn :: Id -> CoreBind
get_defn id = NonRec id (unfoldingTemplate (realIdUnfolding id))
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Step 1: finding externals}
-%* *
-%************************************************************************
+* *
+************************************************************************
See Note [Choosing external names].
+-}
-\begin{code}
type UnfoldEnv = IdEnv (Name{-new name-}, Bool {-show unfolding-})
-- Maps each top-level Id to its new Name (the Id is tidied in step 2)
-- The Unique is unchanged. If the new Name is external, it will be
@@ -696,7 +688,7 @@ chooseExternalIds hsc_env mod omit_prags expose_all binds implicit_binds imp_id_
-- add vectorised version if any exists
new_ids' = new_ids ++ maybeToList (fmap snd $ lookupVarEnv vect_vars idocc)
-
+
-- 'idocc' is an *occurrence*, but we need to see the
-- unfolding in the *definition*; so look up in binder_set
refined_id = case lookupVarSet binder_set idocc of
@@ -744,13 +736,13 @@ addExternal expose_all id = (new_needed_ids, show_unfold)
|| neverUnfoldGuidance guidance)
show_unfolding (DFunUnfolding {}) = True
show_unfolding _ = False
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Deterministic free variables
-%* *
-%************************************************************************
+* *
+************************************************************************
We want a deterministic free-variable list. exprFreeVars gives us
a VarSet, which is in a non-deterministic order when converted to a
@@ -758,8 +750,8 @@ list. Hence, here we define a free-variable finder that returns
the free variables in the order that they are encountered.
See Note [Choosing external names]
+-}
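The underlying issue, sketched with ordinary containers instead of VarSet (illustration only, not GHC code):

import Data.List (nub)
import qualified Data.Set as Set

-- A round trip through a set orders elements by their Ord instance (for Vars
-- that is the Unique), not by when they were encountered:
viaSet :: [Int]
viaSet = Set.toList (Set.fromList [30, 10, 20])   -- [10,20,30]

-- An order-preserving collector keeps first-encounter order, which is what
-- bndrFvsInOrder below does for free variables:
inEncounterOrder :: [Int]
inEncounterOrder = nub [30, 10, 20]               -- [30,10,20]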
-\begin{code}
bndrFvsInOrder :: Bool -> Id -> [Id]
bndrFvsInOrder show_unfold id
= run (dffvLetBndr show_unfold id)
@@ -849,21 +841,20 @@ dffvLetBndr vanilla_unfold id
| otherwise -> return ()
_ -> dffvExpr rhs
- go_unf (DFunUnfolding { df_bndrs = bndrs, df_args = args })
+ go_unf (DFunUnfolding { df_bndrs = bndrs, df_args = args })
= extendScopeList bndrs $ mapM_ dffvExpr args
go_unf _ = return ()
go_rule (BuiltinRule {}) = return ()
go_rule (Rule { ru_bndrs = bndrs, ru_rhs = rhs })
= extendScopeList bndrs (dffvExpr rhs)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
findExternalRules
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Finding external rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -918,9 +909,8 @@ called in the final code), we keep the rule too.
I found that binary sizes jumped by 6-10% when I started to specialise
INLINE functions (again, Note [Inline specialisations] in Specialise).
Adding trimAutoRules removed all this bloat.
+-}
-
-\begin{code}
findExternalRules :: Bool -- Omit pragmas
-> [CoreBind]
-> [CoreRule] -- Local rules for imported fns
@@ -1000,20 +990,20 @@ findExternalRules omit_prags binds imp_id_rules unfold_env
, is_external_id id -- Only collect rules for external Ids
, rule <- idCoreRules id
, expose_rule rule ] -- and ones that can fire in a client
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
tidyTopName
-%* *
-%************************************************************************
+* *
+************************************************************************
This is where we set names to local/global based on whether they really are
externally visible (see comment at the top of this module). If the name
was previously local, we have to give it a unique occurrence name if
we intend to externalise it.
+-}
-\begin{code}
tidyTopName :: Module -> IORef NameCache -> Maybe Id -> TidyOccEnv
-> Id -> IO (TidyOccEnv, Name)
tidyTopName mod nc_var maybe_ref occ_env id
@@ -1081,17 +1071,15 @@ tidyTopName mod nc_var maybe_ref occ_env id
-- All this is done by allocateGlobalBinder.
-- This is needed when *re*-compiling a module in GHCi; we must
-- use the same name for externally-visible things as we did before.
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Step 2: top-level tidying}
-%* *
-%************************************************************************
-
+* *
+************************************************************************
+-}
-\begin{code}
-- TopTidyEnv: when tidying we need to know
-- * nc_var: The NameCache, containing a unique supply and any pre-ordained Names.
-- These may have arisen because the
@@ -1248,7 +1236,7 @@ tidyTopIdInfo dflags rhs_tidy_env name orig_rhs tidy_rhs idinfo show_unfold caf_
sig = strictnessInfo idinfo
final_sig | not $ isNopSig sig
- = WARN( _bottom_hidden sig , ppr name ) sig
+ = WARN( _bottom_hidden sig , ppr name ) sig
-- try a cheap-and-cheerful bottom analyser
| Just (_, nsig) <- mb_bot_str = nsig
| otherwise = sig
@@ -1285,13 +1273,13 @@ tidyTopIdInfo dflags rhs_tidy_env name orig_rhs tidy_rhs idinfo show_unfold caf_
-- it to the top level. So it seems more robust just to
-- fix it here.
arity = exprArity orig_rhs
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Figuring out CafInfo for an expression}
-%* *
-%************************************************************************
+* *
+************************************************************************
hasCafRefs decides whether a top-level closure can point into the dynamic heap.
We mark such things as `MayHaveCafRefs' because this information is
@@ -1307,8 +1295,8 @@ hence the size of the SRTs) down, we could also look at the expression and
decide whether it requires a small bounded amount of heap, so we can ignore
it as a CAF. In these cases however, we would need to use an additional
CAF list to keep track of non-collectable CAFs.
+-}
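For readers new to the term: a CAF is a top-level value with no arguments whose evaluated form is retained in the heap. A small illustrative pair (not GHC code):

-- A CAF: once forced, the list cells stay reachable for as long as 'nats'
-- is, so code referring to it is marked MayHaveCafRefs.
nats :: [Integer]
nats = [0 ..]

-- Not a CAF: a function with arguments only allocates when it is called.
double :: Integer -> Integer
double n = n * 2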
-\begin{code}
hasCafRefs :: DynFlags -> PackageKey -> Module
-> (Id, Maybe DataCon, VarEnv Var) -> Arity -> CoreExpr
-> CafInfo
@@ -1359,9 +1347,8 @@ cafRefsV (_, _, p) id
fastOr :: FastBool -> (a -> FastBool) -> a -> FastBool
-- hack for lazy-or over FastBool.
fastOr a f x = fastBool (isFastTrue a || isFastTrue (f x))
-\end{code}
-
+{-
------------------------------------------------------------------------------
-- Old, dead, type-trimming code
-------------------------------------------------------------------------------
@@ -1460,3 +1447,4 @@ mustExposeTyCon no_trim_types exports tc
data_cons = tyConDataCons tc
exported_con con = any (`elemNameSet` exports)
(dataConName con : dataConFieldLabels con)
+-}
diff --git a/compiler/prelude/ForeignCall.lhs b/compiler/prelude/ForeignCall.hs
index 010434300b..0a7a8384dc 100644
--- a/compiler/prelude/ForeignCall.lhs
+++ b/compiler/prelude/ForeignCall.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[Foreign]{Foreign calls}
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable #-}
module ForeignCall (
@@ -25,16 +25,15 @@ import Module
import Data.Char
import Data.Data
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Data types}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
newtype ForeignCall = CCall CCallSpec
deriving Eq
{-! derive: Binary !-}
@@ -46,10 +45,7 @@ isSafeForeignCall (CCall (CCallSpec _ _ safe)) = playSafe safe
-- but this simple printer will do for now
instance Outputable ForeignCall where
ppr (CCall cc) = ppr cc
-\end{code}
-
-\begin{code}
data Safety
= PlaySafe -- Might invoke Haskell GC, or do a call back, or
-- switch threads, etc. So make sure things are
@@ -82,16 +78,15 @@ playSafe PlayRisky = False
playInterruptible :: Safety -> Bool
playInterruptible PlayInterruptible = True
playInterruptible _ = False
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Calling C}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data CExportSpec
= CExportStatic -- foreign export ccall foo :: ty
CLabelString -- C Name of exported function
@@ -105,11 +100,8 @@ data CCallSpec
Safety
deriving( Eq )
{-! derive: Binary !-}
-\end{code}
-The call target:
-
-\begin{code}
+-- The call target:
-- | How to call a particular function in C-land.
data CCallTarget
@@ -138,9 +130,8 @@ data CCallTarget
isDynamicTarget :: CCallTarget -> Bool
isDynamicTarget DynamicTarget = True
isDynamicTarget _ = False
-\end{code}
-
+{-
Stuff to do with calling convention:
ccall: Caller allocates parameters, *and* deallocates them.
@@ -154,8 +145,8 @@ so perhaps we should emit a warning if it's being used on other
platforms.
See: http://www.programmersheaven.com/2/Calling-conventions
+-}
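For orientation, the source-level syntax that picks one of these conventions; the header and symbol names are hypothetical:

{-# LANGUAGE ForeignFunctionInterface #-}
module FFIExample where

import Foreign.C.Types (CInt)

-- 'ccall' is the portable default; 'stdcall' is only meaningful on 32-bit
-- Windows, as discussed above (on other platforms GHC falls back to ccall
-- and warns).
foreign import ccall   "my_lib.h my_add" cAdd :: CInt -> CInt -> CInt
foreign import stdcall "my_lib.h MyAddW" wAdd :: CInt -> CInt -> CInt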
-\begin{code}
-- any changes here should be replicated in the CallConv type in template haskell
data CCallConv = CCallConv | CApiConv | StdCallConv | PrimCallConv | JavaScriptCallConv
deriving (Eq, Data, Typeable)
@@ -177,21 +168,19 @@ ccallConvToInt CCallConv = 1
ccallConvToInt CApiConv = panic "ccallConvToInt CApiConv"
ccallConvToInt (PrimCallConv {}) = panic "ccallConvToInt PrimCallConv"
ccallConvToInt JavaScriptCallConv = panic "ccallConvToInt JavaScriptCallConv"
-\end{code}
+{-
Generate the gcc attribute corresponding to the given
calling convention (used by PprAbsC):
+-}
-\begin{code}
ccallConvAttribute :: CCallConv -> SDoc
ccallConvAttribute StdCallConv = text "__attribute__((__stdcall__))"
ccallConvAttribute CCallConv = empty
ccallConvAttribute CApiConv = empty
ccallConvAttribute (PrimCallConv {}) = panic "ccallConvAttribute PrimCallConv"
ccallConvAttribute JavaScriptCallConv = panic "ccallConvAttribute JavaScriptCallConv"
-\end{code}
-\begin{code}
type CLabelString = FastString -- A C label, completely unencoded
pprCLabelString :: CLabelString -> SDoc
@@ -204,12 +193,9 @@ isCLabelString lbl
ok c = isAlphaNum c || c == '_' || c == '.'
-- The '.' appears in e.g. "foo.so" in the
-- module part of a ExtName. Maybe it should be separate
-\end{code}
+-- Printing into C files:
-Printing into C files:
-
-\begin{code}
instance Outputable CExportSpec where
ppr (CExportStatic str _) = pprCLabelString str
@@ -233,9 +219,7 @@ instance Outputable CCallSpec where
ppr_fun DynamicTarget
= text "__dyn_ccall" <> gc_suf <+> text "\"\""
-\end{code}
-\begin{code}
-- The filename for a C header file
newtype Header = Header FastString
deriving (Eq, Data, Typeable)
@@ -253,16 +237,15 @@ instance Outputable CType where
where hDoc = case mh of
Nothing -> empty
Just h -> ppr h
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Misc}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
{-* Generated by DrIFT-v1.0 : Look, but Don't Touch. *-}
instance Binary ForeignCall where
put_ bh (CCall aa) = put_ bh aa
@@ -350,4 +333,3 @@ instance Binary Header where
put_ bh (Header h) = put_ bh h
get bh = do h <- get bh
return (Header h)
-\end{code}
diff --git a/compiler/prelude/PrelInfo.lhs b/compiler/prelude/PrelInfo.hs
index eaefff2364..2303a8edd3 100644
--- a/compiler/prelude/PrelInfo.lhs
+++ b/compiler/prelude/PrelInfo.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[PrelInfo]{The @PrelInfo@ interface to the compiler's prelude knowledge}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module PrelInfo (
wiredInIds, ghcPrimIds,
@@ -39,13 +39,13 @@ import Util
import {-# SOURCE #-} TcTypeNats ( typeNatTyCons )
import Data.Array
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[builtinNameInfo]{Lookup built-in names}
-%* *
-%************************************************************************
+* *
+************************************************************************
Notes about wired in things
~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -64,9 +64,8 @@ Notes about wired in things
* MkIface prunes out wired-in things before putting them in an interface file.
So interface files never contain wired-in things.
+-}
-
-\begin{code}
wiredInThings :: [TyThing]
-- This list is used only to initialise HscMain.knownKeyNames
-- to ensure that when you say "Prelude.map" in your source code, you
@@ -86,19 +85,19 @@ wiredInThings
where
tycon_things = map ATyCon ([funTyCon] ++ primTyCons ++ wiredInTyCons
++ typeNatTyCons)
-\end{code}
+{-
We let a lot of "non-standard" values be visible, so that we can make
sense of them in interface pragmas. It's cool, though they all have
"non-standard" names, so they won't get past the parser in user code.
-%************************************************************************
-%* *
+************************************************************************
+* *
PrimOpIds
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
primOpIds :: Array Int Id
-- A cache of the PrimOp Ids, indexed by PrimOp tag
primOpIds = array (1,maxPrimOpTag) [ (primOpTag op, mkPrimOpId op)
@@ -106,51 +105,47 @@ primOpIds = array (1,maxPrimOpTag) [ (primOpTag op, mkPrimOpId op)
primOpId :: PrimOp -> Id
primOpId op = primOpIds ! primOpTag op
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Export lists for pseudo-modules (GHC.Prim)}
-%* *
-%************************************************************************
+* *
+************************************************************************
GHC.Prim "exports" all the primops and primitive types, some
wired-in Ids.
+-}
-\begin{code}
ghcPrimExports :: [IfaceExport]
ghcPrimExports
= map (Avail . idName) ghcPrimIds ++
map (Avail . idName . primOpId) allThePrimOps ++
[ AvailTC n [n]
| tc <- funTyCon : primTyCons, let n = tyConName tc ]
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Built-in keys}
-%* *
-%************************************************************************
+* *
+************************************************************************
ToDo: make it do the ``like'' part properly (as in 0.26 and before).
+-}
-\begin{code}
maybeCharLikeCon, maybeIntLikeCon :: DataCon -> Bool
maybeCharLikeCon con = con `hasKey` charDataConKey
maybeIntLikeCon con = con `hasKey` intDataConKey
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Class predicates}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
isNumericClass, isStandardClass :: Class -> Bool
isNumericClass clas = classKey clas `is_elem` numericClassKeys
@@ -158,4 +153,3 @@ isStandardClass clas = classKey clas `is_elem` standardClassKeys
is_elem :: Eq a => a -> [a] -> Bool
is_elem = isIn "is_X_Class"
-\end{code}
diff --git a/compiler/prelude/PrelNames.lhs b/compiler/prelude/PrelNames.hs
index e0a5890619..65eaebb2db 100644
--- a/compiler/prelude/PrelNames.lhs
+++ b/compiler/prelude/PrelNames.hs
@@ -1,6 +1,6 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[PrelNames]{Definitions of prelude modules and names}
@@ -100,8 +100,8 @@ This is accomplished through a combination of mechanisms:
than trying to find it in the original-name cache.
See also Note [Built-in syntax and the OrigNameCache]
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module PrelNames (
@@ -127,36 +127,32 @@ import SrcLoc
import FastString
import Config ( cIntegerLibraryType, IntegerLibrary(..) )
import Panic ( panic )
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
allNameStrings
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
allNameStrings :: [String]
-- Infinite list of a,b,c...z, aa, ab, ac, ... etc
allNameStrings = [ c:cs | cs <- "" : allNameStrings, c <- ['a'..'z'] ]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Local Names}
-%* *
-%************************************************************************
+* *
+************************************************************************
This *local* name is used by the interactive stuff
+-}
-\begin{code}
itName :: Unique -> SrcSpan -> Name
itName uniq loc = mkInternalName uniq (mkOccNameFS varName (fsLit "it")) loc
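That is the 'it' a GHCi user sees rebound after each evaluation; an illustrative session (not part of this patch):

ghci> 6 * 7
42
ghci> it + 1
43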
-\end{code}
-\begin{code}
-- mkUnboundName makes a place-holder Name; it shouldn't be looked at except possibly
-- during compiler debugging.
mkUnboundName :: RdrName -> Name
@@ -164,14 +160,13 @@ mkUnboundName rdr_name = mkInternalName unboundKey (rdrNameOcc rdr_name) noSrcSp
isUnboundName :: Name -> Bool
isUnboundName name = name `hasKey` unboundKey
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Known key Names}
-%* *
-%************************************************************************
+* *
+************************************************************************
This section tells what the compiler knows about the association of
names with uniques. These ones are the *non* wired-in ones. The
@@ -182,8 +177,8 @@ The names for DPH can come from one of multiple backend packages. At the point w
the names for multiple backends. That works out fine, although they use the same uniques,
as we are guaranteed to only load one backend; hence, only one of the different names
sharing a unique will be used.
+-}
-\begin{code}
basicKnownKeyNames :: [Name]
basicKnownKeyNames
= genericTyConNames
@@ -368,18 +363,18 @@ genericTyConNames = [
d1TyConName, c1TyConName, s1TyConName, noSelTyConName,
repTyConName, rep1TyConName
]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Module names}
-%* *
-%************************************************************************
+* *
+************************************************************************
--MetaHaskell Extension Add a new module here
-\begin{code}
+-}
+
pRELUDE :: Module
pRELUDE = mkBaseModule_ pRELUDE_NAME
@@ -491,29 +486,28 @@ mkMainModule m = mkModule mainPackageKey (mkModuleNameFS m)
mkMainModule_ :: ModuleName -> Module
mkMainModule_ m = mkModule mainPackageKey m
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Constructing the names of tuples}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkTupleModule :: TupleSort -> Module
mkTupleModule BoxedTuple = gHC_TUPLE
mkTupleModule ConstraintTuple = gHC_TUPLE
mkTupleModule UnboxedTuple = gHC_PRIM
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
RdrNames
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
main_RDR_Unqual :: RdrName
main_RDR_Unqual = mkUnqual varName (fsLit "main")
-- We definitely don't want an Orig RdrName, because
@@ -738,13 +732,13 @@ varQual_RDR mod str = mkOrig mod (mkOccNameFS varName str)
tcQual_RDR mod str = mkOrig mod (mkOccNameFS tcName str)
clsQual_RDR mod str = mkOrig mod (mkOccNameFS clsName str)
dataQual_RDR mod str = mkOrig mod (mkOccNameFS dataName str)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Known-key names}
-%* *
-%************************************************************************
+* *
+************************************************************************
Many of these Names are not really "built in", but some parts of the
compiler (notably the deriving mechanism) need to mention their names,
@@ -752,9 +746,8 @@ and it's convenient to write them all down in one place.
--MetaHaskell Extension add the constrs and the lower case case
-- guys as well (perhaps) e.g. see trueDataConName below
+-}
-
-\begin{code}
wildCardName :: Name
wildCardName = mkSystemVarName wildCardKey (fsLit "wild")
@@ -1165,17 +1158,17 @@ pLUGINS :: Module
pLUGINS = mkThisGhcModule (fsLit "Plugins")
pluginTyConName :: Name
pluginTyConName = tcQual pLUGINS (fsLit "Plugin") pluginTyConKey
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Local helpers}
-%* *
-%************************************************************************
+* *
+************************************************************************
All these are original names; hence mkOrig
+-}
-\begin{code}
varQual, tcQual, clsQual :: Module -> FastString -> Unique -> Name
varQual = mk_known_key_name varName
tcQual = mk_known_key_name tcName
@@ -1188,16 +1181,16 @@ mk_known_key_name space modu str unique
conName :: Module -> FastString -> Unique -> Name
conName modu occ unique
= mkExternalName unique modu (mkOccNameFS dataName occ) noSrcSpan
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection[Uniques-prelude-Classes]{@Uniques@ for wired-in @Classes@}
-%* *
-%************************************************************************
+* *
+************************************************************************
--MetaHaskell extension hand allocate keys here
+-}
-\begin{code}
boundedClassKey, enumClassKey, eqClassKey, floatingClassKey,
fractionalClassKey, integralClassKey, monadClassKey, dataClassKey,
functorClassKey, numClassKey, ordClassKey, readClassKey, realClassKey,
@@ -1270,15 +1263,15 @@ ghciIoClassKey = mkPreludeClassUnique 44
ipClassNameKey :: Unique
ipClassNameKey = mkPreludeClassUnique 45
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection[Uniques-prelude-TyCons]{@Uniques@ for wired-in @TyCons@}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
addrPrimTyConKey, arrayPrimTyConKey, arrayArrayPrimTyConKey, boolTyConKey, byteArrayPrimTyConKey,
charPrimTyConKey, charTyConKey, doublePrimTyConKey, doubleTyConKey,
floatPrimTyConKey, floatTyConKey, funTyConKey, intPrimTyConKey,
@@ -1495,15 +1488,15 @@ smallMutableArrayPrimTyConKey = mkPreludeTyConUnique 179
unitTyConKey :: Unique
unitTyConKey = mkTupleTyConUnique BoxedTuple 0
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection[Uniques-prelude-DataCons]{@Uniques@ for wired-in @DataCons@}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
charDataConKey, consDataConKey, doubleDataConKey, falseDataConKey,
floatDataConKey, intDataConKey, integerSDataConKey, nilDataConKey,
ratioDataConKey, stableNameDataConKey, trueDataConKey, wordDataConKey,
@@ -1545,15 +1538,15 @@ eqDataConKey = mkPreludeDataConUnique 28
gtDataConKey = mkPreludeDataConUnique 29
coercibleDataConKey = mkPreludeDataConUnique 32
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection[Uniques-prelude-Ids]{@Uniques@ for wired-in @Ids@ (except @DataCons@)}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
wildCardKey, absentErrorIdKey, augmentIdKey, appendIdKey,
buildIdKey, errorIdKey, foldrIdKey, recSelErrorIdKey,
seqIdKey, irrefutPatErrorIdKey, eqStringIdKey,
@@ -1716,13 +1709,13 @@ magicDictKey = mkPreludeMiscIdUnique 156
coerceKey :: Unique
coerceKey = mkPreludeMiscIdUnique 157
-\end{code}
+{-
Certain class operations from Prelude classes. They get their own
uniques so we can look them up easily when we want to conjure them up
during type checking.
+-}
-\begin{code}
-- Just a place holder for unbound variables produced by the renamer:
unboundKey :: Unique
unboundKey = mkPreludeMiscIdUnique 160
@@ -1800,19 +1793,19 @@ proxyHashKey = mkPreludeMiscIdUnique 502
---------------- Template Haskell -------------------
-- USES IdUniques 200-499
-----------------------------------------------------
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Class-std-groups]{Standard groups of Prelude classes}
-%* *
-%************************************************************************
+* *
+************************************************************************
NOTE: @Eq@ and @Text@ do need to appear in @standardClasses@
even though every numeric class has these two as a superclass,
because the list of ambiguous dictionaries hasn't been simplified.
+-}
-\begin{code}
numericClassKeys :: [Unique]
numericClassKeys =
[ numClassKey
@@ -1840,14 +1833,13 @@ standardClassKeys = derivableClassKeys ++ numericClassKeys
applicativeClassKey, foldableClassKey,
traversableClassKey, alternativeClassKey
]
-\end{code}
+{-
@derivableClassKeys@ is also used in checking \tr{deriving} constructs
(@TcDeriv@).
+-}
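The classes behind these keys are exactly the ones a plain deriving clause may mention; an illustrative (non-GHC) declaration:

import Data.Ix (Ix)

data Direction = North | East | South | West
  deriving (Eq, Ord, Enum, Ix, Bounded, Show, Read)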
-\begin{code}
derivableClassKeys :: [Unique]
derivableClassKeys
= [ eqClassKey, ordClassKey, enumClassKey, ixClassKey,
boundedClassKey, showClassKey, readClassKey ]
-\end{code}
diff --git a/compiler/prelude/PrelNames.lhs-boot b/compiler/prelude/PrelNames.hs-boot
index 7b5365e621..0bd74d5577 100644
--- a/compiler/prelude/PrelNames.lhs-boot
+++ b/compiler/prelude/PrelNames.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module PrelNames where
import Module
@@ -6,5 +5,3 @@ import Unique
mAIN :: Module
liftedTypeKindTyConKey :: Unique
-\end{code}
-
diff --git a/compiler/prelude/PrelRules.lhs b/compiler/prelude/PrelRules.hs
index 054137178b..6807b1c79f 100644
--- a/compiler/prelude/PrelRules.lhs
+++ b/compiler/prelude/PrelRules.hs
@@ -1,6 +1,6 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[ConFold]{Constant Folder}
Conceptually, constant folding should be parameterized with the kind
@@ -10,8 +10,8 @@ and runtime. We cheat a little bit here...
ToDo:
check boundaries before folding, e.g. we can fold the Float addition
(i1 + i2) only if it results in a valid Float.
+-}
-\begin{code}
{-# LANGUAGE CPP, RankNTypes #-}
{-# OPTIONS_GHC -optc-DNON_POSIX_SOURCE #-}
@@ -60,9 +60,8 @@ import qualified Data.ByteString as BS
import Data.Int
import Data.Ratio
import Data.Word
-\end{code}
-
+{-
Note [Constant folding]
~~~~~~~~~~~~~~~~~~~~~~~
primOpRules generates a rewrite rule for each primop
@@ -77,9 +76,8 @@ more like
where the (+#) on the rhs is done at compile time
That is why these rules are built in here.
+-}
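For comparison, a user-level RULES pragma can rewrite calls to an ordinary function, but it can only replace one expression template with another; the primop rules below must actually compute their result (e.g. add the two literals), which is why they are built in. A hedged, made-up example of the pragma form:

myMap :: (a -> b) -> [a] -> [b]
myMap = map
{-# NOINLINE myMap #-}   -- keep myMap from inlining before the rule can fire

{-# RULES "hypothetical/myMap-id" forall xs. myMap id xs = xs #-}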
-
-\begin{code}
primOpRules :: Name -> PrimOp -> Maybe CoreRule
-- ToDo: something for integer-shift ops?
-- NotOp
@@ -271,15 +269,13 @@ primOpRules nm SparkOp = mkPrimOpRule nm 4 [ sparkRule ]
primOpRules _ _ = Nothing
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Doing the business}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
-- useful shorthands
mkPrimOpRule :: Name -> Int -> [RuleM CoreExpr] -> Maybe CoreRule
@@ -401,10 +397,10 @@ wordShiftRule shift_op
= do { dflags <- getDynFlags
; [e1, Lit (MachInt shift_len)] <- getArgs
; case e1 of
- _ | shift_len == 0
+ _ | shift_len == 0
-> return e1
| shift_len < 0 || wordSizeInBits dflags < shift_len
- -> return (mkRuntimeErrorApp rUNTIME_ERROR_ID wordPrimTy
+ -> return (mkRuntimeErrorApp rUNTIME_ERROR_ID wordPrimTy
("Bad shift length" ++ show shift_len))
Lit (MachWord x)
-> let op = shift_op dflags
@@ -553,8 +549,8 @@ idempotent :: RuleM CoreExpr
idempotent = do [e1, e2] <- getArgs
guard $ cheapEqExpr e1 e2
return e1
-\end{code}
+{-
Note [Guarding against silly shifts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this code:
@@ -593,7 +589,7 @@ Shift.$wgo = \ (w_sCS :: GHC.Prim.Int#) (w1_sCT :: [GHC.Types.Bool]) ->
9223372036854775807 -> __word 0
} } } }
-Note the massive shift on line "!!!!". It can't happen, because we've checked
+Note the massive shift on line "!!!!". It can't happen, because we've checked
that w < 64, but the optimiser didn't spot that. We DO NOT want to constant-fold this!
Moreover, if the programmer writes (n `uncheckedShiftL` 9223372036854775807), we
can't constant fold it, but if it gets to the assembler we get
@@ -602,13 +598,13 @@ can't constant fold it, but if it gets to the assember we get
So the best thing to do is to rewrite the shift with a call to error,
when the second arg is stupid.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Vaguely generic functions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkBasicRule :: Name -> Int -> RuleM CoreExpr -> CoreRule
-- Gives the Rule the same name as the primop itself
mkBasicRule op_name n_args rm
@@ -829,13 +825,12 @@ matchPrimOpId op id = do
op' <- liftMaybe $ isPrimOpId_maybe id
guard $ op == op'
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Special rules for seq, tagToEnum, dataToTag}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [tagToEnum#]
~~~~~~~~~~~~~~~~~
@@ -857,8 +852,8 @@ because we don't expect the user to call tagToEnum# at all; we merely
generate calls in derived instances of Enum. So we compromise: a
rewrite rule rewrites a bad instance of tagToEnum# to an error call,
and emits a warning.
+-}
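For reference, what a well-behaved use looks like; user code normally never writes this directly, as noted above (the module name is made up):

{-# LANGUAGE MagicHash #-}
module TagToEnumExample where

import GHC.Exts (tagToEnum#)

data Colour = Red | Green | Blue deriving Show

-- The result type fixes the enumeration, so tag 1 picks out Green; this is
-- exactly the shape the rule below expects.
green :: Colour
green = tagToEnum# 1#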
-\begin{code}
tagToEnumRule :: RuleM CoreExpr
-- If data T a = A | B | C
-- then tag2Enum# (T ty) 2# --> B ty
@@ -875,15 +870,14 @@ tagToEnumRule = do
-- See Note [tagToEnum#]
_ -> WARN( True, ptext (sLit "tagToEnum# on non-enumeration type") <+> ppr ty )
return $ mkRuntimeErrorApp rUNTIME_ERROR_ID ty "tagToEnum# on non-enumeration type"
-\end{code}
-
+{-
For dataToTag#, we can reduce if either
(a) the argument is a constructor
(b) the argument is a variable whose unfolding is a known constructor
+-}
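A sketch of case (a), where the argument is visibly a constructor (the module name is made up):

{-# LANGUAGE MagicHash #-}
module DataToTagExample where

import GHC.Exts (dataToTag#, Int (I#))

data Colour = Red | Green | Blue

-- The argument is a known constructor, so the rule below can rewrite the
-- call to the literal tag 1 at compile time.
tagOfGreen :: Int
tagOfGreen = I# (dataToTag# Green)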
-\begin{code}
dataToTagRule :: RuleM CoreExpr
dataToTagRule = a `mplus` b
where
@@ -899,15 +893,15 @@ dataToTagRule = a `mplus` b
(dc,_,_) <- liftMaybe $ exprIsConApp_maybe in_scope val_arg
ASSERT( not (isNewTyCon (dataConTyCon dc)) ) return ()
return $ mkIntVal dflags (toInteger (dataConTag dc - fIRST_TAG))
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Rules for seq# and spark#}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- seq# :: forall a s . a -> State# s -> (# State# s, a #)
seqRule :: RuleM CoreExpr
seqRule = do
@@ -921,13 +915,13 @@ sparkRule :: RuleM CoreExpr
sparkRule = seqRule -- reduce on HNF, just the same
-- XXX perhaps we shouldn't do this, because a spark eliminated by
-- this rule won't be counted as a dud at runtime?
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Built in rules}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Scoping for Builtin rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -954,9 +948,8 @@ rewriting so again we are fine.
(This whole thing doesn't show up for non-built-in rules because their dependencies
are explicit.)
+-}
-
-\begin{code}
builtinRules :: [CoreRule]
-- Rules for non-primops that can't be expressed using a RULE pragma
builtinRules
@@ -1327,4 +1320,3 @@ match_smallIntegerTo primOp _ _ _ [App (Var x) y]
| idName x == smallIntegerName
= Just $ App (Var (mkPrimOpId primOp)) y
match_smallIntegerTo _ _ _ _ _ = Nothing
-\end{code}
diff --git a/compiler/prelude/PrimOp.lhs b/compiler/prelude/PrimOp.hs
index 198078bc9f..1b7e314fc7 100644
--- a/compiler/prelude/PrimOp.lhs
+++ b/compiler/prelude/PrimOp.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[PrimOp]{Primitive operations (machine-level)}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module PrimOp (
@@ -41,26 +41,23 @@ import Outputable
import FastTypes
import FastString
import Module ( PackageKey )
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[PrimOp-datatype]{Datatype for @PrimOp@ (an enumeration)}
-%* *
-%************************************************************************
+* *
+************************************************************************
These are in \tr{state-interface.verb} order.
-
-\begin{code}
+-}
-- supplies:
-- data PrimOp = ...
#include "primop-data-decl.hs-incl"
-\end{code}
-Used for the Ord instance
+-- Used for the Ord instance
-\begin{code}
primOpTag :: PrimOp -> Int
primOpTag op = iBox (tagOf_PrimOp op)
@@ -84,34 +81,26 @@ instance Ord PrimOp where
instance Outputable PrimOp where
ppr op = pprPrimOp op
-\end{code}
-\begin{code}
data PrimOpVecCat = IntVec
| WordVec
| FloatVec
-\end{code}
-An @Enum@-derived list would be better; meanwhile... (ToDo)
+-- An @Enum@-derived list would be better; meanwhile... (ToDo)
-\begin{code}
allThePrimOps :: [PrimOp]
allThePrimOps =
#include "primop-list.hs-incl"
-\end{code}
-\begin{code}
tagToEnumKey :: Unique
tagToEnumKey = mkPrimOpIdUnique (primOpTag TagToEnumOp)
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[PrimOp-info]{The essential info about each @PrimOp@}
-%* *
-%************************************************************************
+* *
+************************************************************************
The @String@ in the @PrimOpInfos@ is the ``base name'' by which the user may
refer to the primitive operation. The conventional \tr{#}-for-
@@ -122,7 +111,8 @@ interfere with the programmer's Haskell name spaces.
We use @PrimKinds@ for the ``type'' information, because they're
(slightly) more convenient to use than @TyCons@.
-\begin{code}
+-}
+
data PrimOpInfo
= Dyadic OccName -- string :: T -> T -> T
Type
@@ -142,50 +132,50 @@ mkCompare str ty = Compare (mkVarOccFS str) ty
mkGenPrimOp :: FastString -> [TyVar] -> [Type] -> Type -> PrimOpInfo
mkGenPrimOp str tvs tys ty = GenPrimOp (mkVarOccFS str) tvs tys ty
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Strictness}
-%* *
-%************************************************************************
+* *
+************************************************************************
Not all primops are strict!
+-}
-\begin{code}
primOpStrictness :: PrimOp -> Arity -> StrictSig
-- See Demand.StrictnessInfo for discussion of what the results
-- The arity should be the arity of the primop; that's why
-- this function isn't exported.
#include "primop-strictness.hs-incl"
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Fixity}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
primOpFixity :: PrimOp -> Maybe Fixity
#include "primop-fixity.hs-incl"
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection[PrimOp-comparison]{PrimOpInfo basic comparison ops}
-%* *
-%************************************************************************
+* *
+************************************************************************
@primOpInfo@ gives all essential information (from which everything
else, notably a type, can be constructed) for each @PrimOp@.
+-}
-\begin{code}
primOpInfo :: PrimOp -> PrimOpInfo
#include "primop-primop-info.hs-incl"
primOpInfo _ = error "primOpInfo: unknown primop"
-\end{code}
+{-
Here are a load of comments from the old primOp info:
A @Word#@ is an unsigned @Int#@.
@@ -302,27 +292,25 @@ These primops are pretty weird.
The constraints aren't currently checked by the front end, but the
code generator will fall over if they aren't satisfied.
-%************************************************************************
-%* *
+************************************************************************
+* *
Which PrimOps are out-of-line
-%* *
-%************************************************************************
+* *
+************************************************************************
Some PrimOps need to be called out-of-line because they either need to
perform a heap check or they block.
+-}
-
-\begin{code}
primOpOutOfLine :: PrimOp -> Bool
#include "primop-out-of-line.hs-incl"
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Failure and side effects
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [PrimOp can_fail and has_side_effects]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -470,9 +458,8 @@ Two main predicates on primpops test these flags:
* The no-duplicate thing is done via primOpIsCheap, by making
has_side_effects things (very very very) not-cheap!
+-}
-
-\begin{code}
primOpHasSideEffects :: PrimOp -> Bool
#include "primop-has-side-effects.hs-incl"
@@ -492,9 +479,8 @@ primOpOkForSpeculation op
primOpOkForSideEffects :: PrimOp -> Bool
primOpOkForSideEffects op
= not (primOpHasSideEffects op)
-\end{code}
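
A user-level illustration of why the speculation predicate matters (hedged: quot is implemented via the can_fail primop quotInt#); safeDiv is an illustrative helper. The division must not be speculated out of the branch that guards it:

    -- If GHC floated the division above the test, this would crash on y == 0.
    safeDiv :: Int -> Int -> Int
    safeDiv x y = if y == 0 then 0 else x `quot` y
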
-
+{-
Note [primOpIsCheap]
~~~~~~~~~~~~~~~~~~~~
@primOpIsCheap@, as used in \tr{SimplUtils.lhs}. For now (HACK
@@ -502,8 +488,8 @@ WARNING), we just borrow some other predicates for a
what-should-be-good-enough test. "Cheap" means willing to call it more
than once, and/or push it inside a lambda. The latter could change the
behaviour of 'seq' for primops that can fail, so we don't treat them as cheap.
+-}
-\begin{code}
primOpIsCheap :: PrimOp -> Bool
-- See Note [PrimOp can_fail and has_side_effects]
primOpIsCheap op = primOpOkForSpeculation op
@@ -523,21 +509,20 @@ primOpIsCheap op = primOpOkForSpeculation op
-- where we don't want to inline x. But primopIsCheap doesn't control
-- that (it's exprIsDupable that does) so the problem doesn't occur
-- even if primOpIsCheap sometimes says 'True'.
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
PrimOp code size
-%* *
-%************************************************************************
+* *
+************************************************************************
primOpCodeSize
~~~~~~~~~~~~~~
Gives an indication of the code size of a primop, for the purposes of
calculating unfolding sizes; see CoreUnfold.sizeExpr.
+-}
-\begin{code}
primOpCodeSize :: PrimOp -> Int
#include "primop-code-size.hs-incl"
@@ -548,16 +533,15 @@ primOpCodeSizeDefault = 1
primOpCodeSizeForeignCall :: Int
primOpCodeSizeForeignCall = 4
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
PrimOp types
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
primOpType :: PrimOp -> Type -- you may want to use primOpSig instead
primOpType op
= case primOpInfo op of
@@ -590,9 +574,7 @@ primOpSig op
Dyadic _occ ty -> ([], [ty,ty], ty )
Compare _occ ty -> ([], [ty,ty], intPrimTy)
GenPrimOp _occ tyvars arg_tys res_ty -> (tyvars, arg_tys, res_ty )
-\end{code}
-\begin{code}
data PrimOpResultInfo
= ReturnsPrim PrimRep
| ReturnsAlg TyCon
@@ -614,46 +596,41 @@ getPrimOpResultInfo op
-- All primops return a tycon-app result
-- The tycon can be an unboxed tuple, though, which
-- gives rise to a ReturnAlg
-\end{code}
+{-
We do not currently make use of whether primops are commutable.
We used to try to move constants to the right hand side for strength
reduction.
+-}
-\begin{code}
{-
commutableOp :: PrimOp -> Bool
#include "primop-commutable.hs-incl"
-}
-\end{code}
-Utils:
-\begin{code}
+-- Utils:
+
dyadic_fun_ty, monadic_fun_ty, compare_fun_ty :: Type -> Type
dyadic_fun_ty ty = mkFunTys [ty, ty] ty
monadic_fun_ty ty = mkFunTy ty ty
compare_fun_ty ty = mkFunTys [ty, ty] intPrimTy
-\end{code}
-Output stuff:
-\begin{code}
+-- Output stuff:
+
pprPrimOp :: PrimOp -> SDoc
pprPrimOp other_op = pprOccName (primOpOcc other_op)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection[PrimCall]{User-imported primitive calls}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data PrimCall = PrimCall CLabelString PackageKey
instance Outputable PrimCall where
ppr (PrimCall lbl pkgId)
= text "__primcall" <+> ppr pkgId <+> ppr lbl
-
-\end{code}
diff --git a/compiler/prelude/PrimOp.lhs-boot b/compiler/prelude/PrimOp.hs-boot
index 5d003f2b51..6b92ef3d49 100644
--- a/compiler/prelude/PrimOp.lhs-boot
+++ b/compiler/prelude/PrimOp.hs-boot
@@ -1,7 +1,3 @@
-
-\begin{code}
module PrimOp where
data PrimOp
-\end{code}
-
diff --git a/compiler/prelude/TysPrim.lhs b/compiler/prelude/TysPrim.hs
index e130fe57b7..e8542eb670 100644
--- a/compiler/prelude/TysPrim.lhs
+++ b/compiler/prelude/TysPrim.hs
@@ -1,10 +1,10 @@
-%
-% (c) The AQUA Project, Glasgow University, 1994-1998
-%
+{-
+(c) The AQUA Project, Glasgow University, 1994-1998
+
\section[TysPrim]{Wired-in knowledge about primitive types}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
-- | This module defines TyCons that can't be expressed in Haskell.
@@ -92,15 +92,15 @@ import PrelNames
import FastString
import Data.Char
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Primitive type constructors}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
primTyCons :: [TyCon]
primTyCons
= [ addrPrimTyCon
@@ -195,18 +195,18 @@ stableNamePrimTyConName = mkPrimTc (fsLit "StableName#") stableNamePrimTyC
bcoPrimTyConName = mkPrimTc (fsLit "BCO#") bcoPrimTyConKey bcoPrimTyCon
weakPrimTyConName = mkPrimTc (fsLit "Weak#") weakPrimTyConKey weakPrimTyCon
threadIdPrimTyConName = mkPrimTc (fsLit "ThreadId#") threadIdPrimTyConKey threadIdPrimTyCon
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Support code}
-%* *
-%************************************************************************
+* *
+************************************************************************
alphaTyVars is a list of type variables for use in templates:
["a", "b", ..., "z", "t1", "t2", ... ]
+-}
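
A standalone sketch of just that naming scheme (illustrative helper, not GHC code; the real tyVarList below also allocates uniques and attaches kinds):

    templateTyVarNames :: [String]
    templateTyVarNames = map (:[]) ['a' .. 'z'] ++ [ 't' : show n | n <- [(1 :: Int) ..] ]
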
-\begin{code}
tyVarList :: Kind -> [TyVar]
tyVarList kind = [ mkTyVar (mkInternalName (mkAlphaTyVarUnique u)
(mkTyVarOccFS (mkFastString name))
@@ -245,16 +245,14 @@ openBetaTy = mkTyVarTy openBetaTyVar
kKiVar :: KindVar
kKiVar = (tyVarList superKind) !! 10
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
FunTyCon
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
funTyConName :: Name
funTyConName = mkPrimTyConName (fsLit "(->)") funTyConKey funTyCon
@@ -283,14 +281,13 @@ funTyCon = mkFunTyCon funTyConName $
-- --------------------------
-- Gamma |- tau -> sigma :: *
-- In the end we don't want subkinding at all.
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Kinds
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [SuperKind (BOX)]
~~~~~~~~~~~~~~~~~~~~~~
@@ -308,9 +305,8 @@ So the full defn of keq is
keq :: (~) BOX * * = Eq# BOX * * <refl *>
So you can see it's convenient to have BOX:BOX
+-}
-
-\begin{code}
-- | See "Type#kind_subtyping" for details of the distinction between the 'Kind' 'TyCon's
superKindTyCon, anyKindTyCon, liftedTypeKindTyCon,
openTypeKindTyCon, unliftedTypeKindTyCon,
@@ -349,10 +345,7 @@ mkPrimTyConName occ key tycon = mkWiredInName gHC_PRIM (mkTcOccFS occ)
BuiltInSyntax
-- All of the super kinds and kinds are defined in Prim and use BuiltInSyntax,
-- because they are never in scope in the source
-\end{code}
-
-\begin{code}
kindTyConType :: TyCon -> Type
kindTyConType kind = TyConApp kind [] -- mkTyConApp isn't defined yet
@@ -373,15 +366,15 @@ mkArrowKind k1 k2 = FunTy k1 k2
-- | Iterated application of 'mkArrowKind'
mkArrowKinds :: [Kind] -> Kind -> Kind
mkArrowKinds arg_kinds result_kind = foldr mkArrowKind result_kind arg_kinds
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysPrim-basic]{Basic primitive types (@Char#@, @Int#@, etc.)}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- only used herein
pcPrimTyCon :: Name -> [Role] -> PrimRep -> TyCon
pcPrimTyCon name roles rep
@@ -445,14 +438,13 @@ doublePrimTy :: Type
doublePrimTy = mkTyConTy doublePrimTyCon
doublePrimTyCon :: TyCon
doublePrimTyCon = pcPrimTyCon0 doublePrimTyConName DoubleRep
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysPrim-state]{The @State#@ type (and @_RealWorld@ types)}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [The ~# TyCon]
~~~~~~~~~~~~~~~~~~~~
@@ -480,8 +472,8 @@ keep different state threads separate. It is represented by nothing at all.
The type parameter to State# is intended to keep separate threads separate.
Even though this parameter is not used in the definition of State#, it is
given role Nominal to enforce its intended use.
+-}
-\begin{code}
mkStatePrimTy :: Type -> Type
mkStatePrimTy ty = TyConApp statePrimTyCon [ty]
@@ -520,31 +512,31 @@ eqReprPrimTyCon = mkPrimTyCon eqReprPrimTyConName kind
where kind = ForAllTy kv $ mkArrowKinds [k, k] unliftedTypeKind
kv = kKiVar
k = mkTyVarTy kv
-\end{code}
+{-
RealWorld is deeply magical. It is *primitive*, but it is not
*unlifted* (hence ptrArg). We never manipulate values of type
RealWorld; it's only used in the type system, to parameterise State#.
+-}
-\begin{code}
realWorldTyCon :: TyCon
realWorldTyCon = mkLiftedPrimTyCon realWorldTyConName liftedTypeKind [] PtrRep
realWorldTy :: Type
realWorldTy = mkTyConTy realWorldTyCon
realWorldStatePrimTy :: Type
realWorldStatePrimTy = mkStatePrimTy realWorldTy -- State# RealWorld
-\end{code}
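
The user-visible face of this machinery (a hedged, simplified reconstruction under the name MyIO; the real newtype lives in GHC.Types): IO is a state transformer over the zero-width State# RealWorld token, which is what sequences IO actions.

    {-# LANGUAGE MagicHash, UnboxedTuples #-}
    import GHC.Exts (State#, RealWorld)

    newtype MyIO a = MyIO (State# RealWorld -> (# State# RealWorld, a #))
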
+{-
Note: the ``state-pairing'' types are not truly primitive, so they are
defined in \tr{TysWiredIn.lhs}, not here.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection[TysPrim-arrays]{The primitive array types}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
arrayPrimTyCon, mutableArrayPrimTyCon, mutableByteArrayPrimTyCon,
byteArrayPrimTyCon, arrayArrayPrimTyCon, mutableArrayArrayPrimTyCon,
smallArrayPrimTyCon, smallMutableArrayPrimTyCon :: TyCon
@@ -573,110 +565,110 @@ mkMutableArrayArrayPrimTy :: Type -> Type
mkMutableArrayArrayPrimTy s = TyConApp mutableArrayArrayPrimTyCon [s]
mkSmallMutableArrayPrimTy :: Type -> Type -> Type
mkSmallMutableArrayPrimTy s elt = TyConApp smallMutableArrayPrimTyCon [s, elt]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysPrim-mut-var]{The mutable variable type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mutVarPrimTyCon :: TyCon
mutVarPrimTyCon = pcPrimTyCon mutVarPrimTyConName [Nominal, Representational] PtrRep
mkMutVarPrimTy :: Type -> Type -> Type
mkMutVarPrimTy s elt = TyConApp mutVarPrimTyCon [s, elt]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysPrim-synch-var]{The synchronizing variable type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mVarPrimTyCon :: TyCon
mVarPrimTyCon = pcPrimTyCon mVarPrimTyConName [Nominal, Representational] PtrRep
mkMVarPrimTy :: Type -> Type -> Type
mkMVarPrimTy s elt = TyConApp mVarPrimTyCon [s, elt]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysPrim-stm-var]{The transactional variable type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tVarPrimTyCon :: TyCon
tVarPrimTyCon = pcPrimTyCon tVarPrimTyConName [Nominal, Representational] PtrRep
mkTVarPrimTy :: Type -> Type -> Type
mkTVarPrimTy s elt = TyConApp tVarPrimTyCon [s, elt]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysPrim-stable-ptrs]{The stable-pointer type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
stablePtrPrimTyCon :: TyCon
stablePtrPrimTyCon = pcPrimTyCon stablePtrPrimTyConName [Representational] AddrRep
mkStablePtrPrimTy :: Type -> Type
mkStablePtrPrimTy ty = TyConApp stablePtrPrimTyCon [ty]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysPrim-stable-names]{The stable-name type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
stableNamePrimTyCon :: TyCon
stableNamePrimTyCon = pcPrimTyCon stableNamePrimTyConName [Representational] PtrRep
mkStableNamePrimTy :: Type -> Type
mkStableNamePrimTy ty = TyConApp stableNamePrimTyCon [ty]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysPrim-BCOs]{The ``bytecode object'' type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
bcoPrimTy :: Type
bcoPrimTy = mkTyConTy bcoPrimTyCon
bcoPrimTyCon :: TyCon
bcoPrimTyCon = pcPrimTyCon0 bcoPrimTyConName PtrRep
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysPrim-Weak]{The ``weak pointer'' type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
weakPrimTyCon :: TyCon
weakPrimTyCon = pcPrimTyCon weakPrimTyConName [Representational] PtrRep
mkWeakPrimTy :: Type -> Type
mkWeakPrimTy v = TyConApp weakPrimTyCon [v]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysPrim-thread-ids]{The ``thread id'' type}
-%* *
-%************************************************************************
+* *
+************************************************************************
A thread id is represented by a pointer to the TSO itself, to ensure
that they are always unique and we can always find the TSO for a given
@@ -686,19 +678,19 @@ collector and can keep TSOs around for too long.
Hence the programmer API for thread manipulation uses a weak pointer
to the thread id internally.
+-}
-\begin{code}
threadIdPrimTy :: Type
threadIdPrimTy = mkTyConTy threadIdPrimTyCon
threadIdPrimTyCon :: TyCon
threadIdPrimTyCon = pcPrimTyCon0 threadIdPrimTyConName PtrRep
-\end{code}
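
A hedged library-level example of getting at that weak reference explicitly, using the standard mkWeakThreadId; the weak pointer does not by itself keep the TSO alive:

    import Control.Concurrent (myThreadId)
    import GHC.Conc (mkWeakThreadId)
    import System.Mem.Weak (deRefWeak)

    main :: IO ()
    main = do
      tid <- myThreadId
      wk  <- mkWeakThreadId tid   -- weak: the TSO can still be collected
      deRefWeak wk >>= print
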
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Any
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Any types]
~~~~~~~~~~~~~~~~
@@ -763,8 +755,8 @@ This commit uses
Any for kind *
Any(*->*) for kind *->*
etc
+-}
-\begin{code}
anyTyConName :: Name
anyTyConName = mkPrimTc (fsLit "Any") anyTyConKey anyTyCon
@@ -780,14 +772,13 @@ anyTyCon = mkFamilyTyCon anyTyConName kind [kKiVar]
anyTypeOfKind :: Kind -> Type
anyTypeOfKind kind = TyConApp anyTyCon [kind]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{SIMD vector types}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
#include "primop-vector-tys.hs-incl"
-\end{code}
diff --git a/compiler/prelude/TysWiredIn.lhs b/compiler/prelude/TysWiredIn.hs
index f4dca9a0de..ccebe539d2 100644
--- a/compiler/prelude/TysWiredIn.lhs
+++ b/compiler/prelude/TysWiredIn.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP Project, Glasgow University, 1994-1998
-%
+{-
+(c) The GRASP Project, Glasgow University, 1994-1998
+
\section[TysWiredIn]{Wired-in knowledge about {\em non-primitive} types}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
-- | This module is about types that can be defined in Haskell, but which
@@ -111,19 +111,18 @@ alpha_tyvar = [alphaTyVar]
alpha_ty :: [Type]
alpha_ty = [alphaTy]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Wired in type constructors}
-%* *
-%************************************************************************
+* *
+************************************************************************
If you change which things are wired in, make sure you change their
names in PrelNames, so they use wTcQual, wDataQual, etc
+-}
-\begin{code}
-- This list is used only to define PrelInfo.wiredInThings. That in turn
-- is used to initialise the name environment carried around by the renamer.
-- This means that if we look up the name of a TyCon (or its implicit binders)
@@ -156,9 +155,7 @@ wiredInTyCons = [ unitTyCon -- Not treated like other tuples, because
, typeNatKindCon
, typeSymbolKindCon
]
-\end{code}
-\begin{code}
mkWiredInTyConName :: BuiltInSyntax -> Module -> FastString -> Unique -> TyCon -> Name
mkWiredInTyConName built_in modu fs unique tycon
= mkWiredInName modu (mkTcOccFS fs) unique
@@ -228,15 +225,15 @@ listTyCon_RDR = nameRdrName listTyConName
consDataCon_RDR = nameRdrName consDataConName
parrTyCon_RDR = nameRdrName parrTyConName
eqTyCon_RDR = nameRdrName eqTyConName
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{mkWiredInTyCon}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
pcNonRecDataTyCon :: Name -> Maybe CType -> [TyVar] -> [DataCon] -> TyCon
-- Not an enumeration, not promotable
pcNonRecDataTyCon = pcTyCon False NonRecursive False
@@ -293,16 +290,15 @@ pcDataConWithFixity' declared_infix dc_name wrk_key tyvars arg_tys tycon
wrk_occ = mkDataConWorkerOcc (nameOccName dc_name)
wrk_name = mkWiredInName modu wrk_occ wrk_key
(AnId (dataConWorkId data_con)) UserSyntax
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Kinds
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
typeNatKindCon, typeSymbolKindCon :: TyCon
-- data Nat
-- data Symbol
@@ -312,14 +308,13 @@ typeSymbolKindCon = pcTyCon False NonRecursive True typeSymbolKindConName Nothin
typeNatKind, typeSymbolKind :: Kind
typeNatKind = TyConApp (promoteTyCon typeNatKindCon) []
typeSymbolKind = TyConApp (promoteTyCon typeSymbolKindCon) []
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Stuff for dealing with tuples
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [How tuples work] See also Note [Known-key names] in PrelNames
~~~~~~~~~~~~~~~~~~~~~~
@@ -338,10 +333,10 @@ Note [How tuples work] See also Note [Known-key names] in PrelNames
we get the right wired-in name. This guy can't tell the difference
between BoxedTuple and ConstraintTuple (same OccName!), so tuples
are not serialised into interface files using OccNames at all.
+-}
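
Before the real definition below, a standalone sketch (hypothetical helper, not GHC code) of the matching it performs for boxed tuples: the arity is recovered purely from the commas of the OccName string.

    -- "()" -> Just 0, "(,)" -> Just 2, "(,,)" -> Just 3; anything else -> Nothing
    boxedTupleArity :: String -> Maybe Int
    boxedTupleArity ('(' : rest)
      | (commas, ")") <- span (== ',') rest
      = Just (if null commas then 0 else length commas + 1)
    boxedTupleArity _ = Nothing
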
-\begin{code}
isBuiltInOcc_maybe :: OccName -> Maybe Name
--- Built in syntax isn't "in scope" so these OccNames
+-- Built in syntax isn't "in scope" so these OccNames
-- map to wired-in Names with BuiltInSyntax
isBuiltInOcc_maybe occ
= case occNameString occ of
@@ -365,7 +360,7 @@ isBuiltInOcc_maybe occ
tail_matches BoxedTuple ")" = True
tail_matches UnboxedTuple "#)" = True
tail_matches _ _ = False
-
+
choose_ns tc dc
| isTcClsNameSpace ns = Just (getName tc)
| isDataConNameSpace ns = Just (getName dc)
@@ -479,16 +474,15 @@ unboxedPairTyCon :: TyCon
unboxedPairTyCon = tupleTyCon UnboxedTuple 2
unboxedPairDataCon :: DataCon
unboxedPairDataCon = tupleCon UnboxedTuple 2
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysWiredIn-boxed-prim]{The ``boxed primitive'' types (@Char@, @Int@, etc)}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
eqTyCon :: TyCon
eqTyCon = mkAlgTyCon eqTyConName
(ForAllTy kv $ mkArrowKinds [k, k] constraintKind)
@@ -537,9 +531,6 @@ coercibleDataCon = pcDataCon coercibleDataConName args [TyConApp eqReprPrimTyCon
coercibleClass :: Class
coercibleClass = mkClass (tyConTyVars coercibleTyCon) [] [] [] [] [] (mkAnd []) coercibleTyCon
-\end{code}
-
-\begin{code}
charTy :: Type
charTy = mkTyConTy charTyCon
@@ -552,9 +543,7 @@ charDataCon = pcDataCon charDataConName [] [charPrimTy] charTyCon
stringTy :: Type
stringTy = mkListTy charTy -- convenience only
-\end{code}
-\begin{code}
intTy :: Type
intTy = mkTyConTy intTyCon
@@ -562,9 +551,7 @@ intTyCon :: TyCon
intTyCon = pcNonRecDataTyCon intTyConName (Just (CType Nothing (fsLit "HsInt"))) [] [intDataCon]
intDataCon :: DataCon
intDataCon = pcDataCon intDataConName [] [intPrimTy] intTyCon
-\end{code}
-\begin{code}
wordTy :: Type
wordTy = mkTyConTy wordTyCon
@@ -572,9 +559,7 @@ wordTyCon :: TyCon
wordTyCon = pcNonRecDataTyCon wordTyConName (Just (CType Nothing (fsLit "HsWord"))) [] [wordDataCon]
wordDataCon :: DataCon
wordDataCon = pcDataCon wordDataConName [] [wordPrimTy] wordTyCon
-\end{code}
-\begin{code}
floatTy :: Type
floatTy = mkTyConTy floatTyCon
@@ -582,9 +567,7 @@ floatTyCon :: TyCon
floatTyCon = pcNonRecDataTyCon floatTyConName (Just (CType Nothing (fsLit "HsFloat"))) [] [floatDataCon]
floatDataCon :: DataCon
floatDataCon = pcDataCon floatDataConName [] [floatPrimTy] floatTyCon
-\end{code}
-\begin{code}
doubleTy :: Type
doubleTy = mkTyConTy doubleTyCon
@@ -593,14 +576,13 @@ doubleTyCon = pcNonRecDataTyCon doubleTyConName (Just (CType Nothing (fsLit "HsD
doubleDataCon :: DataCon
doubleDataCon = pcDataCon doubleDataConName [] [doublePrimTy] doubleTyCon
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysWiredIn-Bool]{The @Bool@ type}
-%* *
-%************************************************************************
+* *
+************************************************************************
An ordinary enumeration type, but deeply wired in. There are no
magical operations on @Bool@ (just the regular Prelude code).
@@ -643,8 +625,8 @@ necessarily need to be a straightforwardly boxed version of its
primitive counterpart.
{\em END IDLE SPECULATION BY SIMON}
+-}
-\begin{code}
boolTy :: Type
boolTy = mkTyConTy boolTyCon
@@ -674,13 +656,13 @@ ltDataConId, eqDataConId, gtDataConId :: Id
ltDataConId = dataConWorkId ltDataCon
eqDataConId = dataConWorkId eqDataCon
gtDataConId = dataConWorkId gtDataCon
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysWiredIn-List]{The @List@ type (incl ``build'' magic)}
-%* *
-%************************************************************************
+* *
+************************************************************************
Special syntax, deeply wired in, but otherwise ordinary algebraic
data types:
@@ -690,8 +672,8 @@ data () = ()
data (,) a b = (,) a b
...
\end{verbatim}
+-}
-\begin{code}
mkListTy :: Type -> Type
mkListTy ty = mkTyConApp listTyCon [ty]
@@ -715,13 +697,13 @@ consDataCon = pcDataConWithFixity True {- Declared infix -}
-- Interesting: polymorphic recursion would help here.
-- We can't use (mkListTy alphaTy) in the defn of consDataCon, else mkListTy
-- gets the over-specific type (Type -> Type)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysWiredIn-Tuples]{The @Tuple@ types}
-%* *
-%************************************************************************
+* *
+************************************************************************
The tuple types are definitely magic, because they form an infinite
family.
@@ -762,8 +744,8 @@ There should also be a way to generate the appropriate code for each
of these instances, but (like the info tables and entry code) it is
done by enumeration\srcloc{lib/prelude/InTup?.hs}.
\end{itemize}
+-}
-\begin{code}
mkTupleTy :: TupleSort -> [Type] -> Type
-- Special case for *boxed* 1-tuples, which are represented by the type itself
mkTupleTy sort [ty] | Boxed <- tupleSortBoxity sort = ty
@@ -775,17 +757,17 @@ mkBoxedTupleTy tys = mkTupleTy BoxedTuple tys
unitTy :: Type
unitTy = mkTupleTy BoxedTuple []
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[TysWiredIn-PArr]{The @[::]@ type}
-%* *
-%************************************************************************
+* *
+************************************************************************
Special syntax for parallel arrays needs some wired in definitions.
+-}
-\begin{code}
-- | Construct a type representing the application of the parallel array constructor
mkPArrTy :: Type -> Type
mkPArrTy ty = mkTyConApp parrTyCon [ty]
@@ -848,20 +830,16 @@ mkPArrFakeCon arity = data_con
-- | Checks whether a data constructor is a fake constructor for parallel arrays
isPArrFakeCon :: DataCon -> Bool
isPArrFakeCon dcon = dcon == parrFakeCon (dataConSourceArity dcon)
-\end{code}
-Promoted Booleans
+-- Promoted Booleans
-\begin{code}
promotedBoolTyCon, promotedFalseDataCon, promotedTrueDataCon :: TyCon
promotedBoolTyCon = promoteTyCon boolTyCon
promotedTrueDataCon = promoteDataCon trueDataCon
promotedFalseDataCon = promoteDataCon falseDataCon
-\end{code}
-Promoted Ordering
+-- Promoted Ordering
-\begin{code}
promotedOrderingTyCon
, promotedLTDataCon
, promotedEQDataCon
@@ -871,7 +849,3 @@ promotedOrderingTyCon = promoteTyCon orderingTyCon
promotedLTDataCon = promoteDataCon ltDataCon
promotedEQDataCon = promoteDataCon eqDataCon
promotedGTDataCon = promoteDataCon gtDataCon
-\end{code}
-
-
-
diff --git a/compiler/prelude/TysWiredIn.lhs-boot b/compiler/prelude/TysWiredIn.hs-boot
index 305d82e2b5..309dfa22e1 100644
--- a/compiler/prelude/TysWiredIn.lhs-boot
+++ b/compiler/prelude/TysWiredIn.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module TysWiredIn where
import {-# SOURCE #-} TyCon (TyCon)
@@ -8,4 +7,3 @@ import {-# SOURCE #-} TypeRep (Type)
eqTyCon, coercibleTyCon :: TyCon
typeNatKind, typeSymbolKind :: Type
mkBoxedTupleTy :: [Type] -> Type
-\end{code}
diff --git a/compiler/rename/RnBinds.lhs b/compiler/rename/RnBinds.hs
index cdb211259b..1af93f35d2 100644
--- a/compiler/rename/RnBinds.lhs
+++ b/compiler/rename/RnBinds.hs
@@ -1,14 +1,14 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[RnBinds]{Renaming and dependency analysis of bindings}
This module does renaming and dependency analysis on value bindings in
the abstract syntax. It does {\em not} do cycle-checks on class or
type-synonym declarations; those cannot be done at this stage because
they may be affected by renaming (which isn't fully worked out yet).
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module RnBinds (
@@ -53,8 +53,8 @@ import Control.Monad
#if __GLASGOW_HASKELL__ < 709
import Data.Traversable ( traverse )
#endif
-\end{code}
+{-
-- ToDo: Put the annotations into the monad, so that they arrive in the proper
-- place and can be used when complaining.
@@ -82,11 +82,11 @@ within one @MonoBinds@, so that unique-Int plumbing is done explicitly
(heavy monad machinery not needed).
-%************************************************************************
-%* *
-%* naming conventions *
-%* *
-%************************************************************************
+************************************************************************
+* *
+* naming conventions *
+* *
+************************************************************************
\subsection[name-conventions]{Name conventions}
@@ -109,11 +109,11 @@ a set of variables defined in @Exp@ is written @dvExp@
a set of variables free in @Exp@ is written @fvExp@
\end{itemize}
-%************************************************************************
-%* *
-%* analysing polymorphic bindings (HsBindGroup, HsBind)
-%* *
-%************************************************************************
+************************************************************************
+* *
+* analysing polymorphic bindings (HsBindGroup, HsBind)
+* *
+************************************************************************
\subsubsection[dep-HsBinds]{Polymorphic bindings}
@@ -155,13 +155,13 @@ instance declarations. It expects only to see @FunMonoBind@s, and
it expects the global environment to contain bindings for the binders
(which are all class operations).
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsubsection{ Top-level bindings}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- for top-level bindings, we need to make top-level names,
-- so we have a different entry point than for local bindings
rnTopBindsLHS :: MiniFixityEnv
@@ -186,16 +186,15 @@ rnTopBindsBoot (ValBindsIn mbinds sigs)
; (sigs', fvs) <- renameSigs HsBootCtxt sigs
; return (ValBindsOut [] sigs', usesOnly fvs) }
rnTopBindsBoot b = pprPanic "rnTopBindsBoot" (ppr b)
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
HsLocalBinds
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnLocalBindsAndThen :: HsLocalBinds RdrName
-> (HsLocalBinds Name -> RnM (result, FreeVars))
-> RnM (result, FreeVars)
@@ -223,16 +222,15 @@ rnIPBind :: IPBind RdrName -> RnM (IPBind Name, FreeVars)
rnIPBind (IPBind ~(Left n) expr) = do
(expr',fvExpr) <- rnLExpr expr
return (IPBind (Left n) expr', fvExpr)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
ValBinds
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- Renaming local binding groups
-- Does duplicate/shadow check
rnLocalValBindsLHS :: MiniFixityEnv
@@ -678,9 +676,8 @@ mkSigTvFn sigs
, L _ name <- names]
-- Note the pattern-match on "Explicit"; we only bind
-- type variables from signatures with an explicit top-level for-all
-\end{code}
-
+{-
@rnMethodBinds@ is used for the method bindings of a class and an instance
declaration. Like @rnBinds@ but without dependency analysis.
@@ -695,8 +692,8 @@ and unless @op@ occurs we won't treat the type signature of @op@ in the class
decl for @Foo@ as a source of instance-decl gates. But we should! Indeed,
in many ways the @op@ in an instance decl is just like an occurrence, not
a binder.
+-}
-\begin{code}
rnMethodBinds :: Name -- Class name
-> (Name -> [Name]) -- Signature tyvar function
-> LHsBinds RdrName
@@ -757,26 +754,24 @@ rnMethodBind _ _ (L loc bind@(PatSynBind {})) = do
return (emptyBag, emptyFVs)
rnMethodBind _ _ b = pprPanic "rnMethodBind" (ppr b)
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection[dep-Sigs]{Signatures (and user-pragmas for values)}
-%* *
-%************************************************************************
+* *
+************************************************************************
@renameSigs@ checks for:
\begin{enumerate}
\item more than one sig for one thing;
\item signatures given for things not bound here;
\end{enumerate}
-%
+
At the moment we don't gather free-var info from the types in
signatures. We'd only need this if we wanted to report unused tyvars.
+-}
-\begin{code}
renameSigs :: HsSigCtxt
-> [LSig RdrName]
-> RnM ([LSig Name], FreeVars)
@@ -946,16 +941,15 @@ checkDupMinimalSigs sigs
= case filter isMinimalLSig sigs of
minSigs@(_:_:_) -> dupMinimalSigErr minSigs
_ -> return ()
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Match}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
rnMatchGroup :: Outputable (body RdrName) => HsMatchContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> MatchGroup RdrName (Located (body RdrName))
@@ -1006,16 +1000,15 @@ resSigErr ctxt match ty
, nest 2 $ ptext (sLit
"Result signatures are no longer supported in pattern matches")
, pprMatchInCtxt ctxt match ]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Guarded right-hand sides (GRHSs)}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
rnGRHSs :: HsMatchContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> GRHSs RdrName (Located (body RdrName))
@@ -1051,15 +1044,15 @@ rnGRHS' ctxt rnBody (GRHS guards rhs)
is_standard_guard [] = True
is_standard_guard [L _ (BodyStmt _ _ _ _)] = True
is_standard_guard _ = False
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Error messages}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
dupSigDeclErr :: [(Located RdrName, Sig RdrName)] -> RnM ()
dupSigDeclErr pairs@((L loc name, sig) : _)
= addErrAt loc $
@@ -1113,4 +1106,3 @@ dupMinimalSigErr sigs@(L loc _ : _)
, ptext (sLit "at") <+> vcat (map ppr $ sort $ map getLoc sigs)
, ptext (sLit "Combine alternative minimal complete definitions with `|'") ]
dupMinimalSigErr [] = panic "dupMinimalSigErr"
-\end{code}
diff --git a/compiler/rename/RnEnv.lhs b/compiler/rename/RnEnv.hs
index 7e096c0648..0cea309208 100644
--- a/compiler/rename/RnEnv.lhs
+++ b/compiler/rename/RnEnv.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-2006
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-2006
+
\section[RnEnv]{Environment manipulation for the renamer monad}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module RnEnv (
@@ -24,12 +24,12 @@ module RnEnv (
lookupSubBndrGREs, lookupConstructorFields,
lookupSyntaxName, lookupSyntaxNames, lookupIfThenElse,
lookupGreRn, lookupGreRn_maybe,
- lookupGreLocalRn_maybe,
+ lookupGreLocalRn_maybe,
getLookupOccRn, addUsedRdrNames,
newLocalBndrRn, newLocalBndrsRn,
bindLocalNames, bindLocalNamesFV,
- MiniFixityEnv,
+ MiniFixityEnv,
addLocalFixities,
bindLocatedLocalsFV, bindLocatedLocalsRn,
extendTyVarEnvFVRn,
@@ -76,13 +76,13 @@ import Control.Monad
import Data.List
import qualified Data.Set as Set
import Constants ( mAX_TUPLE_SIZE )
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
Source-code binders
-%* *
-%*********************************************************
+* *
+*********************************************************
Note [Signature lazy interface loading]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -141,8 +141,8 @@ warning until you use the identifier further downstream. This would
require adjusting addUsedRdrName so that during signature compilation,
we do not report deprecation warnings for LocalDef. See also
Note [Handling of deprecations]
+-}
-\begin{code}
newTopSrcBinder :: Located RdrName -> RnM Name
newTopSrcBinder (L loc rdr_name)
| Just name <- isExact_maybe rdr_name
@@ -232,13 +232,13 @@ newTopSrcBinder (L loc rdr_name)
-- Normal case
do { this_mod <- getModule
; newGlobalBinder this_mod (rdrNameOcc rdr_name) loc } }
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
Source code occurrences
-%* *
-%*********************************************************
+* *
+*********************************************************
Looking up a name in the RnEnv.
@@ -253,8 +253,8 @@ The latter two mean that we are not just looking for a
*syntactically-infix* declaration, but one that uses an operator
OccName. We use OccName.isSymOcc to detect that case, which isn't
terribly efficient, but there seems to be no better way.
+-}
-\begin{code}
lookupTopBndrRn :: RdrName -> RnM Name
lookupTopBndrRn n = do nopt <- lookupTopBndrRn_maybe n
case nopt of
@@ -366,7 +366,7 @@ lookupExactOcc_either name
[gre] -> return (Right (gre_name gre))
_ -> return (Left dup_nm_err)
- -- We can get more than one GRE here, if there are multiple
+ -- We can get more than one GRE here, if there are multiple
-- bindings for the same name. Sometimes they are caught later
-- by findLocalDupsRdrEnv, like in this example (Trac #8932):
-- $( [d| foo :: a->a; foo x = x |])
@@ -528,8 +528,8 @@ lookupSubBndrGREs env parent rdr_name
parent_is p (GRE { gre_par = ParentIs p' }) = p == p'
parent_is _ _ = False
-\end{code}
+{-
Note [Family instance binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
@@ -643,8 +643,8 @@ we'll miss the fact that the qualified import is redundant.
--------------------------------------------------
-- Occurrences
--------------------------------------------------
+-}
-\begin{code}
getLookupOccRn :: RnM (Name -> Maybe Name)
getLookupOccRn
= do local_env <- getLocalRdrEnv
@@ -707,8 +707,8 @@ lookup_demoted rdr_name
where
suggest_dk = ptext (sLit "A data constructor of that name is in scope; did you mean DataKinds?")
-\end{code}
+{-
Note [Demotion]
~~~~~~~~~~~~~~~
When the user writes:
@@ -725,8 +725,8 @@ its namespace to DataName and do a second lookup.
The final result (after the renamer) will be:
HsTyVar ("Zero", DataName)
+-}
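
A hedged source-level example of the demotion just described (Nat and zeroP are illustrative names):

    {-# LANGUAGE DataKinds #-}
    import Data.Proxy (Proxy (..))

    data Nat = Zero | Succ Nat

    -- 'Zero' occurs in a type; the type-level lookup fails, the name is
    -- demoted to the data-constructor namespace, and is found there.
    zeroP :: Proxy Zero
    zeroP = Proxy
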
-\begin{code}
-- Use this version to get tracing
--
-- lookupOccRn_maybe, lookupOccRn_maybe' :: RdrName -> RnM (Maybe Name)
@@ -827,13 +827,13 @@ lookupGreRn_help rdr_name lookup
; return (Just gre) }
gres -> do { addNameClashErrRn rdr_name gres
; return (Just (head gres)) } }
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
Deprecations
-%* *
-%*********************************************************
+* *
+*********************************************************
Note [Handling of deprecations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -850,8 +850,8 @@ Note [Handling of deprecations]
- the ".." completion for records
- the ".." in an export item 'T(..)'
- the things exported by a module export 'module M'
+-}
-\begin{code}
addUsedRdrName :: Bool -> GlobalRdrElt -> RdrName -> RnM ()
-- Record usage of imported RdrNames
addUsedRdrName warnIfDeprec gre rdr
@@ -903,8 +903,8 @@ lookupImpDeprec iface gre
case gre_par gre of -- or its parent, is warn'd
ParentIs p -> mi_warn_fn iface p
NoParent -> Nothing
-\end{code}
+{-
Note [Used names with interface not loaded]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's (just) possible to find a used
@@ -925,11 +925,11 @@ In both cases we simply don't permit deprecations;
this is, after all, wired-in stuff.
-%*********************************************************
-%* *
+*********************************************************
+* *
GHCi support
-%* *
-%*********************************************************
+* *
+*********************************************************
A qualified name on the command line can refer to any module at
all: we try to load the interface if we don't already have it, just
@@ -945,8 +945,8 @@ Note [Safe Haskell and GHCi]
We DONT do this Safe Haskell as we need to check imports. We can
and should instead check the qualified import but at the moment
this requires some refactoring so leave as a TODO
+-}
-\begin{code}
lookupQualifiedNameGHCi :: DynFlags -> Bool -> RdrName -> RnM (Maybe Name)
lookupQualifiedNameGHCi dflags is_ghci rdr_name
| Just (mod,occ) <- isQual_maybe rdr_name
@@ -974,8 +974,8 @@ lookupQualifiedNameGHCi dflags is_ghci rdr_name
= return Nothing
where
doc = ptext (sLit "Need to find") <+> ppr rdr_name
-\end{code}
+{-
Note [Looking up signature names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
lookupSigOccRn is used for type signatures and pragmas
@@ -1016,8 +1016,8 @@ data HsSigCtxt = ... | TopSigCtxt NameSet Bool | ....
f :: C a => a -> a -- No, not ok
class C a where
f :: a -> a
+-}
-\begin{code}
data HsSigCtxt
= TopSigCtxt NameSet Bool -- At top level, binding these names
-- See Note [Signatures for top level things]
@@ -1137,8 +1137,8 @@ dataTcOccs rdr_name
where
occ = rdrNameOcc rdr_name
rdr_name_tc = setRdrNameSpace rdr_name tcName
-\end{code}
+{-
Note [dataTcOccs and Exact Names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Exact RdrNames can occur in code generated by Template Haskell, and generally
@@ -1155,11 +1155,11 @@ the list type constructor.
Note that setRdrNameSpace on an Exact name requires the Name to be External,
which it always is for built in syntax.
-%*********************************************************
-%* *
+*********************************************************
+* *
Fixities
-%* *
-%*********************************************************
+* *
+*********************************************************
Note [Fixity signature lookup]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1180,8 +1180,8 @@ well as the original namespace.
The extended lookup is also used in other places, like resolution of
deprecation declarations, and lookup of names in GHCi.
+-}
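
A hedged example of the situation this extended lookup caters for: a single fixity declaration covering a name that exists in both the type and the data-constructor namespace (the operator :+: and reassoc are illustrative):

    {-# LANGUAGE TypeOperators #-}

    infixr 5 :+:
    data a :+: b = a :+: b

    -- Thanks to the fixity above, the argument type parses as a :+: (b :+: c),
    -- so this is just the identity.
    reassoc :: a :+: b :+: c -> a :+: (b :+: c)
    reassoc x = x
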
-\begin{code}
--------------------------------
type MiniFixityEnv = FastStringEnv (Located Fixity)
-- Mini fixity env for the names we're about
@@ -1208,8 +1208,8 @@ addLocalFixities mini_fix_env names thing_inside
Nothing -> Nothing
where
occ = nameOccName name
-\end{code}
+{-
--------------------------------
lookupFixity is a bit strange.
@@ -1223,12 +1223,12 @@ lookupFixity is a bit strange.
or Local/Exported (everything else)
(See notes with RnNames.getLocalDeclBinders for why we have this split.)
We put them all in the local fixity environment
+-}
-\begin{code}
lookupFixityRn :: Name -> RnM Fixity
lookupFixityRn name
| isUnboundName name
- = return (Fixity minPrecedence InfixL)
+ = return (Fixity minPrecedence InfixL)
-- Minimise errors from unbound names; eg
-- a>0 `foo` b>0
-- where 'foo' is not in scope, should not give an error (Trac #7937)
@@ -1274,10 +1274,9 @@ lookupFixityRn name
lookupTyFixityRn :: Located Name -> RnM Fixity
lookupTyFixityRn (L _ n) = lookupFixityRn n
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Rebindable names
Dealing with rebindable syntax is driven by the
Opt_RebindableSyntax dynamic flag.
@@ -1285,8 +1284,8 @@ lookupTyFixityRn (L _ n) = lookupFixityRn n
In "deriving" code we don't want to use rebindable syntax
so we switch off the flag locally
-%* *
-%************************************************************************
+* *
+************************************************************************
Haskell 98 says that when you say "3" you get the "fromInteger" from the
Standard Prelude, regardless of what is in scope. However, to experiment
@@ -1314,8 +1313,8 @@ name if Opt_NoImplicitPrelude is on. That is what lookupSyntaxName does.
We treat the original (standard) names as free-vars too, because the type checker
checks the type of the user thing against the type of the standard thing.
+-}
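
A hedged source-level example of what lookupSyntaxName makes possible (Metres and three are illustrative names): with RebindableSyntax the literal elaborates to whichever fromInteger is in scope, not the Prelude one.

    {-# LANGUAGE RebindableSyntax #-}
    import qualified Prelude as P
    import Prelude (Double, Show)

    newtype Metres = Metres Double deriving Show

    fromInteger :: P.Integer -> Metres
    fromInteger n = Metres (P.fromInteger n)

    three :: Metres
    three = 3   -- means: fromInteger 3, using the definition above
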
-\begin{code}
lookupIfThenElse :: RnM (Maybe (SyntaxExpr Name), FreeVars)
-- Different to lookupSyntaxName because in the non-rebindable
-- case we desugar directly rather than calling an existing function
@@ -1331,7 +1330,7 @@ lookupSyntaxName :: Name -- The standard name
-> RnM (SyntaxExpr Name, FreeVars) -- Possibly a non-standard name
lookupSyntaxName std_name
= do { rebindable_on <- xoptM Opt_RebindableSyntax
- ; if not rebindable_on then
+ ; if not rebindable_on then
return (HsVar std_name, emptyFVs)
else
-- Get the similarly named thing from the local environment
@@ -1342,21 +1341,20 @@ lookupSyntaxNames :: [Name] -- Standard names
-> RnM ([HsExpr Name], FreeVars) -- See comments with HsExpr.ReboundNames
lookupSyntaxNames std_names
= do { rebindable_on <- xoptM Opt_RebindableSyntax
- ; if not rebindable_on then
+ ; if not rebindable_on then
return (map HsVar std_names, emptyFVs)
else
do { usr_names <- mapM (lookupOccRn . mkRdrUnqual . nameOccName) std_names
; return (map HsVar usr_names, mkFVs usr_names) } }
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Binding}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
newLocalBndrRn :: Located RdrName -> RnM Name
-- Used for non-top-level binders. These should
-- never be qualified.
@@ -1496,16 +1494,15 @@ checkShadowedOccs (global_env,local_env) get_loc_occ ns
; return (gre_name gre `elemNameSet` fld_set) }
| otherwise = do { sel_id <- tcLookupField (gre_name gre)
; return (isRecordSelector sel_id) }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
What to do when a lookup fails
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data WhereLooking = WL_Any -- Any binding
| WL_Global -- Any top-level binding (local or imported)
| WL_LocalTop -- Any top-level binding in this module
@@ -1656,15 +1653,15 @@ unknownNameSuggestErr where_look tried_rdr_name
quals_only _ LocalDef = []
quals_only occ (Imported is) = [ (mkRdrQual (is_as ispec) occ, Right ispec)
| i <- is, let ispec = is_decl i, is_qual ispec ]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Free variable manipulation}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- A useful utility
addFvRn :: FreeVars -> RnM (thing, FreeVars) -> RnM (thing, FreeVars)
addFvRn fvs1 thing_inside = do { (res, fvs2) <- thing_inside
@@ -1689,16 +1686,15 @@ mapFvRnCPS _ [] cont = cont []
mapFvRnCPS f (x:xs) cont = f x $ \ x' ->
mapFvRnCPS f xs $ \ xs' ->
cont (x':xs')
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Envt utility functions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
warnUnusedTopBinds :: [GlobalRdrElt] -> RnM ()
warnUnusedTopBinds gres
= whenWOptM Opt_WarnUnusedBinds
@@ -1765,9 +1761,7 @@ addUnusedWarning name span msg
sep [msg <> colon,
nest 2 $ pprNonVarNameSpace (occNameSpace (nameOccName name))
<+> quotes (ppr name)]
-\end{code}
-\begin{code}
addNameClashErrRn :: RdrName -> [GlobalRdrElt] -> RnM ()
addNameClashErrRn rdr_name gres
| all isLocalGRE gres -- If there are two or more *local* defns, we'll have reported
@@ -1834,16 +1828,14 @@ checkTupSize tup_size
= addErr (sep [ptext (sLit "A") <+> int tup_size <> ptext (sLit "-tuple is too large for GHC"),
nest 2 (parens (ptext (sLit "max size is") <+> int mAX_TUPLE_SIZE)),
nest 2 (ptext (sLit "Workaround: use nested tuples or define a data type"))])
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Contexts for renaming errors}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
data HsDocContext
= TypeSigCtx SDoc
@@ -1893,4 +1885,3 @@ docOfHsDocContext GHCiCtx = ptext (sLit "In GHCi input")
docOfHsDocContext (SpliceTypeCtx hs_ty) = ptext (sLit "In the spliced type") <+> ppr hs_ty
docOfHsDocContext ClassInstanceCtx = ptext (sLit "TcSplice.reifyInstances")
docOfHsDocContext (VectDeclCtx tycon) = ptext (sLit "In the VECTORISE pragma for type constructor") <+> quotes (ppr tycon)
-\end{code}
diff --git a/compiler/rename/RnExpr.lhs b/compiler/rename/RnExpr.hs
index 533cdcdef5..a0b5a1537c 100644
--- a/compiler/rename/RnExpr.lhs
+++ b/compiler/rename/RnExpr.hs
@@ -1,6 +1,6 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[RnExpr]{Renaming of expressions}
Basically dependency analysis.
@@ -8,8 +8,8 @@ Basically dependency analysis.
Handles @Match@, @GRHSs@, @HsExpr@, and @Qualifier@ datatypes. In
general, all of these functions return a renamed thing, and a set of
free variables.
+-}
-\begin{code}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module RnExpr (
@@ -46,15 +46,15 @@ import SrcLoc
import FastString
import Control.Monad
import TysWiredIn ( nilDataConName )
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
rnExprs :: [LHsExpr RdrName] -> RnM ([LHsExpr Name], FreeVars)
rnExprs ls = rnExprs' ls emptyUniqSet
where
@@ -66,11 +66,9 @@ rnExprs ls = rnExprs' ls emptyUniqSet
; let acc' = acc `plusFV` fvExpr
; (exprs', fvExprs) <- acc' `seq` rnExprs' exprs acc'
; return (expr':exprs', fvExprs) }
-\end{code}
-Variables. We look up the variable and return the resulting name.
+-- Variables. We look up the variable and return the resulting name.
-\begin{code}
rnLExpr :: LHsExpr RdrName -> RnM (LHsExpr Name, FreeVars)
rnLExpr = wrapLocFstM rnExpr
@@ -294,26 +292,26 @@ rnExpr (ArithSeq _ _ seq)
rnExpr (PArrSeq _ seq)
= do { (new_seq, fvs) <- rnArithSeq seq
; return (PArrSeq noPostTcExpr new_seq, fvs) }
-\end{code}
+{-
These three are pattern syntax appearing in expressions.
Since all the symbols are reservedops we can simply reject them.
We return a (bogus) EWildPat in each case.
+-}
-\begin{code}
rnExpr EWildPat = return (hsHoleExpr, emptyFVs)
rnExpr e@(EAsPat {}) = patSynErr e
rnExpr e@(EViewPat {}) = patSynErr e
rnExpr e@(ELazyPat {}) = patSynErr e
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Arrow notation
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
rnExpr (HsProc pat body)
= newArrowScope $
rnPat ProcExpr pat $ \ pat' -> do
@@ -354,15 +352,15 @@ rnSection section@(SectionL expr op)
; return (SectionL expr' op', fvs_op `plusFV` fvs_expr) }
rnSection other = pprPanic "rnSection" (ppr other)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Records
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
rnHsRecBinds :: HsRecFieldContext -> HsRecordBinds RdrName
-> RnM (HsRecordBinds Name, FreeVars)
rnHsRecBinds ctxt rec_binds@(HsRecFields { rec_dotdot = dd })
@@ -373,16 +371,15 @@ rnHsRecBinds ctxt rec_binds@(HsRecFields { rec_dotdot = dd })
where
rn_field (L l fld) = do { (arg', fvs) <- rnLExpr (hsRecFieldArg fld)
; return (L l (fld { hsRecFieldArg = arg' }), fvs) }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Arrow commands
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
rnCmdArgs :: [LHsCmdTop RdrName] -> RnM ([LHsCmdTop Name], FreeVars)
rnCmdArgs [] = return ([], emptyFVs)
rnCmdArgs (arg:args)
@@ -546,16 +543,15 @@ methodNamesStmt (ParStmt {}) = emptyFVs
methodNamesStmt (TransStmt {}) = emptyFVs
-- ParStmt and TransStmt can't occur in commands, but it's not convenient to error
-- here so we just do what's convenient
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Arithmetic sequences
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
rnArithSeq :: ArithSeqInfo RdrName -> RnM (ArithSeqInfo Name, FreeVars)
rnArithSeq (From expr)
= do { (expr', fvExpr) <- rnLExpr expr
@@ -577,15 +573,15 @@ rnArithSeq (FromThenTo expr1 expr2 expr3)
; (expr3', fvExpr3) <- rnLExpr expr3
; return (FromThenTo expr1' expr2' expr3',
plusFVs [fvExpr1, fvExpr2, fvExpr3]) }
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{@Stmt@s: in @do@ expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
rnStmts :: Outputable (body RdrName) => HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> [LStmt RdrName (Located (body RdrName))]
@@ -791,8 +787,8 @@ lookupStmtName ctxt n
where
rebindable = lookupSyntaxName n
not_rebindable = return (HsVar n, emptyFVs)
-\end{code}
+{-
Note [Renaming parallel Stmts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Renaming parallel statements is painful. Given, say
@@ -811,13 +807,13 @@ To satisfy (a) we nest the segments.
To satisfy (b) we check for duplicates just before thing_inside.
To satisfy (c) we reset the LocalRdrEnv each time.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsubsection{mdo expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type FwdRefs = NameSet
type Segment stmts = (Defs,
Uses, -- May include defs
@@ -986,7 +982,7 @@ rn_rec_stmts rnBody bndrs stmts
; return (concat segs_s) }
---------------------------------------------
-segmentRecStmts :: HsStmtContext Name
+segmentRecStmts :: HsStmtContext Name
-> Stmt Name body
-> [Segment (LStmt Name body)] -> FreeVars
-> ([LStmt Name body], FreeVars)
@@ -1039,8 +1035,8 @@ addFwdRefs segs
all_defs = later_defs `unionNameSet` defs
new_fwds = fwds `unionNameSet` (uses `intersectNameSet` later_defs)
-- Add the downstream fwd refs here
-\end{code}
+{-
Note [Segmenting mdo]
~~~~~~~~~~~~~~~~~~~~~
NB. June 7 2012: We only glom segments that appear in an explicit mdo;
@@ -1082,8 +1078,8 @@ glom it together with the first two groups
{ rec { x <- ...y...; p <- z ; y <- ...x... ;
q <- x ; z <- y } ;
r <- x }
+-}
-\begin{code}
glomSegments :: HsStmtContext Name -> [Segment (LStmt Name body)] -> [Segment [LStmt Name body]]
-- See Note [Glomming segments]
@@ -1132,15 +1128,15 @@ segsToStmts empty_rec_stmt ((defs, uses, fwds, ss) : segs) fvs_later
non_rec = isSingleton ss && isEmptyNameSet fwds
used_later = defs `intersectNameSet` later_uses
-- The ones needed after the RecStmt
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Errors}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
checkEmptyStmts :: HsStmtContext Name -> RnM ()
-- We've seen an empty sequence of Stmts... is that ok?
checkEmptyStmts ctxt
@@ -1309,4 +1305,3 @@ badIpBinds :: Outputable a => SDoc -> a -> SDoc
badIpBinds what binds
= hang (ptext (sLit "Implicit-parameter bindings illegal in") <+> what)
2 (ppr binds)
-\end{code}
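All of the RnExpr hunks above follow one mechanical pattern: the literate-Haskell markup of the old .lhs file (the \begin{code}/\end{code} pairs and the %-ruled banners) becomes ordinary Haskell block comments, and the code that sat between the markers is left untouched. As a rough sketch only -- Demo is a hypothetical module, not part of this patch -- a converted file ends up shaped like this:

    {-
    (c) An example header, now kept inside a block comment
    \section[Demo]{A tiny converted module}
    -}

    {-# LANGUAGE CPP #-}
    module Demo (answer) where

    -- What used to sit between \begin{code} and \end{code} is now
    -- plain top-level source; the banner text survives as {- ... -}.
    answer :: Int
    answer = 42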
diff --git a/compiler/rename/RnExpr.lhs-boot b/compiler/rename/RnExpr.hs-boot
index 0a00a9e2bc..5419870d38 100644
--- a/compiler/rename/RnExpr.lhs-boot
+++ b/compiler/rename/RnExpr.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module RnExpr where
import HsSyn
import Name ( Name )
@@ -12,10 +11,8 @@ rnLExpr :: LHsExpr RdrName
-> RnM (LHsExpr Name, FreeVars)
rnStmts :: --forall thing body.
- Outputable (body RdrName) => HsStmtContext Name
+ Outputable (body RdrName) => HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> [LStmt RdrName (Located (body RdrName))]
-> ([Name] -> RnM (thing, FreeVars))
- -> RnM (([LStmt Name (Located (body Name))], thing), FreeVars)
-\end{code}
-
+ -> RnM (([LStmt Name (Located (body Name))], thing), FreeVars)
diff --git a/compiler/rename/RnNames.lhs b/compiler/rename/RnNames.hs
index 02a45d0db8..bff2ed0f29 100644
--- a/compiler/rename/RnNames.lhs
+++ b/compiler/rename/RnNames.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[RnNames]{Extracting imported and top-level names in scope}
+-}
-\begin{code}
{-# LANGUAGE CPP, NondecreasingIndentation #-}
module RnNames (
@@ -48,14 +48,13 @@ import Data.List ( partition, (\\), find )
import qualified Data.Set as Set
import System.FilePath ((</>))
import System.IO
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{rnImports}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Tracking Trust Transitively]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -135,9 +134,8 @@ compilation errors in code that doesn't do anything with Safe Haskell simply
because they are using the network package. They will have to call 'ghc-pkg
trust network' to get everything working. Due to this invasive nature of going
with yes we have gone with no for now.
+-}
-
-\begin{code}
-- | Process Import Decls
-- Do the non SOURCE ones first, so that we get a helpful warning for SOURCE
-- ones that are unnecessary
@@ -357,14 +355,13 @@ warnRedundantSourceImport :: ModuleName -> SDoc
warnRedundantSourceImport mod_name
= ptext (sLit "Unnecessary {-# SOURCE #-} in the import of module")
<+> quotes (ppr mod_name)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{importsFromLocalDecls}
-%* *
-%************************************************************************
+* *
+************************************************************************
From the top-level declarations of this module produce
* the lexical environment
@@ -411,8 +408,8 @@ top level binders specially in two ways
stage. This is a slight hack, because the stage field was really
meant for the type checker, and here we are not interested in the
fields of Brack, hence the error thunks in thRnBrack.
+-}
-\begin{code}
extendGlobalRdrEnvRn :: [AvailInfo]
-> MiniFixityEnv
-> RnM (TcGblEnv, TcLclEnv)
@@ -473,14 +470,14 @@ extendGlobalRdrEnvRn avails new_fixities
= fix_env
where
occ = nameOccName name
-\end{code}
+{-
@getLocalDeclBinders@ returns the names for an @HsDecl@. It's
used for source code.
*** See "THE NAMING STORY" in HsDecls ****
+-}
-\begin{code}
getLocalNonValBinders :: MiniFixityEnv -> HsGroup RdrName
-> RnM ((TcGblEnv, TcLclEnv), NameSet)
-- Get all the top-level binders bound the group *except*
@@ -568,8 +565,8 @@ getLocalNonValBinders fixity_env
; sub_names <- mapM newTopSrcBinder (hsDataFamInstBinders ti_decl)
; return (AvailTC (unLoc main_name) sub_names) }
-- main_name is not bound here!
-\end{code}
+{-
Note [Looking up family names in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
@@ -588,21 +585,21 @@ Solution is simple: process the type family declarations first, extend
the environment, and then process the type instances.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Filtering imports}
-%* *
-%************************************************************************
+* *
+************************************************************************
@filterImports@ takes the @ExportEnv@ telling what the imported module makes
available, and filters it through the import spec (if any).
Note [Dealing with imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
-For import M( ies ), we take the mi_exports of M, and make
- imp_occ_env :: OccEnv (Name, AvailInfo, Maybe Name)
+For import M( ies ), we take the mi_exports of M, and make
+ imp_occ_env :: OccEnv (Name, AvailInfo, Maybe Name)
One entry for each Name that M exports; the AvailInfo describes just
-that Name.
+that Name.
The situation is made more complicated by associated types. E.g.
module M where
@@ -619,8 +616,8 @@ From this we construct the imp_occ_env
Note that the imp_occ_env will have entries for data constructors too,
although we never look up data constructors.
+-}
-\begin{code}
filterImports
:: [ModIface]
-> ImpDeclSpec -- The span for the entire import decl
@@ -756,7 +753,7 @@ filterImports ifaces decl_spec (Just (want_hiding, L l import_items))
(name, AvailTC _ ns, mb_parent) <- lookup_name rdr_tc
-- Look up the children in the sub-names of the parent
- let subnames = case ns of -- The tc is first in ns,
+ let subnames = case ns of -- The tc is first in ns,
[] -> [] -- if it is there at all
-- See the AvailTC Invariant in Avail.hs
(n1:ns1) | n1 == name -> ns1
@@ -814,15 +811,15 @@ catchIELookup m h = case m of
catIELookupM :: [IELookupM a] -> [a]
catIELookupM ms = [ a | Succeeded a <- ms ]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Import/Export Utils}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
greExportAvail :: GlobalRdrElt -> AvailInfo
greExportAvail gre
= case gre_par gre of
@@ -914,14 +911,13 @@ nubAvails :: [AvailInfo] -> [AvailInfo]
nubAvails avails = nameEnvElts (foldl add emptyNameEnv avails)
where
add env avail = extendNameEnv_C plusAvail env (availName avail) avail
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Export list processing}
-%* *
-%************************************************************************
+* *
+************************************************************************
Processing the export list.
@@ -961,8 +957,8 @@ At one point I implemented a compromise:
But the compromise seemed too much of a hack, so we backed it out.
You just have to use an explicit export list:
module M( F(..) ) where ...
+-}
-\begin{code}
type ExportAccum -- The type of the accumulating parameter of
-- the main worker function in rnExports
= ([LIE Name], -- Export items with Names
@@ -1262,16 +1258,15 @@ dupExport_ok n ie1 ie2
single (IEVar {}) = True
single (IEThingAbs {}) = True
single _ = False
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Unused names}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
reportUnusedNames :: Maybe (Located [LIE RdrName]) -- Export list
-> TcGblEnv -> RnM ()
reportUnusedNames _export_decls gbl_env
@@ -1313,26 +1308,24 @@ reportUnusedNames _export_decls gbl_env
unused_locals = filter is_unused_local defined_but_not_used
is_unused_local :: GlobalRdrElt -> Bool
is_unused_local gre = isLocalGRE gre && isExternalName (gre_name gre)
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Unused imports}
-%* *
-%*********************************************************
+* *
+*********************************************************
This code finds which import declarations are unused. The
specification and implementation notes are here:
http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/UnusedImports
+-}
-\begin{code}
type ImportDeclUsage
= ( LImportDecl Name -- The import declaration
, [AvailInfo] -- What *is* used (normalised)
, [Name] ) -- What is imported but *not* used
-\end{code}
-\begin{code}
warnUnusedImportDecls :: TcGblEnv -> RnM ()
warnUnusedImportDecls gbl_env
= do { uses <- readMutVar (tcg_used_rdrnames gbl_env)
@@ -1352,9 +1345,8 @@ warnUnusedImportDecls gbl_env
; whenGOptM Opt_D_dump_minimal_imports $
printMinimalImports usage }
-\end{code}
-
+{-
Note [The ImportMap]
~~~~~~~~~~~~~~~~~~~~
The ImportMap is a short-lived intermediate data structure that records, for
@@ -1374,8 +1366,8 @@ It's just a cheap hack; we could equally well use the Span too.
The AvailInfos are the things imported from that decl (just a list,
not normalised).
+-}
-\begin{code}
type ImportMap = Map SrcLoc [AvailInfo] -- See [The ImportMap]
findImportUsage :: [LImportDecl Name]
@@ -1462,9 +1454,7 @@ extendImportMap rdr_env rdr imp_map
isImpAll :: ImportSpec -> Bool
isImpAll (ImpSpec { is_item = ImpAll }) = True
isImpAll _other = False
-\end{code}
-\begin{code}
warnUnusedImport :: ImportDeclUsage -> RnM ()
warnUnusedImport (L loc decl, used, unused)
| Just (False,L _ []) <- ideclHiding decl
@@ -1491,8 +1481,8 @@ warnUnusedImport (L loc decl, used, unused)
| otherwise = Outputable.empty
pp_mod = ppr (unLoc (ideclName decl))
pp_not_used = text "is redundant"
-\end{code}
+{-
Note [Do not warn about Prelude hiding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not warn about
@@ -1513,8 +1503,8 @@ decls, and simply trim their import lists. NB that
* We do not discard a decl altogether; we might need instances
from it. Instead we just trim to an empty import list
+-}
-\begin{code}
printMinimalImports :: [ImportDeclUsage] -> RnM ()
-- See Note [Printing minimal imports]
printMinimalImports imports_w_usage
@@ -1571,8 +1561,8 @@ printMinimalImports imports_w_usage
_other -> map (IEVar . noLoc) ns
where
all_used avail_occs = all (`elem` ns) avail_occs
-\end{code}
+{-
Note [Partial export]
~~~~~~~~~~~~~~~~~~~~~
Suppose we have
@@ -1593,13 +1583,13 @@ which we would usually generate if C was exported from B. Hence
the (x `elem` xs) test when deciding what to generate.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Errors}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
qualImportItemErr :: RdrName -> SDoc
qualImportItemErr rdr
= hang (ptext (sLit "Illegal qualified name in import item:"))
@@ -1789,4 +1779,3 @@ checkConName name = checkErr (isRdrDataCon name) (badDataCon name)
badDataCon :: RdrName -> SDoc
badDataCon name
= hsep [ptext (sLit "Illegal data constructor name"), quotes (ppr name)]
-\end{code}
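Note [Dealing with imports] in the RnNames hunks above describes building imp_occ_env, with one entry per Name that the imported module exports. The sketch below is only a stand-alone approximation of that shape: Data.Map stands in for GHC's OccEnv, plain strings stand in for Names, and mkImpOccEnv is an invented helper rather than GHC's own code.

    import qualified Data.Map as Map

    type OccName   = String
    type Name      = String
    type AvailInfo = (Name, [Name])   -- a parent and the names it brings with it
    type ImpOccEnv = Map.Map OccName (Name, AvailInfo, Maybe Name)

    -- One entry per exported name; the Maybe Name is the parent type
    -- constructor, when the name has one (e.g. a data constructor or
    -- an associated type).
    mkImpOccEnv :: [AvailInfo] -> ImpOccEnv
    mkImpOccEnv avails = Map.fromList
      [ (n, (n, avail, if n == parent then Nothing else Just parent))
      | avail@(parent, children) <- avails
      , n <- parent : children ]

With made-up input such as mkImpOccEnv [("T", ["MkT"]), ("C", ["f"])], the children MkT and f get entries whose third field is Just their parent, while T and C themselves get Nothing.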
diff --git a/compiler/rename/RnPat.lhs b/compiler/rename/RnPat.hs
index 90002d8b7e..160f9ad2d1 100644
--- a/compiler/rename/RnPat.lhs
+++ b/compiler/rename/RnPat.hs
@@ -1,6 +1,6 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[RnPat]{Renaming of patterns}
Basically dependency analysis.
@@ -8,8 +8,8 @@ Basically dependency analysis.
Handles @Match@, @GRHSs@, @HsExpr@, and @Qualifier@ datatypes. In
general, all of these functions return a renamed thing, and a set of
free variables.
+-}
-\begin{code}
{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-}
module RnPat (-- main entry points
@@ -40,7 +40,7 @@ import {-# SOURCE #-} TcSplice ( runQuasiQuotePat )
#include "HsVersions.h"
-import HsSyn
+import HsSyn
import TcRnMonad
import TcHsSyn ( hsOverLitName )
import RnEnv
@@ -65,14 +65,13 @@ import TysWiredIn ( nilDataCon )
import DataCon ( dataConName )
import Control.Monad ( when, liftM, ap )
import Data.Ratio
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
The CpsRn Monad
-%* *
-%*********************************************************
+* *
+*********************************************************
Note [CpsRn monad]
~~~~~~~~~~~~~~~~~~
@@ -85,17 +84,17 @@ style of programming:
where rs::[RdrName], ns::[Name]
-The idea is that '...blah...'
+The idea is that '...blah...'
a) sees the bindings of ns
b) returns the free variables it mentions
so that bindNames can report unused ones
-In particular,
+In particular,
mapM rnPatAndThen [p1, p2, p3]
-has a *left-to-right* scoping: it makes the binders in
+has a *left-to-right* scoping: it makes the binders in
p1 scope over p2,p3.
+-}
-\begin{code}
newtype CpsRn b = CpsRn { unCpsRn :: forall r. (b -> RnM (r, FreeVars))
-> RnM (r, FreeVars) }
-- See Note [CpsRn monad]
@@ -125,19 +124,19 @@ liftCpsFV rn_thing = CpsRn (\k -> do { (v,fvs1) <- rn_thing
wrapSrcSpanCps :: (a -> CpsRn b) -> Located a -> CpsRn (Located b)
-- Set the location, and also wrap it around the value returned
wrapSrcSpanCps fn (L loc a)
- = CpsRn (\k -> setSrcSpan loc $
- unCpsRn (fn a) $ \v ->
+ = CpsRn (\k -> setSrcSpan loc $
+ unCpsRn (fn a) $ \v ->
k (L loc v))
lookupConCps :: Located RdrName -> CpsRn (Located Name)
-lookupConCps con_rdr
+lookupConCps con_rdr
= CpsRn (\k -> do { con_name <- lookupLocatedOccRn con_rdr
; (r, fvs) <- k con_name
; return (r, addOneFV fvs (unLoc con_name)) })
-- We add the constructor name to the free vars
-- See Note [Patterns are uses]
-\end{code}
+{-
Note [Patterns are uses]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider
@@ -165,20 +164,20 @@ where we don't know yet whether P2 is a constructor or a pattern
synonym. So for now, we do report conid occurrences in patterns as
uses.
-%*********************************************************
-%* *
+*********************************************************
+* *
Name makers
-%* *
-%*********************************************************
+* *
+*********************************************************
Externally abstract type of name makers,
which is how you go from a RdrName to a Name
+-}
-\begin{code}
-data NameMaker
- = LamMk -- Lambdas
+data NameMaker
+ = LamMk -- Lambdas
Bool -- True <=> report unused bindings
- -- (even if True, the warning only comes out
+ -- (even if True, the warning only comes out
-- if -fwarn-unused-matches is on)
| LetMk -- Let bindings, incl top level
@@ -194,7 +193,7 @@ isTopRecNameMaker (LetMk TopLevel _) = True
isTopRecNameMaker _ = False
localRecNameMaker :: MiniFixityEnv -> NameMaker
-localRecNameMaker fix_env = LetMk NotTopLevel fix_env
+localRecNameMaker fix_env = LetMk NotTopLevel fix_env
matchNameMaker :: HsMatchContext a -> NameMaker
matchNameMaker ctxt = LamMk report_unused
@@ -210,19 +209,19 @@ matchNameMaker ctxt = LamMk report_unused
rnHsSigCps :: HsWithBndrs RdrName (LHsType RdrName)
-> CpsRn (HsWithBndrs Name (LHsType Name))
-rnHsSigCps sig
+rnHsSigCps sig
= CpsRn (rnHsBndrSig PatCtx sig)
newPatName :: NameMaker -> Located RdrName -> CpsRn Name
newPatName (LamMk report_unused) rdr_name
- = CpsRn (\ thing_inside ->
+ = CpsRn (\ thing_inside ->
do { name <- newLocalBndrRn rdr_name
; (res, fvs) <- bindLocalNames [name] (thing_inside name)
; when report_unused $ warnUnusedMatches [name] fvs
; return (res, name `delFV` fvs) })
newPatName (LetMk is_top fix_env) rdr_name
- = CpsRn (\ thing_inside ->
+ = CpsRn (\ thing_inside ->
do { name <- case is_top of
NotTopLevel -> newLocalBndrRn rdr_name
TopLevel -> newTopSrcBinder rdr_name
@@ -230,15 +229,15 @@ newPatName (LetMk is_top fix_env) rdr_name
-- See Note [View pattern usage]
addLocalFixities fix_env [name] $
thing_inside name })
-
+
-- Note: the bindLocalNames is somewhat suspicious
-- because it binds a top-level name as a local name.
-- however, this binding seems to work, and it only exists for
-- the duration of the patterns and the continuation;
-- then the top-level name is added to the global env
-- before going on to the RHSes (see RnSource.lhs).
-\end{code}
+{-
Note [View pattern usage]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
@@ -248,28 +247,28 @@ We want to "see" this use, and in let-bindings we collect all uses and
report unused variables at the binding level. So we must use bindLocalNames
here, *not* bindLocalNameFV. Trac #3943.
-%*********************************************************
-%* *
+*********************************************************
+* *
External entry points
-%* *
-%*********************************************************
+* *
+*********************************************************
There are various entry points to renaming patterns, depending on
(1) whether the names created should be top-level names or local names
(2) whether the scope of the names is entirely given in a continuation
(e.g., in a case or lambda, but not in a let or at the top-level,
because of the way mutually recursive bindings are handled)
- (3) whether the a type signature in the pattern can bind
- lexically-scoped type variables (for unpacking existential
+ (3) whether a type signature in the pattern can bind
+ lexically-scoped type variables (for unpacking existential
type vars in data constructors)
(4) whether we do duplicate and unused variable checking
(5) whether there are fixity declarations associated with the names
bound by the patterns that need to be brought into scope with them.
-
+
Rather than burdening the clients of this module with all of these choices,
we export the three points in this design space that we actually need:
+-}
-\begin{code}
-- ----------- Entry point 1: rnPats -------------------
-- Binds local names; the scope of the bindings is entirely in the thing_inside
-- * allows type sigs to bind type vars
@@ -277,7 +276,7 @@ There are various entry points to renaming patterns, depending on
-- * unused and duplicate checking
-- * no fixities
rnPats :: HsMatchContext Name -- for error messages
- -> [LPat RdrName]
+ -> [LPat RdrName]
-> ([LPat Name] -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
rnPats ctxt pats thing_inside
@@ -286,14 +285,14 @@ rnPats ctxt pats thing_inside
-- (1) rename the patterns, bringing into scope all of the term variables
-- (2) then do the thing inside.
; unCpsRn (rnLPatsAndThen (matchNameMaker ctxt) pats) $ \ pats' -> do
- { -- Check for duplicated and shadowed names
+ { -- Check for duplicated and shadowed names
-- Must do this *after* renaming the patterns
-- See Note [Collect binders only after renaming] in HsUtils
-- Because we don't bind the vars all at once, we can't
- -- check incrementally for duplicates;
+ -- check incrementally for duplicates;
-- Nor can we check incrementally for shadowing, else we'll
-- complain *twice* about duplicates e.g. f (x,x) = ...
- ; addErrCtxt doc_pat $
+ ; addErrCtxt doc_pat $
checkDupAndShadowedNames envs_before $
collectPatsBinders pats'
; thing_inside pats' } }
@@ -301,11 +300,11 @@ rnPats ctxt pats thing_inside
doc_pat = ptext (sLit "In") <+> pprMatchContext ctxt
rnPat :: HsMatchContext Name -- for error messages
- -> LPat RdrName
+ -> LPat RdrName
-> (LPat Name -> RnM (a, FreeVars))
- -> RnM (a, FreeVars) -- Variables bound by pattern do not
- -- appear in the result FreeVars
-rnPat ctxt pat thing_inside
+ -> RnM (a, FreeVars) -- Variables bound by pattern do not
+ -- appear in the result FreeVars
+rnPat ctxt pat thing_inside
= rnPats ctxt [pat] (\pats' -> let [pat'] = pats' in thing_inside pat')
applyNameMaker :: NameMaker -> Located RdrName -> RnM Name
@@ -322,19 +321,18 @@ rnBindPat :: NameMaker
-> LPat RdrName
-> RnM (LPat Name, FreeVars)
-- Returned FreeVars are the free variables of the pattern,
- -- of course excluding variables bound by this pattern
+ -- of course excluding variables bound by this pattern
rnBindPat name_maker pat = runCps (rnLPatAndThen name_maker pat)
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
The main event
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
-- ----------- Entry point 3: rnLPatAndThen -------------------
-- General version: parametrized by how you make new names
@@ -358,7 +356,7 @@ rnPatAndThen mk (VarPat rdr) = do { loc <- liftCps getSrcSpanM
; return (VarPat name) }
-- we need to bind pattern variables for view pattern expressions
-- (e.g. in the pattern (x, x -> y) x needs to be bound in the rhs of the tuple)
-
+
rnPatAndThen mk (SigPatIn pat sig)
-- When renaming a pattern type signature (e.g. f (a :: T) = ...), it is
-- important to rename its type signature _before_ renaming the rest of the
@@ -372,11 +370,11 @@ rnPatAndThen mk (SigPatIn pat sig)
= do { sig' <- rnHsSigCps sig
; pat' <- rnLPatAndThen mk pat
; return (SigPatIn pat' sig') }
-
+
rnPatAndThen mk (LitPat lit)
| HsString src s <- lit
= do { ovlStr <- liftCps (xoptM Opt_OverloadedStrings)
- ; if ovlStr
+ ; if ovlStr
then rnPatAndThen mk (mkNPat (mkHsIsString src s placeHolderType)
Nothing)
else normal_lit }
@@ -410,8 +408,8 @@ rnPatAndThen mk p@(ViewPat expr pat _ty)
= do { liftCps $ do { vp_flag <- xoptM Opt_ViewPatterns
; checkErr vp_flag (badViewPat p) }
-- Because of the way we're arranging the recursive calls,
- -- this will be in the right context
- ; expr' <- liftCpsFV $ rnLExpr expr
+ -- this will be in the right context
+ ; expr' <- liftCpsFV $ rnLExpr expr
; pat' <- rnLPatAndThen mk pat
-- Note: at this point the PreTcType in ty can only be a placeHolder
-- ; return (ViewPat expr' pat' ty) }
@@ -423,7 +421,7 @@ rnPatAndThen mk (ConPatIn con stuff)
= case unLoc con == nameRdrName (dataConName nilDataCon) of
True -> do { ol_flag <- liftCps $ xoptM Opt_OverloadedLists
; if ol_flag then rnPatAndThen mk (ListPat [] placeHolderType Nothing)
- else rnConPatAndThen mk con stuff}
+ else rnConPatAndThen mk con stuff}
False -> rnConPatAndThen mk con stuff
rnPatAndThen mk (ListPat pats _ _)
@@ -448,12 +446,12 @@ rnPatAndThen mk (SplicePat splice)
= do { eith <- liftCpsFV $ rnSplicePat splice
; case eith of -- See Note [rnSplicePat] in RnSplice
Left not_yet_renamed -> rnPatAndThen mk not_yet_renamed
- Right already_renamed -> return already_renamed }
-
+ Right already_renamed -> return already_renamed }
+
rnPatAndThen mk (QuasiQuotePat qq)
= do { pat <- liftCps $ runQuasiQuotePat qq
-- Wrap the result of the quasi-quoter in parens so that we don't
- -- lose the outermost location set by runQuasiQuote (#7918)
+ -- lose the outermost location set by runQuasiQuote (#7918)
; rnPatAndThen mk (ParPat pat) }
rnPatAndThen _ pat = pprPanic "rnLPatAndThen" (ppr pat)
@@ -462,7 +460,7 @@ rnPatAndThen _ pat = pprPanic "rnLPatAndThen" (ppr pat)
--------------------
rnConPatAndThen :: NameMaker
-> Located RdrName -- the constructor
- -> HsConPatDetails RdrName
+ -> HsConPatDetails RdrName
-> CpsRn (Pat Name)
rnConPatAndThen mk con (PrefixCon pats)
@@ -491,7 +489,7 @@ rnHsRecPatsAndThen mk (L _ con) hs_rec_fields@(HsRecFields { rec_dotdot = dd })
= do { flds <- liftCpsFV $ rnHsRecFields (HsRecFieldPat con) VarPat hs_rec_fields
; flds' <- mapM rn_field (flds `zip` [1..])
; return (HsRecFields { rec_flds = flds', rec_dotdot = dd }) }
- where
+ where
rn_field (L l fld, n') = do { arg' <- rnLPatAndThen (nested_mk dd mk n')
(hsRecFieldArg fld)
; return (L l (fld { hsRecFieldArg = arg' })) }
@@ -500,23 +498,22 @@ rnHsRecPatsAndThen mk (L _ con) hs_rec_fields@(HsRecFields { rec_dotdot = dd })
nested_mk Nothing mk _ = mk
nested_mk (Just _) mk@(LetMk {}) _ = mk
nested_mk (Just n) (LamMk report_unused) n' = LamMk (report_unused && (n' <= n))
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Record fields
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-data HsRecFieldContext
+data HsRecFieldContext
= HsRecFieldCon Name
| HsRecFieldPat Name
| HsRecFieldUpd
rnHsRecFields
- :: forall arg.
+ :: forall arg.
HsRecFieldContext
-> (RdrName -> arg) -- When punning, use this to build a new field
-> HsRecFields RdrName (Located arg)
@@ -552,9 +549,9 @@ rnHsRecFields ctxt mk_arg (HsRecFields { rec_flds = flds, rec_dotdot = dotdot })
HsRecFieldCon con | not (isUnboundName con) -> Just con
HsRecFieldPat con | not (isUnboundName con) -> Just con
_ {- update or isUnboundName con -} -> Nothing
- -- The unbound name test is because if the constructor
+ -- The unbound name test is because if the constructor
-- isn't in scope the constructor lookup will add an error
- -- add an error, but still return an unbound name.
+ -- add an error, but still return an unbound name.
-- We don't want that to screw up the dot-dot fill-in stuff.
doc = case mb_con of
@@ -565,7 +562,7 @@ rnHsRecFields ctxt mk_arg (HsRecFields { rec_flds = flds, rec_dotdot = dotdot })
, hsRecFieldArg = arg
, hsRecPun = pun }))
= do { fld'@(L loc fld_nm) <- wrapLocM (lookupSubBndrOcc True parent doc) fld
- ; arg' <- if pun
+ ; arg' <- if pun
then do { checkErr pun_ok (badPun fld)
; return (L loc (mk_arg (mkRdrUnqual (nameOccName fld_nm)))) }
else return arg
@@ -601,7 +598,7 @@ rnHsRecFields ctxt mk_arg (HsRecFields { rec_flds = flds, rec_dotdot = dotdot })
-- ignoring the record field itself
-- Eg. data R = R { x,y :: Int }
-- f x = R { .. } -- Should expand to R {x=x}, not R{x=x,y=y}
- arg_in_scope fld
+ arg_in_scope fld
= rdr `elemLocalRdrEnv` lcl_env
|| notNull [ gre | gre <- lookupGRE_RdrName rdr rdr_env
, case gre_par gre of
@@ -617,7 +614,7 @@ rnHsRecFields ctxt mk_arg (HsRecFields { rec_flds = flds, rec_dotdot = dotdot })
, not (null gres) -- Check field is in scope
, case ctxt of
HsRecFieldCon {} -> arg_in_scope fld
- _other -> True ]
+ _other -> True ]
; addUsedRdrNames (map greRdrName dot_dot_gres)
; return [ L loc (HsRecField
@@ -629,17 +626,17 @@ rnHsRecFields ctxt mk_arg (HsRecFields { rec_flds = flds, rec_dotdot = dotdot })
arg_rdr = mkRdrUnqual (nameOccName fld) ] }
check_disambiguation :: Bool -> Maybe Name -> RnM Parent
- -- When disambiguation is on,
+ -- When disambiguation is on,
check_disambiguation disambig_ok mb_con
| disambig_ok, Just con <- mb_con
= do { env <- getGlobalRdrEnv; return (ParentIs (find_tycon env con)) }
| otherwise = return NoParent
-
+
find_tycon :: GlobalRdrEnv -> Name {- DataCon -} -> Name {- TyCon -}
-- Return the parent *type constructor* of the data constructor
- -- That is, the parent of the data constructor.
+ -- That is, the parent of the data constructor.
-- That's the parent to use for looking up record fields.
- find_tycon env con
+ find_tycon env con
| Just (AConLike (RealDataCon dc)) <- wiredInNameTyThing_maybe con
= tyConName (dataConTyCon dc) -- Special case for [], which is built-in syntax
-- and not in the GlobalRdrEnv (Trac #8448)
@@ -679,7 +676,7 @@ badPun fld = vcat [ptext (sLit "Illegal use of punning for field") <+> quotes (p
dupFieldErr :: HsRecFieldContext -> [RdrName] -> SDoc
dupFieldErr ctxt dups
- = hsep [ptext (sLit "duplicate field name"),
+ = hsep [ptext (sLit "duplicate field name"),
quotes (ppr (head dups)),
ptext (sLit "in record"), pprRFC ctxt]
@@ -687,20 +684,19 @@ pprRFC :: HsRecFieldContext -> SDoc
pprRFC (HsRecFieldCon {}) = ptext (sLit "construction")
pprRFC (HsRecFieldPat {}) = ptext (sLit "pattern")
pprRFC (HsRecFieldUpd {}) = ptext (sLit "update")
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Literals}
-%* *
-%************************************************************************
+* *
+************************************************************************
When literals occur we have to make sure
that the types and classes they involve
are made available.
+-}
-\begin{code}
rnLit :: HsLit -> RnM ()
rnLit (HsChar _ c) = checkErr (inCharRange c) (bogusCharError c)
rnLit _ = return ()
@@ -727,15 +723,15 @@ rnOverLit origLit
; return (lit { ol_witness = from_thing_name
, ol_rebindable = rebindable
, ol_type = placeHolderType }, fvs) }
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Errors}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
patSigErr :: Outputable a => a -> SDoc
patSigErr ty
= (ptext (sLit "Illegal signature in pattern:") <+> ppr ty)
@@ -748,4 +744,3 @@ bogusCharError c
badViewPat :: Pat RdrName -> SDoc
badViewPat pat = vcat [ptext (sLit "Illegal view pattern: ") <+> ppr pat,
ptext (sLit "Use ViewPatterns to enable view patterns")]
-\end{code}
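Note [CpsRn monad] in the RnPat hunks above motivates the continuation-passing newtype the rest of that file is written in: each renaming step is handed its continuation, so pattern binders scope over everything that follows. The code below is a self-contained toy version, not GHC's code; RnM is approximated by IO and FreeVars by a list of strings so that it compiles on its own.

    {-# LANGUAGE RankNTypes #-}

    -- Stand-ins for GHC's monad and free-variable set, chosen only to
    -- make the sketch self-contained; these are assumptions, not GHC types.
    type RnM a    = IO a
    type FreeVars = [String]

    -- Same shape as CpsRn in RnPat: a computation that receives its own
    -- continuation, so binders introduced here scope over "the rest".
    newtype CpsRn b = CpsRn
      { unCpsRn :: forall r. (b -> RnM (r, FreeVars)) -> RnM (r, FreeVars) }

    instance Functor CpsRn where
      fmap f (CpsRn g) = CpsRn (\k -> g (k . f))

    instance Applicative CpsRn where
      pure x = CpsRn (\k -> k x)
      CpsRn mf <*> CpsRn mx = CpsRn (\k -> mf (\f -> mx (k . f)))

    instance Monad CpsRn where
      CpsRn m >>= f = CpsRn (\k -> m (\x -> unCpsRn (f x) k))

    -- Run with a trivial continuation, mirroring runCps in RnPat.
    runCps :: CpsRn b -> RnM (b, FreeVars)
    runCps (CpsRn m) = m (\b -> return (b, []))

The (>>=) definition is what gives the left-to-right scoping the note describes: the continuation passed to the first action already contains the second.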
diff --git a/compiler/rename/RnSource.lhs b/compiler/rename/RnSource.hs
index f99bc810d5..95211cbdfc 100644
--- a/compiler/rename/RnSource.lhs
+++ b/compiler/rename/RnSource.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[RnSource]{Main pass of renamer}
+-}
-\begin{code}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module RnSource (
@@ -52,8 +52,8 @@ import Data.List( partition, sortBy )
import Data.Traversable (traverse)
#endif
import Maybes( orElse, mapMaybe )
-\end{code}
+{-
@rnSourceDecl@ `renames' declarations.
It simultaneously performs dependency analysis and precedence parsing.
It also does the following error checks:
@@ -68,9 +68,8 @@ Checks that all variable occurrences are defined.
\item
Checks the @(..)@ etc constraints in the export list.
\end{enumerate}
+-}
-
-\begin{code}
-- Brings the binders of the group into scope in the appropriate places;
-- does NOT assume that anything is in scope already
rnSrcDecls :: [Name] -> HsGroup RdrName -> RnM (TcGblEnv, HsGroup Name)
@@ -221,16 +220,15 @@ addTcgDUs tcg_env dus = tcg_env { tcg_dus = tcg_dus tcg_env `plusDU` dus }
rnList :: (a -> RnM (b, FreeVars)) -> [Located a] -> RnM ([Located b], FreeVars)
rnList f xs = mapFvRn (wrapLocFstM f) xs
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
HsDoc stuff
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnDocDecl :: DocDecl -> RnM DocDecl
rnDocDecl (DocCommentNext doc) = do
rn_doc <- rnHsDoc doc
@@ -244,16 +242,15 @@ rnDocDecl (DocCommentNamed str doc) = do
rnDocDecl (DocGroup lev doc) = do
rn_doc <- rnHsDoc doc
return (DocGroup lev rn_doc)
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
Source-code fixity declarations
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnSrcFixityDecls :: NameSet -> [LFixitySig RdrName] -> RnM [LFixitySig Name]
-- Rename the fixity decls, so we can put
-- the renamed decls in the renamed syntax tree
@@ -285,22 +282,21 @@ rnSrcFixityDecls bndr_set fix_decls
do names <- lookupLocalTcNames sig_ctxt what rdr_name
return [ L name_loc name | name <- names ]
what = ptext (sLit "fixity signature")
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
Source-code deprecations declarations
-%* *
-%*********************************************************
+* *
+*********************************************************
Check that the deprecated names are defined, are defined locally, and
that there are no duplicate deprecations.
It's only imported deprecations, dealt with in RnIfaces, that we
gather together.
+-}
-\begin{code}
-- checks that the deprecations are defined locally, and that there are no duplicates
rnSrcWarnDecls :: NameSet -> [LWarnDecl RdrName] -> RnM Warnings
rnSrcWarnDecls _ []
@@ -339,15 +335,14 @@ dupWarnDecl (L loc _) rdr_name
= vcat [ptext (sLit "Multiple warning declarations for") <+> quotes (ppr rdr_name),
ptext (sLit "also at ") <+> ppr loc]
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Annotation declarations}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnAnnDecl :: AnnDecl RdrName -> RnM (AnnDecl Name, FreeVars)
rnAnnDecl ann@(HsAnnotation provenance expr)
= addErrCtxt (annCtxt ann) $
@@ -360,30 +355,30 @@ rnAnnProvenance :: AnnProvenance RdrName -> RnM (AnnProvenance Name, FreeVars)
rnAnnProvenance provenance = do
provenance' <- traverse lookupTopBndrRn provenance
return (provenance', maybe emptyFVs unitFV (annProvenanceName_maybe provenance'))
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Default declarations}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnDefaultDecl :: DefaultDecl RdrName -> RnM (DefaultDecl Name, FreeVars)
rnDefaultDecl (DefaultDecl tys)
= do { (tys', fvs) <- rnLHsTypes doc_str tys
; return (DefaultDecl tys', fvs) }
where
doc_str = DefaultDeclCtx
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Foreign declarations}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnHsForeignDecl :: ForeignDecl RdrName -> RnM (ForeignDecl Name, FreeVars)
rnHsForeignDecl (ForeignImport name ty _ spec)
= do { topEnv :: HscEnv <- getTopEnv
@@ -425,17 +420,14 @@ patchCCallTarget packageKey callTarget =
StaticTarget label Nothing isFun -> StaticTarget label (Just packageKey) isFun
_ -> callTarget
-
-\end{code}
-
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Instance declarations}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnSrcInstDecl :: InstDecl RdrName -> RnM (InstDecl Name, FreeVars)
rnSrcInstDecl (TyFamInstD { tfid_inst = tfi })
= do { (tfi', fvs) <- rnTyFamInstDecl Nothing tfi
@@ -612,11 +604,9 @@ rnDataFamInstDecl mb_cls (DataFamInstDecl { dfid_tycon = tycon
, dfid_pats = pats'
, dfid_defn = defn'
, dfid_fvs = fvs }, fvs) }
-\end{code}
-Renaming of the associated types in instances.
+-- Renaming of the associated types in instances.
-\begin{code}
-- Rename associated type family decl in class
rnATDecls :: Name -- Class
-> [LFamilyDecl RdrName]
@@ -641,8 +631,8 @@ rnATInstDecls rnFun cls hs_tvs at_insts
where
tv_ns = hsLKiTyVarNames hs_tvs
-- See Note [Renaming associated types]
-\end{code}
+{-
Note [Renaming associated types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Check that the RHS of the decl mentions only type variables
@@ -669,9 +659,8 @@ can all be in scope (Trac #5862):
id :: Ob x a => x a a
(.) :: (Ob x a, Ob x b, Ob x c) => x b c -> x a b -> x a c
Here 'k' is in scope in the kind signature, just like 'x'.
+-}
-
-\begin{code}
extendTyVarEnvForMethodBinds :: [Name]
-> RnM (LHsBinds Name, FreeVars)
-> RnM (LHsBinds Name, FreeVars)
@@ -684,15 +673,15 @@ extendTyVarEnvForMethodBinds ktv_names thing_inside
extendTyVarEnvFVRn ktv_names thing_inside
else
thing_inside }
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Stand-alone deriving declarations}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnSrcDerivDecl :: DerivDecl RdrName -> RnM (DerivDecl Name, FreeVars)
rnSrcDerivDecl (DerivDecl ty overlap)
= do { standalone_deriv_ok <- xoptM Opt_StandaloneDeriving
@@ -704,15 +693,15 @@ standaloneDerivErr :: SDoc
standaloneDerivErr
= hang (ptext (sLit "Illegal standalone deriving declaration"))
2 (ptext (sLit "Use StandaloneDeriving to enable this extension"))
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Rules}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnHsRuleDecl :: RuleDecl RdrName -> RnM (RuleDecl Name, FreeVars)
rnHsRuleDecl (HsRule rule_name act vars lhs _fv_lhs rhs _fv_rhs)
= do { let rdr_names_w_loc = map get_var vars
@@ -749,8 +738,8 @@ bindHsRuleVars rule_name vars names thing_inside
go [] [] thing_inside = thing_inside []
go vars names _ = pprPanic "bindRuleVars" (ppr vars $$ ppr names)
-\end{code}
+{-
Note [Rule LHS validity checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Check the shape of a transformation rule LHS. Currently we only allow
@@ -764,8 +753,8 @@ with LHSs with a complicated desugaring (and hence unlikely to match);
But there are legitimate non-trivial args ei, like sections and
lambdas. So it seems simpler not to check at all, and that is why
check_e is commented out.
+-}
-\begin{code}
checkValidRule :: FastString -> [Name] -> LHsExpr Name -> NameSet -> RnM ()
checkValidRule rule_name ids lhs' fv_lhs'
= do { -- Check for the form of the LHS
@@ -821,16 +810,15 @@ badRuleLhsErr name lhs bad_e
ptext (sLit "in left-hand side:") <+> ppr lhs])]
$$
ptext (sLit "LHS must be of form (f e1 .. en) where f is not forall'd")
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Vectorisation declarations}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnHsVectDecl :: VectDecl RdrName -> RnM (VectDecl Name, FreeVars)
-- FIXME: For the moment, the right-hand side is restricted to be a variable as we cannot properly
-- typecheck a complex right-hand side without invoking 'vectType' from the vectoriser.
@@ -872,13 +860,13 @@ rnHsVectDecl (HsVectInstIn instTy)
}
rnHsVectDecl (HsVectInstOut _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectInstOut'"
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Type, class and iface sig declarations}
-%* *
-%*********************************************************
+* *
+*********************************************************
@rnTyDecl@ uses the `global name function' to create a new type
declaration in which local names have been replaced by their original
@@ -920,8 +908,8 @@ that live on other packages. Since we don't have mutual dependencies across
packages, it is safe not to add the dependencies on the .hs-boot stuff to B2.
See also Note [Grouping of type and class declarations] in TcTyClsDecls.
+-}
-\begin{code}
isInPackage :: PackageKey -> Name -> Bool
isInPackage pkgId nm = case nameModule_maybe nm of
Nothing -> False
@@ -1196,8 +1184,7 @@ rnFamDecl mb_cls (FamilyDecl { fdLName = tycon, fdTyVars = tyvars
rn_info OpenTypeFamily = return (OpenTypeFamily, emptyFVs)
rn_info DataFamily = return (DataFamily, emptyFVs)
-\end{code}
-
+{-
Note [Stupid theta]
~~~~~~~~~~~~~~~~~~~
Trac #3850 complains about a regression wrt 6.10 for
@@ -1206,9 +1193,8 @@ There is no reason not to allow the stupid theta if there are no data
constructors. It's still stupid, but does no harm, and I don't want
to cause programs to break unnecessarily (notably HList). So if there
are no data constructors we allow h98_style = True
+-}
-
-\begin{code}
depAnalTyClDecls :: [(LTyClDecl Name, FreeVars)] -> [SCC (LTyClDecl Name)]
-- See Note [Dependency analysis of type and class decls]
depAnalTyClDecls ds_w_fvs
@@ -1236,8 +1222,8 @@ depAnalTyClDecls ds_w_fvs
-> do L _ dc <- cons
return $ zip (map unLoc $ con_names dc) (repeat data_name)
_ -> []
-\end{code}
+{-
Note [Dependency analysis of type and class decls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to do dependency analysis on type and class declarations
@@ -1281,13 +1267,13 @@ the case of staged module compilation (Template Haskell, GHCi).
See #8485. With the new lookup process (which includes types declared in other
modules), we get better error messages, too.
-%*********************************************************
-%* *
+*********************************************************
+* *
\subsection{Support code for type/data declarations}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
---------------
badAssocRhs :: [Name] -> RnM ()
badAssocRhs ns
@@ -1396,17 +1382,18 @@ deprecRecSyntax decl
badRecResTy :: SDoc -> SDoc
badRecResTy doc = ptext (sLit "Malformed constructor signature") $$ doc
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Support code for type/data declarations}
-%* *
-%*********************************************************
+* *
+*********************************************************
Get the mapping from constructors to fields for this module.
It's convenient to do this after the data type decls have been renamed
-\begin{code}
+-}
+
extendRecordFieldEnv :: [TyClGroup RdrName] -> [LInstDecl RdrName] -> TcM TcGblEnv
extendRecordFieldEnv tycl_decls inst_decls
= do { tcg_env <- getGblEnv
@@ -1439,15 +1426,15 @@ extendRecordFieldEnv tycl_decls inst_decls
fld_set' = extendNameSetList fld_set flds'
; return $ (RecFields env' fld_set') }
get_con _ env = return env
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Support code to rename types}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnFds :: [Located (FunDep RdrName)] -> RnM [Located (FunDep Name)]
rnFds fds
= mapM (wrapLocM rn_fds) fds
@@ -1462,21 +1449,20 @@ rnHsTyVars tvs = mapM rnHsTyVar tvs
rnHsTyVar :: RdrName -> RnM Name
rnHsTyVar tyvar = lookupOccRn tyvar
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
findSplice
-%* *
-%*********************************************************
+* *
+*********************************************************
This code marches down the declarations, looking for the first
Template Haskell splice. As it does so it
a) groups the declarations into a HsGroup
b) runs any top-level quasi-quotes
+-}
-\begin{code}
findSplice :: [LHsDecl RdrName] -> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
findSplice ds = addl emptyRdrGroup ds
@@ -1567,4 +1553,3 @@ add_bind _ (ValBindsOut {}) = panic "RdrHsSyn:add_bind"
add_sig :: LSig a -> HsValBinds a -> HsValBinds a
add_sig s (ValBindsIn bs sigs) = ValBindsIn bs (s:sigs)
add_sig _ (ValBindsOut {}) = panic "RdrHsSyn:add_sig"
-\end{code}
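Among the RnSource hunks above, depAnalTyClDecls groups type and class declarations into strongly connected components before renaming proceeds (see Note [Dependency analysis of type and class decls]). A minimal stand-alone illustration of that step, using Data.Graph from containers on invented declaration names rather than GHC's own types:

    import Data.Graph (SCC (..), stronglyConnComp)

    -- Each declaration is listed with the names of the declarations it uses.
    depAnal :: [(String, [String])] -> [SCC String]
    depAnal decls = stronglyConnComp
      [ (name, name, uses) | (name, uses) <- decls ]

    -- depAnal [("T", ["S"]), ("S", ["T"]), ("U", [])] puts the mutually
    -- recursive pair into one CyclicSCC and "U" into an AcyclicSCC.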
diff --git a/compiler/rename/RnSplice.lhs b/compiler/rename/RnSplice.hs
index b0c81b0a92..e147e6a883 100644
--- a/compiler/rename/RnSplice.lhs
+++ b/compiler/rename/RnSplice.hs
@@ -1,4 +1,3 @@
-\begin{code}
{-# LANGUAGE CPP #-}
module RnSplice (
@@ -37,9 +36,7 @@ import {-# SOURCE #-} RnExpr ( rnLExpr )
import {-# SOURCE #-} TcExpr ( tcMonoExpr )
import {-# SOURCE #-} TcSplice ( runMetaD, runMetaE, runMetaP, runMetaT, tcTopSpliceExpr )
#endif
-\end{code}
-\begin{code}
#ifndef GHCI
rnBracket :: HsExpr RdrName -> HsBracket RdrName -> RnM (HsExpr Name, FreeVars)
rnBracket e _ = failTH e "Template Haskell bracket"
@@ -60,13 +57,13 @@ rnSplicePat e = failTH e "Template Haskell pattern splice"
rnSpliceDecl :: SpliceDecl RdrName -> RnM (SpliceDecl Name, FreeVars)
rnSpliceDecl e = failTH e "Template Haskell declaration splice"
#else
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
Splices
-%* *
-%*********************************************************
+* *
+*********************************************************
Note [Free variables of typed splices]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -95,8 +92,8 @@ It's important to wrap renamer calls in checkNoErrs, because the
renamer does not fail for out of scope variables etc. Instead it
returns a bogus term/type, so that it can report more than one error.
We don't want the type checker to see these bogus unbound variables.
+-}
-\begin{code}
rnSpliceGen :: Bool -- Typed splice?
-> (HsSplice Name -> RnM (a, FreeVars)) -- Outside brackets, run splice
-> (HsSplice Name -> (PendingRnSplice, a)) -- Inside brackets, make it pending
@@ -206,8 +203,7 @@ rnSpliceType splice k
}
; return (unLoc hs_ty3, fvs) }
-\end{code}
-
+{-
Note [rnSplicePat]
~~~~~~~~~~~~~~~~~~
Renaming a pattern splice is a bit tricky, because we need the variables
@@ -229,8 +225,7 @@ In any case, when we're done in rnSplicePat, we'll either have a
Pat RdrName (the result of running a top-level splice) or a Pat Name
(the renamed nested splice). Thus, the awkward return type of
rnSplicePat.
-
-\begin{code}
+-}
-- | Rename a splice pattern. See Note [rnSplicePat]
rnSplicePat :: HsSplice RdrName -> RnM ( Either (Pat RdrName) (Pat Name)
@@ -265,9 +260,7 @@ rnSpliceDecl (SpliceDecl (L loc splice) flg)
= (PendingRnDeclSplice (PendSplice n e), SpliceDecl(L loc rn_splice) flg)
run_decl_splice rn_splice = pprPanic "rnSpliceDecl" (ppr rn_splice)
-\end{code}
-\begin{code}
rnTopSpliceDecls :: HsSplice RdrName -> RnM ([LHsDecl RdrName], FreeVars)
-- Declaration splice at the very top level of the module
rnTopSpliceDecls (HsSplice _ expr'')
@@ -285,15 +278,15 @@ rnTopSpliceDecls (HsSplice _ expr'')
(ppr (getLoc expr) $$ (vcat (map ppr decls)))
; return (decls,fvs) }
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Template Haskell brackets
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
rnBracket :: HsExpr RdrName -> HsBracket RdrName -> RnM (HsExpr Name, FreeVars)
rnBracket e br_body
= addErrCtxt (quotationCtxtDoc br_body) $
@@ -401,9 +394,7 @@ rn_bracket _ (DecBrG _) = panic "rn_bracket: unexpected DecBrG"
rn_bracket _ (TExpBr e) = do { (e', fvs) <- rnLExpr e
; return (TExpBr e', fvs) }
-\end{code}
-\begin{code}
spliceCtxt :: HsExpr RdrName -> SDoc
spliceCtxt expr= hang (ptext (sLit "In the splice:")) 2 (ppr expr)
@@ -451,9 +442,7 @@ quotationCtxtDoc br_body
-- 2 (char '$' <> pprParendExpr expr)
-- , ptext (sLit "To see what the splice expanded to, use -ddump-splices") ]
#endif
-\end{code}
-\begin{code}
checkThLocalName :: Name -> RnM ()
#ifndef GHCI /* GHCI and TH is off */
--------------------------------------
@@ -462,7 +451,7 @@ checkThLocalName _name
= return ()
#else /* GHCI and TH is on */
-checkThLocalName name
+checkThLocalName name
= do { traceRn (text "checkThLocalName" <+> ppr name)
; mb_local_use <- getStageAndBindLevel name
; case mb_local_use of {
@@ -510,8 +499,8 @@ checkCrossStageLifting top_lvl name (Brack _ (RnPendingUntyped ps_var))
checkCrossStageLifting _ _ _ = return ()
#endif /* GHCI */
-\end{code}
+{-
Note [Keeping things alive for Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
@@ -571,4 +560,4 @@ Examples:
\y. [| \x. $(f 'y) |] -- Not ok (bind =1, use = 1)
[| \x. $(f 'x) |] -- OK (bind = 2, use = 1)
-
+-}
diff --git a/compiler/rename/RnSplice.lhs-boot b/compiler/rename/RnSplice.hs-boot
index de6da775d2..ece78f8408 100644
--- a/compiler/rename/RnSplice.lhs-boot
+++ b/compiler/rename/RnSplice.hs-boot
@@ -1,4 +1,3 @@
-\begin{code}
module RnSplice where
import HsSyn
@@ -14,4 +13,3 @@ rnSpliceType :: HsSplice RdrName -> PostTc Name Kind
rnSplicePat :: HsSplice RdrName -> RnM ( Either (Pat RdrName) (Pat Name)
, FreeVars )
rnSpliceDecl :: SpliceDecl RdrName -> RnM (SpliceDecl Name, FreeVars)
-\end{code}
diff --git a/compiler/rename/RnTypes.lhs b/compiler/rename/RnTypes.hs
index d0877dc423..9eb2581748 100644
--- a/compiler/rename/RnTypes.lhs
+++ b/compiler/rename/RnTypes.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[RnSource]{Main pass of renamer}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module RnTypes (
@@ -50,18 +50,18 @@ import Data.List ( nub, nubBy )
import Control.Monad ( unless, when )
#include "HsVersions.h"
-\end{code}
+{-
These type renamers are in a separate module, rather than in (say) RnSource,
to break several loops.
-%*********************************************************
-%* *
+*********************************************************
+* *
\subsection{Renaming types}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnHsSigType :: SDoc -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
-- rnHsSigType is used for source-language type signatures,
-- which use *implicit* universal quantification.
@@ -81,8 +81,8 @@ rnLHsInstType doc_str ty
badInstTy :: LHsType RdrName -> SDoc
badInstTy ty = ptext (sLit "Malformed instance:") <+> ppr ty
-\end{code}
+{-
rnHsType is here because we call it from loadInstDecl, and I didn't
want a gratuitous knot.
@@ -104,8 +104,8 @@ f :: forall a. a -> (() => b) binds "a" and "b"
The -fwarn-context-quantification flag warns about
this situation. See rnHsTyKi for case HsForAllTy Qualified.
+-}
-\begin{code}
rnLHsTyKi :: Bool -- True <=> renaming a type, False <=> a kind
-> HsDocContext -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnLHsTyKi isType doc (L loc ty)
@@ -299,7 +299,7 @@ rnHsTyKi isType doc (HsQuasiQuoteTy qq)
= ASSERT( isType )
do { ty <- runQuasiQuoteType qq
-- Wrap the result of the quasi-quoter in parens so that we don't
- -- lose the outermost location set by runQuasiQuote (#7918)
+ -- lose the outermost location set by runQuasiQuote (#7918)
; rnHsType doc (HsParTy ty) }
rnHsTyKi isType _ (HsCoreTy ty)
@@ -344,10 +344,7 @@ rnTyVar is_type rdr_name
rnLHsTypes :: HsDocContext -> [LHsType RdrName]
-> RnM ([LHsType Name], FreeVars)
rnLHsTypes doc tys = mapFvRn (rnLHsType doc) tys
-\end{code}
-
-\begin{code}
rnForAll :: HsDocContext -> HsExplicitFlag
-> Maybe SrcSpan -- Location of an extra-constraints wildcard
-> [RdrName] -- Kind variables
@@ -515,15 +512,15 @@ dataKindsErr is_type thing
where
what | is_type = ptext (sLit "type")
| otherwise = ptext (sLit "kind")
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Contexts and predicates}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
rnConDeclFields :: HsDocContext -> [LConDeclField RdrName]
-> RnM ([LConDeclField Name], FreeVars)
rnConDeclFields doc fields = mapFvRn (rnField doc) fields
@@ -540,14 +537,13 @@ rnContext :: HsDocContext -> LHsContext RdrName -> RnM (LHsContext Name, FreeVar
rnContext doc (L loc cxt)
= do { (cxt', fvs) <- rnLHsTypes doc cxt
; return (L loc cxt', fvs) }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Fixities and precedence parsing
-%* *
-%************************************************************************
+* *
+************************************************************************
@mkOpAppRn@ deals with operator fixities. The argument expressions
are assumed to be already correctly arranged. It needs the fixities
@@ -566,8 +562,8 @@ is always read in as
mkHsOpTyRn rearranges where necessary. The two arguments
have already been renamed and rearranged. It's made rather tiresome
by the presence of ->, which is a separate syntactic construct.
+-}
-\begin{code}
---------------
-- Building (ty1 `op1` (ty21 `op2` ty22))
mkHsOpTyRn :: (LHsType Name -> LHsType Name -> HsType Name)
@@ -795,11 +791,9 @@ checkSectionPrec direction section op arg
|| (op_prec == arg_prec && direction == assoc))
(sectionPrecErr (op_name, op_fix)
(arg_op, arg_fix) section)
-\end{code}
-Precedence-related error messages
+-- Precedence-related error messages
-\begin{code}
precParseErr :: (Name, Fixity) -> (Name, Fixity) -> RnM ()
precParseErr op1@(n1,_) op2@(n2,_)
| isUnboundName n1 || isUnboundName n2
@@ -825,15 +819,15 @@ ppr_opfix (op, fixity) = pp_op <+> brackets (ppr fixity)
where
pp_op | op == negateName = ptext (sLit "prefix `-'")
| otherwise = quotes (ppr op)
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Errors}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
warnUnusedForAlls :: SDoc -> LHsTyVarBndrs RdrName -> [RdrName] -> TcM ()
warnUnusedForAlls in_doc bound mentioned_rdrs
= whenWOptM Opt_WarnUnusedMatches $
@@ -874,13 +868,13 @@ opTyErr op ty@(HsOpTy ty1 _ _)
forall_head (L _ (HsAppTy ty _)) = forall_head ty
forall_head _other = False
opTyErr _ ty = pprPanic "opTyErr: Not an op" (ppr ty)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Finding the free type variables of a (HsType RdrName)
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Kind and type-variable binders]
@@ -910,8 +904,8 @@ In general we want to walk over a type, and find
Hence we returns a pair (kind-vars, type vars)
See also Note [HsBSig binder lists] in HsTypes
+-}
-\begin{code}
type FreeKiTyVars = ([RdrName], [RdrName])
filterInScope :: LocalRdrEnv -> FreeKiTyVars -> FreeKiTyVars
@@ -1082,5 +1076,3 @@ extractWildcards ty
return (nwcs, awcs, tys')
goList f tys = do (nwcs, awcs, tys') <- extList tys
return (nwcs, awcs, L l $ f tys')
-
-\end{code}
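The fixity re-association performed by mkHsOpTyRn/mkOpAppRn (see the notes in the hunk above) can be pictured with a small standalone sketch. Everything below -- the OpTree type, the Fixity representation and mkOpApp itself -- is invented for illustration and is not GHC code; it only shows the rotation rule for a right-nested operator application under precedence and associativity.

module FixitySketch where

data Assoc  = LeftAssoc | RightAssoc deriving (Eq, Show)
data Fixity = Fixity Int Assoc       deriving (Eq, Show)

data OpTree
  = Leaf String
  | Op String Fixity OpTree OpTree
  deriving (Eq, Show)

-- Build (t1 `op1` t2).  If t2 is itself (t21 `op2` t22) and op1 binds more
-- tightly (or equally, associating to the left), rotate to
-- ((t1 `op1` t21) `op2` t22).  Mixed associativity at equal precedence is
-- an error in real Haskell and is ignored in this sketch.
mkOpApp :: String -> Fixity -> OpTree -> OpTree -> OpTree
mkOpApp op1 fix1@(Fixity p1 a1) t1 (Op op2 fix2@(Fixity p2 _) t21 t22)
  | p1 > p2 || (p1 == p2 && a1 == LeftAssoc)
  = Op op2 fix2 (mkOpApp op1 fix1 t1 t21) t22
mkOpApp op1 fix1 t1 t2 = Op op1 fix1 t1 t2

-- mkOpApp "*" (Fixity 7 LeftAssoc) (Leaf "a")
--         (Op "+" (Fixity 6 LeftAssoc) (Leaf "b") (Leaf "c"))
--   ==>  Op "+" _ (Op "*" _ (Leaf "a") (Leaf "b")) (Leaf "c")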
diff --git a/compiler/simplCore/CSE.lhs b/compiler/simplCore/CSE.hs
index ccd4b2e721..7dbf892f9e 100644
--- a/compiler/simplCore/CSE.lhs
+++ b/compiler/simplCore/CSE.hs
@@ -1,9 +1,9 @@
-%
-% (c) The AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The AQUA Project, Glasgow University, 1993-1998
+
\section{Common subexpression}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module CSE (cseProgram) where
@@ -22,9 +22,8 @@ import BasicTypes ( isAlwaysActive )
import TrieMap
import Data.List
-\end{code}
-
+{-
Simple common sub-expression
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we see
@@ -146,13 +145,13 @@ Consider
Then we can CSE the inner (f x) to y. In fact 'case' is like a strict
let-binding, and we can use cseRhs for dealing with the scrutinee.
-%************************************************************************
-%* *
+************************************************************************
+* *
\section{Common subexpression}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
cseProgram :: CoreProgram -> CoreProgram
cseProgram binds = snd (mapAccumL cseBind emptyCSEnv binds)
@@ -256,16 +255,15 @@ cseAlts env scrut' bndr bndr' alts
= (con, args', tryForCSE env' rhs)
where
(env', args') = addBinders alt_env args
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\section{The CSE envt}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type InExpr = CoreExpr -- Pre-cloning
type InBndr = CoreBndr
type InAlt = CoreAlt
@@ -313,4 +311,3 @@ addRecBinders :: CSEnv -> [Id] -> (CSEnv, [Id])
addRecBinders cse vs = (cse { cs_subst = sub' }, vs')
where
(sub', vs') = substRecBndrs (cs_subst cse) vs
-\end{code}
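As a rough source-level illustration of the "simple common sub-expression" idea described in the CSE notes above (replacing a repeated RHS with its let-bound name), here is a toy pass over an invented Expr type. It is a sketch only: it ignores shadowing and recursive lets, and none of the names below come from GHC.

module CseSketch where

-- Toy expression type, invented for this sketch.
data Expr
  = Var String
  | App Expr Expr
  | Let String Expr Expr
  deriving (Eq, Show)

-- After visiting a let, rewrite occurrences of the (already cleaned-up) RHS
-- in the body to the let-bound variable.
cse :: Expr -> Expr
cse (Let x rhs body) = Let x rhs' (replace rhs' (Var x) (cse body))
  where rhs' = cse rhs
cse (App f a)        = App (cse f) (cse a)
cse e                = e

-- Replace every occurrence of 'pat' by 'new' (naive: no binder hygiene).
replace :: Expr -> Expr -> Expr -> Expr
replace pat new e
  | e == pat  = new
  | otherwise = case e of
      App f a   -> App (replace pat new f) (replace pat new a)
      Let x r b -> Let x (replace pat new r) (replace pat new b)
      _         -> e

-- cse (let y = f x in h (f x) (f x))  ==>  let y = f x in h y y
example :: Expr
example = Let "y" (App (Var "f") (Var "x"))
              (App (App (Var "h") (App (Var "f") (Var "x")))
                   (App (Var "f") (Var "x")))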
diff --git a/compiler/simplCore/CoreMonad.lhs b/compiler/simplCore/CoreMonad.hs
index c175b07384..d50027c6ea 100644
--- a/compiler/simplCore/CoreMonad.lhs
+++ b/compiler/simplCore/CoreMonad.hs
@@ -1,9 +1,9 @@
-%
-% (c) The AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The AQUA Project, Glasgow University, 1993-1998
+
\section[CoreMonad]{The core pipeline monad}
+-}
-\begin{code}
{-# LANGUAGE CPP, UndecidableInstances #-}
module CoreMonad (
@@ -118,19 +118,19 @@ saveLinkerGlobals = return ()
restoreLinkerGlobals :: () -> IO ()
restoreLinkerGlobals () = return ()
#endif
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Debug output
-%* *
-%************************************************************************
+* *
+************************************************************************
These functions are not CoreM monad stuff, but they probably ought to
be, and it makes a convenient place for them. They print out
stuff before and after core passes, and do Core Lint when necessary.
+-}
-\begin{code}
showPass :: CoreToDo -> CoreM ()
showPass pass = do { dflags <- getDynFlags
; liftIO $ showPassIO dflags pass }
@@ -286,17 +286,15 @@ interactiveInScope hsc_env
-- I think it's because of the GHCi debugger, which can bind variables
-- f :: [t] -> [t]
-- where t is a RuntimeUnk (see TcType)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
The CoreToDo type and related types
Abstraction of core-to-core passes to run.
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
data CoreToDo -- These are diff core-to-core passes,
-- which may be invoked in any order,
@@ -330,9 +328,6 @@ data CoreToDo -- These are diff core-to-core passes,
| CoreTidy
| CorePrep
-\end{code}
-
-\begin{code}
coreDumpFlag :: CoreToDo -> Maybe DumpFlag
coreDumpFlag (CoreDoSimplify {}) = Just Opt_D_verbose_core2core
coreDumpFlag (CoreDoPluginPass {}) = Just Opt_D_verbose_core2core
@@ -384,9 +379,7 @@ pprPassDetails :: CoreToDo -> SDoc
pprPassDetails (CoreDoSimplify n md) = vcat [ ptext (sLit "Max iterations =") <+> int n
, ppr md ]
pprPassDetails _ = Outputable.empty
-\end{code}
-\begin{code}
data SimplifierMode -- See comments in SimplMonad
= SimplMode
{ sm_names :: [String] -- Name(s) of the phase
@@ -410,10 +403,7 @@ instance Outputable SimplifierMode where
, pp_flag cc (sLit "case-of-case") ])
where
pp_flag f s = ppUnless f (ptext (sLit "no")) <+> ptext s
-\end{code}
-
-\begin{code}
data FloatOutSwitches = FloatOutSwitches {
floatOutLambdas :: Maybe Int, -- ^ Just n <=> float lambdas to top level, if
-- doing so will abstract over n or fewer
@@ -450,9 +440,7 @@ runMaybe :: Maybe a -> (a -> CoreToDo) -> CoreToDo
runMaybe (Just x) f = f x
runMaybe Nothing _ = CoreDoNothing
-\end{code}
-
-
+{-
Note [RULEs enabled in SimplGently]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
RULES are enabled when doing "gentle" simplification. Two reasons:
@@ -470,13 +458,13 @@ But watch out: list fusion can prevent floating. So use phase control
to switch off those rules until after floating.
-%************************************************************************
-%* *
+************************************************************************
+* *
Types for Plugins
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A description of the plugin pass itself
type PluginPass = ModGuts -> CoreM ModGuts
@@ -484,16 +472,15 @@ bindsOnlyPass :: (CoreProgram -> CoreM CoreProgram) -> ModGuts -> CoreM ModGuts
bindsOnlyPass pass guts
= do { binds' <- pass (mg_binds guts)
; return (guts { mg_binds = binds' }) }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Counting and logging
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
verboseSimplStats :: Bool
verboseSimplStats = opt_PprStyle_Debug -- For now, anyway
@@ -504,9 +491,7 @@ pprSimplCount :: SimplCount -> SDoc
doSimplTick :: DynFlags -> Tick -> SimplCount -> SimplCount
doFreeSimplTick :: Tick -> SimplCount -> SimplCount
plusSimplCount :: SimplCount -> SimplCount -> SimplCount
-\end{code}
-\begin{code}
data SimplCount
= VerySimplCount !Int -- Used when don't want detailed stats
@@ -608,10 +593,7 @@ pprTickGroup group@((tick1,_):_)
-- flip as we want largest first
| (tick,n) <- sortBy (flip (comparing snd)) group])
pprTickGroup [] = panic "pprTickGroup"
-\end{code}
-
-\begin{code}
data Tick
= PreInlineUnconditionally Id
| PostInlineUnconditionally Id
@@ -725,16 +707,15 @@ cmpEqTick (CaseElim a) (CaseElim b) = a `com
cmpEqTick (CaseIdentity a) (CaseIdentity b) = a `compare` b
cmpEqTick (FillInCaseDefault a) (FillInCaseDefault b) = a `compare` b
cmpEqTick _ _ = EQ
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Monad and carried data structure definitions
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
newtype CoreState = CoreState {
cs_uniq_supply :: UniqSupply
}
@@ -841,16 +822,13 @@ runCoreM hsc_env rule_base us mod print_unqual m = do
extract :: (a, CoreState, CoreWriter) -> (a, SimplCount)
extract (value, _, writer) = (value, cw_simpl_count writer)
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Core combinators, not exported
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
nop :: CoreState -> a -> CoreIOEnv (a, CoreState, CoreWriter)
nop s x = do
@@ -869,11 +847,7 @@ modifyS f = CoreM (\s -> nop (f s) ())
write :: CoreWriter -> CoreM ()
write w = CoreM (\s -> return ((), s, w))
-\end{code}
-
-\subsection{Lifting IO into the monad}
-
-\begin{code}
+-- \subsection{Lifting IO into the monad}
-- | Lift an 'IOEnv' operation into 'CoreM'
liftIOEnv :: CoreIOEnv a -> CoreM a
@@ -886,16 +860,14 @@ instance MonadIO CoreM where
liftIOWithCount :: IO (SimplCount, a) -> CoreM a
liftIOWithCount what = liftIO what >>= (\(count, x) -> addSimplCount count >> return x)
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Reader, writer and state accessors
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
getHscEnv :: CoreM HscEnv
getHscEnv = read cr_hsc_env
@@ -928,13 +900,13 @@ getPackageFamInstEnv = do
hsc_env <- getHscEnv
eps <- liftIO $ hscEPS hsc_env
return $ eps_fam_inst_env eps
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Initializing globals
-%* *
-%************************************************************************
+* *
+************************************************************************
This is a rather annoying function. When a plugin is loaded, it currently
gets linked against a *newly loaded* copy of the GHC package. This would
@@ -973,8 +945,8 @@ will have to say `reinitializeGlobals` before it does anything, but never mind.
I've threaded the cr_globals through CoreM rather than giving them as an
argument to the plugin function so that we can turn this function into
(return ()) without breaking any plugins when we eventually get 1. working.
+-}
-\begin{code}
reinitializeGlobals :: CoreM ()
reinitializeGlobals = do
linker_globals <- read cr_globals
@@ -982,15 +954,15 @@ reinitializeGlobals = do
let dflags = hsc_dflags hsc_env
liftIO $ restoreLinkerGlobals linker_globals
liftIO $ setUnsafeGlobalDynFlags dflags
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Dealing with annotations
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Get all annotations of a given type. This happens lazily, that is
-- no deserialization will take place until the [a] is actually demanded and
-- the [a] can also be empty (the UniqFM is not filtered).
@@ -1011,8 +983,7 @@ getFirstAnnotations deserialize guts
= liftM (mapUFM head . filterUFM (not . null))
$ getAnnotations deserialize guts
-\end{code}
-
+{-
Note [Annotations]
~~~~~~~~~~~~~~~~~~
A Core-to-Core pass that wants to make use of annotations calls
@@ -1031,13 +1002,12 @@ only want to deserialise every annotation once, we would have to build a cache
for every module in the HPT. In the end, it's probably not worth it as long as
we aren't using annotations heavily.
-%************************************************************************
-%* *
+************************************************************************
+* *
Direct screen output
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
msg :: (DynFlags -> SDoc -> IO ()) -> SDoc -> CoreM ()
msg how doc = do
@@ -1079,29 +1049,28 @@ debugTraceMsg = msg (flip Err.debugTraceMsg 3)
-- | Show some labelled 'SDoc' if a particular flag is set or at a verbosity level of @-v -ddump-most@ or higher
dumpIfSet_dyn :: DumpFlag -> String -> SDoc -> CoreM ()
dumpIfSet_dyn flag str = msg (\dflags -> Err.dumpIfSet_dyn dflags flag str)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Finding TyThings
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance MonadThings CoreM where
lookupThing name = do
hsc_env <- getHscEnv
liftIO $ initTcForLookup hsc_env (tcLookupGlobal name)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Template Haskell interoperability
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
#ifdef GHCI
-- | Attempt to convert a Template Haskell name to one that GHC can
-- understand. Original TH names such as those you get when you use
@@ -1114,4 +1083,3 @@ thNameToGhcName th_name = do
hsc_env <- getHscEnv
liftIO $ initTcForLookup hsc_env (lookupThName_maybe th_name)
#endif
-\end{code}
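The nop/read/write combinators in the CoreMonad hunks above thread a state and a writer by hand through functions of type CoreState -> (a, CoreState, CoreWriter). A minimal analogue of that shape, with hypothetical names and no IO layer, might look like the following; it is a sketch of the idea, not GHC's definition of CoreM.

module CoreMSketch where

newtype CoreMish s w a = CoreMish { runCoreMish :: s -> (a, s, w) }

instance Functor (CoreMish s w) where
  fmap f m = CoreMish $ \s ->
    let (a, s', w) = runCoreMish m s in (f a, s', w)

instance Monoid w => Applicative (CoreMish s w) where
  pure a    = CoreMish $ \s -> (a, s, mempty)
  mf <*> ma = CoreMish $ \s ->
    let (f, s1, w1) = runCoreMish mf s
        (a, s2, w2) = runCoreMish ma s1
    in (f a, s2, w1 `mappend` w2)

instance Monoid w => Monad (CoreMish s w) where
  m >>= k = CoreMish $ \s ->
    let (a, s1, w1) = runCoreMish m s
        (b, s2, w2) = runCoreMish (k a) s1
    in (b, s2, w1 `mappend` w2)

-- Emit some accumulated output (compare 'write' in the hunks above).
tell :: w -> CoreMish s w ()
tell w = CoreMish $ \s -> ((), s, w)

-- Read the threaded state (compare the state accessors above).
getS :: Monoid w => CoreMish s w s
getS = CoreMish $ \s -> (s, s, mempty)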
diff --git a/compiler/simplCore/FloatIn.lhs b/compiler/simplCore/FloatIn.hs
index 13d03efa24..34252881ab 100644
--- a/compiler/simplCore/FloatIn.lhs
+++ b/compiler/simplCore/FloatIn.hs
@@ -1,17 +1,17 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
-%************************************************************************
-%* *
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
+************************************************************************
+* *
\section[FloatIn]{Floating Inwards pass}
-%* *
-%************************************************************************
+* *
+************************************************************************
The main purpose of @floatInwards@ is floating into branches of a
case, so that we don't allocate things, save them on the stack, and
then discover that they aren't needed in the chosen branch.
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module FloatIn ( floatInwards ) where
@@ -31,12 +31,12 @@ import UniqFM
import DynFlags
import Outputable
import Data.List( mapAccumL )
-\end{code}
+{-
Top-level interface function, @floatInwards@. Note that we do not
actually float any bindings downwards from the top-level.
+-}
-\begin{code}
floatInwards :: DynFlags -> CoreProgram -> CoreProgram
floatInwards dflags = map fi_top_bind
where
@@ -44,13 +44,13 @@ floatInwards dflags = map fi_top_bind
= NonRec binder (fiExpr dflags [] (freeVars rhs))
fi_top_bind (Rec pairs)
= Rec [ (b, fiExpr dflags [] (freeVars rhs)) | (b, rhs) <- pairs ]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Mail from Andr\'e [edited]}
-%* *
-%************************************************************************
+* *
+************************************************************************
{\em Will wrote: What??? I thought the idea was to float as far
inwards as possible, no matter what. This is dropping all bindings
@@ -110,13 +110,13 @@ Also, even if a is not found to be strict in the new context and is
still left as a let, if the branch is not taken (or b is not entered)
the closure for a is not built.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Main floating-inwards code}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type FreeVarSet = IdSet
type BoundVarSet = IdSet
@@ -143,13 +143,13 @@ fiExpr dflags to_drop (_, AnnCast expr (fvs_co, co))
Cast (fiExpr dflags e_drop expr) co
where
[drop_here, e_drop, co_drop] = sepBindsByDropPoint dflags False [freeVarsOf expr, fvs_co] to_drop
-\end{code}
+{-
Applications: we do float inside applications, mainly because we
need to get at all the arguments. The next simplifier run will
pull out any silly ones.
+-}
-\begin{code}
fiExpr dflags to_drop ann_expr@(_,AnnApp {})
= wrapFloats drop_here $ wrapFloats extra_drop $
mkApps (fiExpr dflags fun_drop ann_fun)
@@ -175,8 +175,8 @@ fiExpr dflags to_drop ann_expr@(_,AnnApp {})
drop_here : extra_drop : fun_drop : arg_drops
= sepBindsByDropPoint dflags False (extra_fvs : fun_fvs : arg_fvs) to_drop
-\end{code}
+{-
Note [Do not destroy the let/app invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Watch out for
@@ -223,8 +223,8 @@ This is what the 'go' function in the AnnLam case is doing.
Urk! if all are tyvars, and we don't float in, we may miss an
opportunity to float inside a nested case branch
+-}
-\begin{code}
fiExpr dflags to_drop lam@(_, AnnLam _ _)
| okToFloatInside bndrs -- Float in
-- NB: Must line up with noFloatIntoRhs (AnnLam...); see Trac #7088
@@ -235,14 +235,14 @@ fiExpr dflags to_drop lam@(_, AnnLam _ _)
where
(bndrs, body) = collectAnnBndrs lam
-\end{code}
+{-
We don't float lets inwards past an SCC.
ToDo: keep info on current cc, and when passing
one, if it is not the same, annotate all lets in binds with current
cc, change current cc to the new one and float binds into expr.
+-}
-\begin{code}
fiExpr dflags to_drop (_, AnnTick tickish expr)
| tickishScoped tickish
= -- Wimp out for now - we could push values in
@@ -250,8 +250,8 @@ fiExpr dflags to_drop (_, AnnTick tickish expr)
| otherwise
= Tick tickish (fiExpr dflags to_drop expr)
-\end{code}
+{-
For @Lets@, the possible ``drop points'' for the \tr{to_drop}
bindings are: (a)~in the body, (b1)~in the RHS of a NonRec binding,
or~(b2), in each of the RHSs of the pairs of a @Rec@.
@@ -300,9 +300,8 @@ Here y is not free in rhs or body; but we still want to dump bindings
that bind y outside the let. So we augment extra_fvs with the
idRuleAndUnfoldingVars of x. No need for type variables, hence not using
idFreeVars.
+-}
-
-\begin{code}
fiExpr dflags to_drop (_,AnnLet (AnnNonRec id rhs@(rhs_fvs, ann_rhs)) body)
= fiExpr dflags new_to_drop body
where
@@ -365,8 +364,8 @@ fiExpr dflags to_drop (_,AnnLet (AnnRec bindings) body)
fi_bind to_drops pairs
= [ (binder, fiExpr dflags to_drop rhs)
| ((binder, rhs), to_drop) <- zipEqual "fi_bind" pairs to_drops ]
-\end{code}
+{-
For @Case@, the possible ``drop points'' for the \tr{to_drop}
bindings are: (a)~inside the scrutinee, (b)~inside one of the
alternatives/default [default FVs always {\em first}!].
@@ -378,8 +377,8 @@ inward. SIMD primops for unpacking SIMD vectors into an unboxed tuple of unboxed
scalars also need to be floated inward, but unpacks have a single non-DEFAULT
alternative that binds the elements of the tuple. We now therefore also support
floating in cases with a single alternative that may bind values.
+-}
-\begin{code}
fiExpr dflags to_drop (_, AnnCase scrut case_bndr _ [(con,alt_bndrs,rhs)])
| isUnLiftedType (idType case_bndr)
, exprOkForSideEffects (deAnnotate scrut)
@@ -448,14 +447,13 @@ noFloatIntoExpr (AnnLam bndr e)
noFloatIntoExpr rhs = exprIsExpandable (deAnnotate' rhs)
-- We'd just float right back out again...
-- Should match the test in SimplEnv.doFloatFromRhs
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{@sepBindsByDropPoint@}
-%* *
-%************************************************************************
+* *
+************************************************************************
This is the crucial function. The idea is: We have a wad of bindings
that we'd like to distribute inside a collection of {\em drop points};
@@ -471,8 +469,8 @@ then it has to go in a you-must-drop-it-above-all-these-drop-points
point.
We have to maintain the order on these drop-point-related lists.
+-}
-\begin{code}
sepBindsByDropPoint
:: DynFlags
-> Bool -- True <=> is case expression
@@ -560,4 +558,3 @@ floatIsDupable :: DynFlags -> FloatBind -> Bool
floatIsDupable dflags (FloatCase scrut _ _ _) = exprIsDupable dflags scrut
floatIsDupable dflags (FloatLet (Rec prs)) = all (exprIsDupable dflags . snd) prs
floatIsDupable dflags (FloatLet (NonRec _ r)) = exprIsDupable dflags r
-\end{code}
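The motivation stated at the top of FloatIn -- float bindings into case branches so we don't allocate things that the chosen branch never needs -- can be shown with a hand-written before/after pair. This is ordinary source Haskell written for illustration, not GHC's Core output.

-- Before: the thunk for t is allocated even when b is False.
beforeFloatIn :: Bool -> [Int] -> Int
beforeFloatIn b xs =
  let t = sum (map (* 2) xs)
  in case b of
       True  -> t + 1
       False -> 0

-- After floating the let into the one branch that uses it.
afterFloatIn :: Bool -> [Int] -> Int
afterFloatIn b xs =
  case b of
    True  -> let t = sum (map (* 2) xs) in t + 1
    False -> 0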
diff --git a/compiler/simplCore/FloatOut.lhs b/compiler/simplCore/FloatOut.hs
index 55ed111a70..4cd871334d 100644
--- a/compiler/simplCore/FloatOut.lhs
+++ b/compiler/simplCore/FloatOut.hs
@@ -1,11 +1,11 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[FloatOut]{Float bindings outwards (towards the top level)}
``Long-distance'' floating of bindings towards the top level.
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
@@ -31,8 +31,8 @@ import FastString
import qualified Data.IntMap as M
#include "HsVersions.h"
-\end{code}
+{-
-----------------
Overall game plan
-----------------
@@ -106,13 +106,13 @@ which might usefully be separated to
Well, maybe. We don't do this at the moment.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection[floatOutwards]{@floatOutwards@: let-floating interface function}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
floatOutwards :: FloatOutSwitches
-> DynFlags
-> UniqSupply
@@ -144,15 +144,15 @@ floatTopBind bind
in case bind' of
Rec prs -> (fs, unitBag (Rec (addTopFloatPairs float_bag prs)))
NonRec {} -> (fs, float_bag `snocBag` bind') }
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[FloatOut-Bind]{Floating in a binding (the business end)}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
floatBind :: LevelledBind -> (FloatStats, FloatBinds, CoreBind)
floatBind (NonRec (TB var _) rhs)
= case (floatExpr rhs) of { (fs, rhs_floats, rhs') ->
@@ -205,8 +205,8 @@ floatList _ [] = (zeroStats, emptyFloats, [])
floatList f (a:as) = case f a of { (fs_a, binds_a, b) ->
case floatList f as of { (fs_as, binds_as, bs) ->
(fs_a `add_stats` fs_as, binds_a `plusFloats` binds_as, b:bs) }}
-\end{code}
+{-
Note [Floating out of Rec rhss]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider Rec { f<1,0> = \xy. body }
@@ -239,13 +239,13 @@ We could perhaps get rid of the 'tops' component of the floating binds,
but this case works just as well.
-%************************************************************************
+************************************************************************
\subsection[FloatOut-Expr]{Floating in expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
floatBody :: Level
-> LevelledExpr
-> (FloatStats, FloatBinds, CoreExpr)
@@ -342,8 +342,8 @@ floatExpr (Case scrut (TB case_bndr case_spec) ty alts)
float_alt bind_lvl (con, bs, rhs)
= case (floatBody bind_lvl rhs) of { (fs, rhs_floats, rhs') ->
(fs, rhs_floats, (con, [b | TB b _ <- bs], rhs')) }
-\end{code}
+{-
Note [Avoiding unnecessary floating]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general we want to avoid floating a let unnecessarily, because
@@ -383,16 +383,16 @@ altogether when profiling got in the way.
So now we do the partition right at the (Let..) itself.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Utility bits for floating stats}
-%* *
-%************************************************************************
+* *
+************************************************************************
I didn't implement this with unboxed numbers. I don't want to be too
strict in this stuff, as it is rarely turned on. (WDP 95/09)
+-}
-\begin{code}
data FloatStats
= FlS Int -- Number of top-floats * lambda groups they've been past
Int -- Number of non-top-floats * lambda groups they've been past
@@ -414,14 +414,13 @@ add_stats (FlS a1 b1 c1) (FlS a2 b2 c2)
add_to_stats :: FloatStats -> FloatBinds -> FloatStats
add_to_stats (FlS a b c) (FB tops others)
= FlS (a + lengthBag tops) (b + lengthBag (flattenMajor others)) (c + 1)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Utility bits for floating}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Representation of FloatBinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -441,9 +440,8 @@ That is why MajorEnv is represented as a finite map.
We keep the bindings destined for the *top* level separate, because
we float them out even if they don't escape a *value* lambda; see
partitionByMajorLevel.
+-}
-
-\begin{code}
type FloatLet = CoreBind -- INVARIANT: a FloatLet is always lifted
type MajorEnv = M.IntMap MinorEnv -- Keyed by major level
type MinorEnv = M.IntMap (Bag FloatBind) -- Keyed by minor level
@@ -563,4 +561,3 @@ wrapTick t (FB tops defns)
-- Conversely, inlining of HNFs inside an SCC is allowed, and
-- indeed the HNF we're floating here might well be inlined back
-- again, and we don't want to end up with duplicate ticks.
-\end{code}
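Conversely, FloatOut moves bindings towards the top level so they are computed once and shared past value lambdas. Another hand-written before/after pair, again source-level intuition rather than GHC output:

-- Before: the table is rebuilt on every call.
beforeFloatOut :: Int -> Int
beforeFloatOut x = let table = map (* 2) [1 .. 1000] in table !! (x `mod` 1000)

-- After: the binding has no free occurrence of x, so it can float to the
-- top level and be shared between calls.
sharedTable :: [Int]
sharedTable = map (* 2) [1 .. 1000]

afterFloatOut :: Int -> Int
afterFloatOut x = sharedTable !! (x `mod` 1000)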
diff --git a/compiler/simplCore/LiberateCase.lhs b/compiler/simplCore/LiberateCase.hs
index 21adf20f44..1df1405329 100644
--- a/compiler/simplCore/LiberateCase.lhs
+++ b/compiler/simplCore/LiberateCase.hs
@@ -1,9 +1,9 @@
-%
-% (c) The AQUA Project, Glasgow University, 1994-1998
-%
+{-
+(c) The AQUA Project, Glasgow University, 1994-1998
+
\section[LiberateCase]{Unroll recursion to allow evals to be lifted from a loop}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module LiberateCase ( liberateCase ) where
@@ -15,8 +15,8 @@ import CoreUnfold ( couldBeSmallEnoughToInline )
import Id
import VarEnv
import Util ( notNull )
-\end{code}
+{-
The liberate-case transformation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module walks over @Core@, and looks for @case@ on free variables.
@@ -111,13 +111,13 @@ Here, the level of @f@ is zero, the level of @g@ is one,
and the level of @h@ is zero (NB not one).
-%************************************************************************
-%* *
+************************************************************************
+* *
Top-level code
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
liberateCase :: DynFlags -> CoreProgram -> CoreProgram
liberateCase dflags binds = do_prog (initEnv dflags) binds
where
@@ -125,18 +125,18 @@ liberateCase dflags binds = do_prog (initEnv dflags) binds
do_prog env (bind:binds) = bind' : do_prog env' binds
where
(env', bind') = libCaseBind env bind
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Main payload
-%* *
-%************************************************************************
+* *
+************************************************************************
Bindings
~~~~~~~~
-\begin{code}
+-}
+
libCaseBind :: LibCaseEnv -> CoreBind -> (LibCaseEnv, CoreBind)
libCaseBind env (NonRec binder rhs)
@@ -164,8 +164,8 @@ libCaseBind env (Rec pairs)
= idArity id > 0 -- Note [Only functions!]
&& maybe True (\size -> couldBeSmallEnoughToInline (lc_dflags env) size rhs)
(bombOutSize env)
-\end{code}
+{-
Note [Need to localiseId in libCaseBind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The call to localiseId is needed for two subtle reasons
@@ -191,8 +191,8 @@ rhs_small_enough call in the comprehension for env_rhs does.
Expressions
~~~~~~~~~~~
+-}
-\begin{code}
libCase :: LibCaseEnv
-> CoreExpr
-> CoreExpr
@@ -224,12 +224,12 @@ libCase env (Case scrut bndr ty alts)
libCaseAlt :: LibCaseEnv -> (AltCon, [CoreBndr], CoreExpr)
-> (AltCon, [CoreBndr], CoreExpr)
libCaseAlt env (con,args,rhs) = (con, args, libCase (addBinders env args) rhs)
-\end{code}
-
+{-
Ids
~~~
-\begin{code}
+-}
+
libCaseId :: LibCaseEnv -> Id -> CoreExpr
libCaseId env v
| Just the_bind <- lookupRecId env v -- It's a use of a recursive thing
@@ -253,8 +253,8 @@ freeScruts env rec_bind_lvl
, scrut_at_lvl > rec_bind_lvl]
-- Note [When to specialise]
-- Note [Avoiding fruitless liberate-case]
-\end{code}
+{-
Note [When to specialise]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
@@ -294,13 +294,13 @@ an occurrence of 'g', we want to check that there's a scruted-var v st
b) v's scrutinisation site is *inside* g
-%************************************************************************
-%* *
+************************************************************************
+* *
Utility functions
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
addBinders :: LibCaseEnv -> [CoreBndr] -> LibCaseEnv
addBinders env@(LibCaseEnv { lc_lvl = lvl, lc_lvl_env = lvl_env }) binders
= env { lc_lvl_env = lvl_env' }
@@ -342,22 +342,20 @@ lookupLevel env id
= case lookupVarEnv (lc_lvl_env env) id of
Just lvl -> lvl
Nothing -> topLevel
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
The environment
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type LibCaseLevel = Int
topLevel :: LibCaseLevel
topLevel = 0
-\end{code}
-\begin{code}
data LibCaseEnv
= LibCaseEnv {
lc_dflags :: DynFlags,
@@ -408,4 +406,3 @@ initEnv dflags
-- (passed in from cmd-line args)
bombOutSize :: LibCaseEnv -> Maybe Int
bombOutSize = liberateCaseThreshold . lc_dflags
-\end{code}
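The liberate-case idea -- unroll the recursion so a case on a free variable can be lifted out of the loop -- can be approximated by hand as below. This shows the intended effect only; it is not the transformation GHC literally performs on Core.

-- Before: b is scrutinised on every iteration of go.
beforeLC :: Bool -> Int -> Int
beforeLC b = go
  where
    go 0 = 0
    go n = case b of
             True  -> 1 + go (n - 1)
             False -> 2 + go (n - 1)

-- After: the case on b happens once, and each branch gets its own
-- specialised loop.
afterLC :: Bool -> Int -> Int
afterLC b = case b of
              True  -> goT
              False -> goF
  where
    goT 0 = 0
    goT n = 1 + goT (n - 1)
    goF 0 = 0
    goF n = 2 + goF (n - 1)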
diff --git a/compiler/simplCore/OccurAnal.lhs b/compiler/simplCore/OccurAnal.hs
index ef212bca85..26aec9dcc7 100644
--- a/compiler/simplCore/OccurAnal.lhs
+++ b/compiler/simplCore/OccurAnal.hs
@@ -1,17 +1,16 @@
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
-%************************************************************************
-%* *
+************************************************************************
+* *
\section[OccurAnal]{Occurrence analysis pass}
-%* *
-%************************************************************************
+* *
+************************************************************************
The occurrence analyser re-typechecks a core expression, returning a new
core expression with (hopefully) improved usage information.
+-}
-\begin{code}
{-# LANGUAGE CPP, BangPatterns #-}
module OccurAnal (
@@ -41,18 +40,17 @@ import Util
import Outputable
import FastString
import Data.List
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[OccurAnal-main]{Counting occurrences: main function}
-%* *
-%************************************************************************
+* *
+************************************************************************
Here's the externally-callable interface:
+-}
-\begin{code}
occurAnalysePgm :: Module -- Used only in debug output
-> (Activation -> Bool)
-> [CoreRule] -> [CoreVect] -> VarSet
@@ -114,19 +112,18 @@ occurAnalyseExpr' enable_binder_swap expr
env = (initOccEnv all_active_rules) {occ_binder_swap = enable_binder_swap}
-- To be conservative, we say that all inlines and rules are active
all_active_rules = \_ -> True
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[OccurAnal-main]{Counting occurrences: main function}
-%* *
-%************************************************************************
+* *
+************************************************************************
Bindings
~~~~~~~~
+-}
-\begin{code}
occAnalBind :: OccEnv -- The incoming OccEnv
-> IdEnv IdSet -- Mapping from FVs of imported RULE LHSs to RHS FVs
-> CoreBind
@@ -177,8 +174,8 @@ occAnalRecBind env imp_rules_edges pairs body_usage
nodes :: [Node Details]
nodes = {-# SCC "occAnalBind.assoc" #-} map (makeNode env imp_rules_edges bndr_set) pairs
-\end{code}
+{-
Note [Dead code]
~~~~~~~~~~~~~~~~
Dropping dead code for a cyclic Strongly Connected Component is done
@@ -634,9 +631,8 @@ But watch out! If 'fs' is not chosen as a loop breaker, we may get an infinite
- now there's another opportunity to apply the RULE
This showed up when compiling Control.Concurrent.Chan.getChanContents.
+-}
-
-\begin{code}
type Node details = (details, Unique, [Unique]) -- The Ints are gotten from the Unique,
-- which is gotten from the Id.
data Details
@@ -793,8 +789,8 @@ occAnalRec (CyclicSCC nodes) (body_uds, binds)
| (ND { nd_bndr = b, nd_active_rule_fvs = rule_fvs },_,_) <- nodes
, let trimmed_rule_fvs = rule_fvs `intersectVarSet` bndr_set
, not (isEmptyVarSet trimmed_rule_fvs)]
-\end{code}
+{-
@loopBreakSCC@ is applied to the list of (binder,rhs) pairs for a cyclic
strongly connected component (there's guaranteed to be a cycle). It returns the
same pairs, but
@@ -809,8 +805,8 @@ Furthermore, the order of the binds is such that if we neglect dependencies
on the no-inline Ids then the binds are topologically sorted. This means
that the simplifier will generally do a good job if it works from top to bottom,
recording inlinings for any Ids which aren't marked as "no-inline" as it goes.
+-}
-\begin{code}
type Binding = (Id,CoreExpr)
mk_loop_breaker :: Node Details -> Binding
@@ -944,8 +940,8 @@ reOrderNodes depth bndr_set weak_fvs (node : nodes) binds
is_con_app (Lam _ e) = is_con_app e
is_con_app (Tick _ e) = is_con_app e
is_con_app _ = False
-\end{code}
+{-
Note [Complexity of loop breaking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The loop-breaking algorithm knocks out one binder at a time, and
@@ -1067,9 +1063,8 @@ ToDo: try using the occurrence info for the inline'd binder.
[March 97] We do the same for atomic RHSs. Reason: see notes with loopBreakSCC.
[June 98, SLPJ] I've undone this change; I don't understand it. See notes with loopBreakSCC.
+-}
-
-\begin{code}
occAnalRecRhs :: OccEnv -> CoreExpr -- Rhs
-> (UsageDetails, CoreExpr)
-- Returned usage details covers only the RHS,
@@ -1111,8 +1106,8 @@ addIdOccs usage id_set = foldVarSet add usage id_set
-- b) We don't want to substitute a BIG expression inside a RULE
-- even if that's the only occurrence of the thing
-- (Same goes for INLINE.)
-\end{code}
+{-
Note [Cascading inlines]
~~~~~~~~~~~~~~~~~~~~~~~~
By default we use an rhsCtxt for the RHS of a binding. This tells the
@@ -1155,7 +1150,8 @@ for the various clauses.
Expressions
~~~~~~~~~~~
-\begin{code}
+-}
+
occAnal :: OccEnv
-> CoreExpr
-> (UsageDetails, -- Gives info only about the "interesting" Ids
@@ -1174,14 +1170,14 @@ occAnal env expr@(Var v) = (mkOneOcc env v False, expr)
occAnal _ (Coercion co)
= (addIdOccs emptyDetails (coVarsOfCo co), Coercion co)
-- See Note [Gather occurrences of coercion variables]
-\end{code}
+{-
Note [Gather occurrences of coercion variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to gather info about what coercion variables appear, so that
we can sort them into the right place when doing dependency analysis.
+-}
-\begin{code}
occAnal env (Tick tickish body)
| Breakpoint _ ids <- tickish
= (mapVarEnv markInsideSCC usage
@@ -1206,9 +1202,7 @@ occAnal env (Cast expr co)
-- then mark y as 'Many' so that we don't
-- immediately inline y again.
}
-\end{code}
-\begin{code}
occAnal env app@(App _ _)
= occAnalApp env (collectArgs app)
@@ -1286,7 +1280,7 @@ occAnal env (Let bind body)
(final_usage, mkLets new_binds body') }}
occAnalArgs :: OccEnv -> [CoreExpr] -> [OneShots] -> (UsageDetails, [CoreExpr])
-occAnalArgs _ [] _
+occAnalArgs _ [] _
= (emptyDetails, [])
occAnalArgs env (arg:args) one_shots
@@ -1299,8 +1293,8 @@ occAnalArgs env (arg:args) one_shots
case occAnal arg_env arg of { (uds1, arg') ->
case occAnalArgs env args one_shots' of { (uds2, args') ->
(uds1 +++ uds2, arg':args') }}}
-\end{code}
+{-
Applications are dealt with specially because we want
the "build hack" to work.
@@ -1315,8 +1309,8 @@ that y may be duplicated thereby.
If we aren't careful we duplicate the (expensive x) call!
Constructors are rather like lambdas in this way.
+-}
-\begin{code}
occAnalApp :: OccEnv
-> (Expr CoreBndr, [Arg CoreBndr])
-> (UsageDetails, Expr CoreBndr)
@@ -1371,8 +1365,8 @@ markManyIf :: Bool -- If this is true
-> UsageDetails
markManyIf True uds = mapVarEnv markMany uds
markManyIf False uds = uds
-\end{code}
+{-
Note [Use one-shot information]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The occurrence analyser propagates one-shot-lambda information in two situations
@@ -1402,8 +1396,8 @@ Simplify.mkDupableAlt
In this example, though, the Simplifier will bring 'a' and 'b' back to
life, because it binds 'y' to (a,b) (imagine got inlined and
scrutinised y).
+-}
-\begin{code}
occAnalAlt :: (OccEnv, Maybe (Id, CoreExpr))
-> CoreAlt
-> (UsageDetails, Alt IdWithOccInfo)
@@ -1440,16 +1434,15 @@ wrapAltRHS env (Just (scrut_var, let_rhs)) alt_usg bndrs alt_rhs
wrapAltRHS _ _ alt_usg _ alt_rhs
= (alt_usg, alt_rhs)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
OccEnv
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data OccEnv
= OccEnv { occ_encl :: !OccEncl -- Enclosing context information
, occ_one_shots :: !OneShots -- Tells about linearity
@@ -1502,16 +1495,16 @@ rhsCtxt :: OccEnv -> OccEnv
rhsCtxt env = env { occ_encl = OccRhs, occ_one_shots = [] }
argCtxt :: OccEnv -> [OneShots] -> (OccEnv, [OneShots])
-argCtxt env []
+argCtxt env []
= (env { occ_encl = OccVanilla, occ_one_shots = [] }, [])
-argCtxt env (one_shots:one_shots_s)
+argCtxt env (one_shots:one_shots_s)
= (env { occ_encl = OccVanilla, occ_one_shots = one_shots }, one_shots_s)
isRhsEnv :: OccEnv -> Bool
isRhsEnv (OccEnv { occ_encl = OccRhs }) = True
isRhsEnv (OccEnv { occ_encl = OccVanilla }) = False
-oneShotGroup :: OccEnv -> [CoreBndr]
+oneShotGroup :: OccEnv -> [CoreBndr]
-> ( OccEnv
, [CoreBndr] )
-- The result binders have one-shot-ness set that they might not have had originally.
@@ -1532,7 +1525,7 @@ oneShotGroup env@(OccEnv { occ_one_shots = ctxt }) bndrs
go ctxt (bndr:bndrs) rev_bndrs
| isId bndr
-
+
= case ctxt of
[] -> go [] bndrs (bndr : rev_bndrs)
(one_shot : ctxt) -> go ctxt bndrs (bndr': rev_bndrs)
@@ -1544,10 +1537,7 @@ oneShotGroup env@(OccEnv { occ_one_shots = ctxt }) bndrs
addAppCtxt :: OccEnv -> [Arg CoreBndr] -> OccEnv
addAppCtxt env@(OccEnv { occ_one_shots = ctxt }) args
= env { occ_one_shots = replicate (valArgCount args) OneShotLam ++ ctxt }
-\end{code}
-
-\begin{code}
transClosureFV :: UniqFM VarSet -> UniqFM VarSet
-- If (f,g), (g,h) are in the input, then (f,h) is in the output
-- as well as (f,g), (g,h)
@@ -1578,14 +1568,13 @@ extendFvs env s
extras :: VarSet -- env(s)
extras = foldUFM unionVarSet emptyVarSet $
intersectUFM_C (\x _ -> x) env s
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Binder swap
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Binder swap]
~~~~~~~~~~~~~~~~~~
@@ -1656,7 +1645,7 @@ When the scrutinee is a GlobalId we must take care in two ways
i) In order to *know* whether 'x' occurs free in the RHS, we need its
occurrence info. BUT, we don't gather occurrence info for
- GlobalIds. That's the reason for the (small) occ_gbl_scrut env in
+ GlobalIds. That's the reason for the (small) occ_gbl_scrut env in
OccEnv is for: it says "gather occurrence info for these".
ii) We must call localiseId on 'x' first, in case it's a GlobalId, or
@@ -1734,8 +1723,8 @@ binder-swap in OccAnal:
It's fixed by doing the binder-swap in OccAnal because we can do the
binder-swap unconditionally and still get occurrence analysis
information right.
+-}
-\begin{code}
mkAltEnv :: OccEnv -> CoreExpr -> Id -> (OccEnv, Maybe (Id, CoreExpr))
-- Does two things: a) makes the occ_one_shots = OccVanilla
-- b) extends the GlobalScruts if possible
@@ -1758,16 +1747,15 @@ mkAltEnv env@(OccEnv { occ_gbl_scrut = pe }) scrut case_bndr
-- new binding for it, and it might have an External Name, or
-- even be a GlobalId; Note [Binder swap on GlobalId scrutinees]
-- Also we don't want any INLINE or NOINLINE pragmas!
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[OccurAnal-types]{OccEnv}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type UsageDetails = IdEnv OccInfo -- A finite map from ids to their usage
-- INVARIANT: never IAmDead
-- (Deadness is signalled by not being in the map at all)
@@ -1835,16 +1823,15 @@ setBinderOcc usage bndr
| otherwise = setIdOccInfo bndr occ_info
where
occ_info = lookupVarEnv usage bndr `orElse` IAmDead
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Operations over OccInfo}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkOneOcc :: OccEnv -> Id -> InterestingCxt -> UsageDetails
mkOneOcc env id int_cxt
| isLocalId id
@@ -1882,4 +1869,3 @@ orOccInfo (OneOcc in_lam1 _ int_cxt1)
(int_cxt1 && int_cxt2)
orOccInfo a1 a2 = ASSERT( not (isDeadOcc a1 || isDeadOcc a2) )
NoOccInfo
-\end{code}
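One way to render the "binder swap" intuition from Note [Binder swap] at source level: inside an alternative, an occurrence of the scrutinised variable can be replaced by the case binder, which may leave the original variable with only the scrutinee occurrence (and hence inlinable or dead). The pair below is a hand-written illustration, not occurrence-analyser output.

-- Before: x is used again inside an alternative.
beforeSwap :: Maybe Int -> Int
beforeSwap x = case x of
                 Nothing -> length (show x)
                 Just n  -> n

-- After the swap: the occurrence inside the alternative refers to the case
-- binder instead, so the only remaining use of x is as the scrutinee.
afterSwap :: Maybe Int -> Int
afterSwap x = case x of
                wild@Nothing -> length (show wild)
                Just n       -> n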
diff --git a/compiler/simplCore/SAT.lhs b/compiler/simplCore/SAT.hs
index bd5b718669..dc76df0e08 100644
--- a/compiler/simplCore/SAT.lhs
+++ b/compiler/simplCore/SAT.hs
@@ -1,12 +1,12 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
-%************************************************************************
+************************************************************************
Static Argument Transformation pass
-%************************************************************************
+************************************************************************
May be seen as removing invariants from loops:
Arguments of recursive functions that do not change in recursive
@@ -46,9 +46,8 @@ Geometric Mean +0.0% -0.2% -6.9%
The previous patch, to fix polymorphic floatout demand signatures, is
essential to make this work well!
+-}
-
-\begin{code}
{-# LANGUAGE CPP #-}
module SAT ( doStaticArgs ) where
@@ -72,17 +71,14 @@ import Data.List
import FastString
#include "HsVersions.h"
-\end{code}
-\begin{code}
doStaticArgs :: UniqSupply -> CoreProgram -> CoreProgram
doStaticArgs us binds = snd $ mapAccumL sat_bind_threaded_us us binds
where
sat_bind_threaded_us us bind =
let (us1, us2) = splitUniqSupply us
in (us1, fst $ runSAT us2 (satBind bind emptyUniqSet))
-\end{code}
-\begin{code}
+
-- We don't bother to SAT recursive groups since it can lead
-- to massive code expansion: see Andre Santos' thesis for details.
-- This means we only apply the actual SAT to Rec groups of one element,
@@ -111,8 +107,7 @@ satBind (Rec pairs) interesting_ids = do
rhss_SATed <- mapM (\e -> satTopLevelExpr e interesting_ids) rhss
let (rhss', sat_info_rhss') = unzip rhss_SATed
return (Rec (zipEqual "satBind" binders rhss'), mergeIdSATInfos sat_info_rhss')
-\end{code}
-\begin{code}
+
data App = VarApp Id | TypeApp Type | CoApp Coercion
data Staticness a = Static a | NotStatic
@@ -177,8 +172,7 @@ finalizeApp (Just (v, sat_info')) id_sat_info =
Nothing -> sat_info'
Just sat_info -> mergeSATInfo sat_info sat_info'
in extendVarEnv id_sat_info v sat_info''
-\end{code}
-\begin{code}
+
satTopLevelExpr :: CoreExpr -> IdSet -> SatM (CoreExpr, IdSATInfo)
satTopLevelExpr expr interesting_ids = do
(expr', sat_info_expr, expr_app) <- satExpr expr interesting_ids
@@ -249,15 +243,15 @@ satExpr co@(Coercion _) _ = do
satExpr (Cast expr coercion) interesting_ids = do
(expr', sat_info_expr, expr_app) <- satExpr expr interesting_ids
return (Cast expr' coercion, sat_info_expr, expr_app)
-\end{code}
-%************************************************************************
+{-
+************************************************************************
Static Argument Transformation Monad
-%************************************************************************
+************************************************************************
+-}
-\begin{code}
type SatM result = UniqSM result
runSAT :: UniqSupply -> SatM a -> a
@@ -265,14 +259,13 @@ runSAT = initUs_
newUnique :: SatM Unique
newUnique = getUniqueM
-\end{code}
-
-%************************************************************************
+{-
+************************************************************************
Static Argument Transformation Monad
-%************************************************************************
+************************************************************************
To do the transformation, the game plan is to:
@@ -368,8 +361,8 @@ GHC.Base.until =
Where sat_shadow has captured the type variables of x_a6X etc as it has a a_aiK
type argument. This is bad because it means the application sat_worker_s1aU x_a6X
is not well typed.
+-}
-\begin{code}
saTransformMaybe :: Id -> Maybe SATInfo -> [Id] -> CoreExpr -> SatM CoreBind
saTransformMaybe binder maybe_arg_staticness rhs_binders rhs_body
| Just arg_staticness <- maybe_arg_staticness
@@ -436,5 +429,3 @@ saTransform binder arg_staticness rhs_binders rhs_body
isStaticValue :: Staticness App -> Bool
isStaticValue (Static (VarApp _)) = True
isStaticValue _ = False
-
-\end{code}
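The static argument transformation described above turns an argument that is passed unchanged on every recursive call into a value captured by a local worker. A hand-written before/after pair, for illustration only:

-- Before: f is passed, unchanged, on every recursive call.
beforeSAT :: (Int -> Int) -> Int -> Int
beforeSAT _ 0 = 0
beforeSAT f n = f n + beforeSAT f (n - 1)

-- After: the static argument is captured once by a local worker.
afterSAT :: (Int -> Int) -> Int -> Int
afterSAT f = go
  where
    go 0 = 0
    go n = f n + go (n - 1)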
diff --git a/compiler/simplCore/SetLevels.lhs b/compiler/simplCore/SetLevels.hs
index b8726d93a4..e7000409e7 100644
--- a/compiler/simplCore/SetLevels.lhs
+++ b/compiler/simplCore/SetLevels.hs
@@ -1,6 +1,6 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section{SetLevels}
***************************
@@ -40,8 +40,8 @@
The simplifier tries to get rid of occurrences of x, in favour of wild,
in the hope that there will only be one remaining occurrence of x, namely
the scrutinee of the case, and we can inline it.
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module SetLevels (
setLevels,
@@ -80,15 +80,15 @@ import UniqSupply
import Util
import Outputable
import FastString
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Level numbers}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type LevelledExpr = TaggedExpr FloatSpec
type LevelledBind = TaggedBind FloatSpec
type LevelledBndr = TaggedBndr FloatSpec
@@ -107,8 +107,8 @@ data FloatSpec
floatSpecLevel :: FloatSpec -> Level
floatSpecLevel (FloatMe l) = l
floatSpecLevel (StayPut l) = l
-\end{code}
+{-
The {\em level number} on a (type-)lambda-bound variable is the
nesting depth of the (type-)lambda which binds it. The outermost lambda
has level 1, so (Level 0 0) means that the variable is bound outside any lambda.
@@ -162,8 +162,8 @@ One particular case is that of workers: we don't want to float the
call to the worker outside the wrapper, otherwise the worker might get
inlined into the floated expression, and an importing module won't see
the worker at all.
+-}
-\begin{code}
instance Outputable FloatSpec where
ppr (FloatMe l) = char 'F' <> ppr l
ppr (StayPut l) = ppr l
@@ -199,16 +199,15 @@ instance Outputable Level where
instance Eq Level where
(Level maj1 min1) == (Level maj2 min2) = maj1 == maj2 && min1 == min2
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Main level-setting code}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
setLevels :: FloatOutSwitches
-> CoreProgram
-> UniqSupply
@@ -237,13 +236,13 @@ lvlTopBind env (Rec pairs)
(env', bndrs') = substAndLvlBndrs Recursive env tOP_LEVEL bndrs
rhss' <- mapM (lvlExpr env' . freeVars) rhss
return (Rec (bndrs' `zip` rhss'), env')
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Setting expression levels}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Floating over-saturated applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -258,13 +257,13 @@ is minimal, and the extra local thunks allocated cost money.
Arguably we could float even class-op applications if they were going to
top level -- but then they must be applied to a constant dictionary and
will almost certainly be optimised away anyway.
+-}
-\begin{code}
lvlExpr :: LevelEnv -- Context
-> CoreExprWithFVs -- Input expression
-> LvlM LevelledExpr -- Result expression
-\end{code}
+{-
The @ctxt_lvl@ is, roughly, the level of the innermost enclosing
binder. Here's an example
@@ -279,8 +278,8 @@ don't want @lvlExpr@ to turn the scrutinee of the @case@ into an MFE
--- because it isn't a *maximal* free expression.
If there were another lambda in @r@'s rhs, it would get level-2 as well.
+-}
-\begin{code}
lvlExpr env (_, AnnType ty) = return (Type (substTy (le_subst env) ty))
lvlExpr env (_, AnnCoercion co) = return (Coercion (substCo (le_subst env) co))
lvlExpr env (_, AnnVar v) = return (lookupVar env v)
@@ -398,8 +397,8 @@ lvlCase env scrut_fvs scrut' case_bndr ty alts
; return (con, bs', rhs') }
where
(new_env, bs') = substAndLvlBndrs NonRecursive alts_env incd_lvl bs
-\end{code}
+{-
Note [Floating cases]
~~~~~~~~~~~~~~~~~~~~~
Consider this:
@@ -443,8 +442,8 @@ the inner case out, at least not unless x is also evaluated at its
binding site.
That's why we apply exprOkForSpeculation to scrut' and not to scrut.
+-}
-\begin{code}
lvlMFE :: Bool -- True <=> strict context [body of case or let]
-> LevelEnv -- Level of in-scope names/tyvars
-> CoreExprWithFVs -- input expression
@@ -516,8 +515,8 @@ lvlMFE strict_ctxt env ann_expr@(fvs, _)
--
-- Also a strict context includes unboxed values, and they
-- can't be bound at top level
-\end{code}
+{-
Note [Unlifted MFEs]
~~~~~~~~~~~~~~~~~~~~
We don't float unlifted MFEs, which potentially loses big opportunities.
@@ -566,8 +565,8 @@ Because in doing so we share a tiny bit of computation (the switch) but
in exchange we build a thunk, which is bad. This case reduces allocation
by 7% in spectral/puzzle (a rather strange benchmark) and 1.2% in real/fem.
Doesn't change any other allocation at all.
+-}
-\begin{code}
annotateBotStr :: Id -> Maybe (Arity, StrictSig) -> Id
-- See Note [Bottoming floats] for why we want to add
-- bottoming information right now
@@ -608,8 +607,8 @@ notWorthFloating e abs_vars
is_triv (_, AnnApp e (_, AnnType {})) = is_triv e
is_triv (_, AnnApp e (_, AnnCoercion {})) = is_triv e
is_triv _ = False
-\end{code}
+{-
Note [Floating literals]
~~~~~~~~~~~~~~~~~~~~~~~~
It's important to float Integer literals, so that they get shared,
@@ -663,15 +662,15 @@ OLD comment was:
to the condition above. We should really try this out.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Bindings}
-%* *
-%************************************************************************
+* *
+************************************************************************
The binding stuff works for top level too.
+-}
-\begin{code}
lvlBind :: LevelEnv
-> CoreBindWithFVs
-> LvlM (LevelledBind, LevelEnv)
@@ -789,16 +788,15 @@ lvlFloatRhs abs_vars dest_lvl env rhs
; return (mkLams abs_vars_w_lvls rhs') }
where
(rhs_env, abs_vars_w_lvls) = lvlLamBndrs env dest_lvl abs_vars
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Deciding floatability}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
substAndLvlBndrs :: RecFlag -> LevelEnv -> Level -> [InVar] -> (LevelEnv, [LevelledBndr])
substAndLvlBndrs is_rec env lvl bndrs
= lvlBndrs subst_env lvl subst_bndrs
@@ -847,9 +845,7 @@ lvlBndrs env@(LE { le_lvl_env = lvl_env }) new_lvl bndrs
where
lvld_bndrs = [TB bndr (StayPut new_lvl) | bndr <- bndrs]
add_lvl env v = extendVarEnv env v new_lvl
-\end{code}
-\begin{code}
-- Destination level is the max Id level of the expression
-- (We'll abstract the type variables, if any.)
destLevel :: LevelEnv -> VarSet
@@ -895,16 +891,15 @@ countFreeIds = foldVarSet add 0
add :: Var -> Int -> Int
add v n | isId v = n+1
| otherwise = n
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Free-To-Level Monad}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type InVar = Var -- Pre cloning
type InId = Id -- Pre cloning
type OutVar = Var -- Post cloning
@@ -1028,17 +1023,12 @@ abstractVars dest_lvl (LE { le_subst = subst, le_lvl_env = lvl_env }) in_fvs
close v = foldVarSet (unionVarSet . close)
(unitVarSet v)
(varTypeTyVars v)
-\end{code}
-\begin{code}
type LvlM result = UniqSM result
initLvl :: UniqSupply -> UniqSM a -> a
initLvl = initUs_
-\end{code}
-
-\begin{code}
newPolyBndrs :: Level -> LevelEnv -> [OutVar] -> [InId] -> UniqSM (LevelEnv, [OutId])
-- The envt is extended to bind the new bndrs to dest_lvl, but
-- the ctxt_lvl is unaffected
@@ -1109,8 +1099,8 @@ zap_demand_info :: Var -> Var
zap_demand_info v
| isId v = zapDemandIdInfo v
| otherwise = v
-\end{code}
+{-
Note [Zapping the demand info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
VERY IMPORTANT: we must zap the demand info if the thing is going to
@@ -1119,3 +1109,4 @@ binding site. Eg
f :: Int -> Int
f x = let v = 3*4 in v+x
Here v is strict; but if we float v to top level, it isn't any more.
+-}
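A minimal before/after picture of the example above (lvl is an invented
top-level name):
    f x = let v = 3*4 in v+x    -- v is strict at its binding site
  ==>
    lvl = 3*4                   -- floated to top level, lvl is no longer
    f x = lvl+x                 --   certain to be evaluated, so its strict
                                --   demand info must be zapped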
diff --git a/compiler/simplCore/SimplCore.lhs b/compiler/simplCore/SimplCore.hs
index 883f2ef7f9..75766e8ef2 100644
--- a/compiler/simplCore/SimplCore.lhs
+++ b/compiler/simplCore/SimplCore.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[SimplCore]{Driver for simplifying @Core@ programs}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module SimplCore ( core2core, simplifyExpr ) where
@@ -55,15 +55,15 @@ import Control.Monad
import DynamicLoading ( loadPlugins )
import Plugins ( installCoreToDos )
#endif
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The driver for the simplifier}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
core2core :: HscEnv -> ModGuts -> IO ModGuts
core2core hsc_env guts
= do { us <- mkSplitUniqSupply 's'
@@ -91,16 +91,15 @@ core2core hsc_env guts
-- _theoretically_ be changed during the Core pipeline (it's part of ModGuts), which
-- would mean our cached value would go out of date.
print_unqual = mkPrintUnqualified dflags (mg_rdr_env guts)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Generating the main optimisation pipeline
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
getCoreToDo :: DynFlags -> [CoreToDo]
getCoreToDo dflags
= core_todo
@@ -311,11 +310,9 @@ getCoreToDo dflags
maybe_rule_check (Phase 0)
]
-\end{code}
-Loading plugins
+-- Loading plugins
-\begin{code}
addPluginPasses :: [CoreToDo] -> CoreM [CoreToDo]
#ifndef GHCI
addPluginPasses builtin_passes = return builtin_passes
@@ -327,15 +324,15 @@ addPluginPasses builtin_passes
where
query_plug todos (_, plug, options) = installCoreToDos plug options todos
#endif
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
The CoreToDo interpreter
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
runCorePasses :: [CoreToDo] -> ModGuts -> CoreM ModGuts
runCorePasses passes guts
= foldM do_pass guts passes
@@ -395,15 +392,15 @@ doCorePass (CoreDoPluginPass _ pass) = {-# SCC "Plugin" #-} pass
#endif
doCorePass pass = pprPanic "doCorePass" (ppr pass)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Core pass combinators}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
printCore :: DynFlags -> CoreProgram -> IO ()
printCore dflags binds
= Err.dumpIfSet dflags True "Print Core" (pprCoreBindings binds)
@@ -467,16 +464,15 @@ observe do_pass = doPassM $ \binds -> do
dflags <- getDynFlags
_ <- liftIO $ do_pass dflags binds
return binds
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Gentle simplification
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
simplifyExpr :: DynFlags -- includes spec of what core-to-core passes to do
-> CoreExpr
-> IO CoreExpr
@@ -525,16 +521,15 @@ simplExprGently :: SimplEnv -> CoreExpr -> SimplM CoreExpr
simplExprGently env expr = do
expr1 <- simplExpr env (occurAnalyseExpr expr)
simplExpr env (occurAnalyseExpr expr1)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The driver for the simplifier}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
simplifyPgm :: CoreToDo -> ModGuts -> CoreM ModGuts
simplifyPgm pass guts
= do { hsc_env <- getHscEnv
@@ -700,14 +695,13 @@ dump_end_iteration dflags print_unqual iteration_no counts binds rules
pp_counts = vcat [ ptext (sLit "---- Simplifier counts for") <+> hdr
, pprSimplCount counts
, ptext (sLit "---- End of simplifier counts for") <+> hdr ]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Shorting out indirections
-%* *
-%************************************************************************
+* *
+************************************************************************
If we have this:
@@ -826,8 +820,8 @@ could be eliminated. But I don't think it's very common
and it's dangerous to do this fiddling in STG land
because we might eliminate a binding that's mentioned in the
unfolding for something.
+-}
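The shape of the transformation, as a hedged sketch (x_local and x_exported
are illustrative names): an exported Id bound to a non-exported one has the
RHS moved onto the exported binder and the indirection reversed,
    x_local    = <expr>             x_exported = <expr>
    x_exported = x_local    ==>     x_local    = x_exported
after which the local binding can be dropped if it is no longer used.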
-\begin{code}
type IndEnv = IdEnv Id -- Maps local_id -> exported_id
shortOutIndirections :: CoreProgram -> CoreProgram
@@ -920,4 +914,3 @@ transferIdInfo exported_id local_id
(specInfo local_info)
-- Remember to set the function-name field of the
-- rules as we transfer them from one function to another
-\end{code}
diff --git a/compiler/simplCore/SimplEnv.lhs b/compiler/simplCore/SimplEnv.hs
index d8aec03b03..a5d8551a3a 100644
--- a/compiler/simplCore/SimplEnv.lhs
+++ b/compiler/simplCore/SimplEnv.hs
@@ -1,9 +1,9 @@
-%
-% (c) The AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The AQUA Project, Glasgow University, 1993-1998
+
\section[SimplMonad]{The simplifier Monad}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module SimplEnv (
@@ -61,15 +61,15 @@ import FastString
import Util
import Data.List
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Simplify-types]{Type declarations}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
type InBndr = CoreBndr
type InVar = Var -- Not yet cloned
type InId = Id -- Not yet cloned
@@ -90,16 +90,15 @@ type OutBind = CoreBind
type OutExpr = CoreExpr
type OutAlt = CoreAlt
type OutArg = CoreArg
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{The @SimplEnv@ type}
-%* *
-%************************************************************************
-
+* *
+************************************************************************
+-}
-\begin{code}
data SimplEnv
= SimplEnv {
----------- Static part of the environment -----------
@@ -159,8 +158,8 @@ instance Outputable SimplSR where
-- fvs = exprFreeVars e
-- filter_env env = filterVarEnv_Directly keep env
-- keep uniq _ = uniq `elemUFM_Directly` fvs
-\end{code}
+{-
Note [SimplEnv invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~
seInScope:
@@ -224,9 +223,8 @@ seIdSubst:
map to the same target: x->x, y->x. Notably:
case y of x { ... }
That's why the "set" is actually a VarEnv Var
+-}
-
-\begin{code}
mkSimplEnv :: SimplifierMode -> SimplEnv
mkSimplEnv mode
= SimplEnv { seMode = mode
@@ -240,8 +238,8 @@ mkSimplEnv mode
init_in_scope :: InScopeSet
init_in_scope = mkInScopeSet (unitVarSet (mkWildValBinder unitTy))
-- See Note [WildCard binders]
-\end{code}
+{-
Note [WildCard binders]
~~~~~~~~~~~~~~~~~~~~~~~
The program to be simplified may have wild binders
@@ -259,8 +257,8 @@ thing. Generally, you want to run the simplifier to get rid of the
wild-ids before doing much else.
It's a very dark corner of GHC. Maybe it should be cleaned up.
+-}
-\begin{code}
getMode :: SimplEnv -> SimplifierMode
getMode env = seMode env
@@ -330,15 +328,13 @@ setSubstEnv env tvs cvs ids = env { seTvSubst = tvs, seCvSubst = cvs, seIdSubst
mkContEx :: SimplEnv -> InExpr -> SimplSR
mkContEx (SimplEnv { seTvSubst = tvs, seCvSubst = cvs, seIdSubst = ids }) e = ContEx tvs cvs ids e
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Floats}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Simplifier floats]
~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -359,8 +355,8 @@ Examples
Can't happen:
NonRec x# (a /# b) -- Might fail; does not satisfy let/app
NonRec x# (f y) -- Might diverge; does not satisfy let/app
+-}
-\begin{code}
data Floats = Floats (OrdList OutBind) FloatFlag
-- See Note [Simplifier floats]
@@ -399,25 +395,25 @@ doFloatFromRhs :: TopLevelFlag -> RecFlag -> Bool -> OutExpr -> SimplEnv -> Bool
doFloatFromRhs lvl rec str rhs (SimplEnv {seFloats = Floats fs ff})
= not (isNilOL fs) && want_to_float && can_float
where
- want_to_float = isTopLevel lvl || exprIsCheap rhs || exprIsExpandable rhs
+ want_to_float = isTopLevel lvl || exprIsCheap rhs || exprIsExpandable rhs
-- See Note [Float when cheap or expandable]
can_float = case ff of
FltLifted -> True
FltOkSpec -> isNotTopLevel lvl && isNonRec rec
FltCareful -> isNotTopLevel lvl && isNonRec rec && str
-\end{code}
+{-
Note [Float when cheap or expandable]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to float a let from a let if the residual RHS is
a) cheap, such as (\x. blah)
b) expandable, such as (f b) if f is CONLIKE
-But there are
+But there are
- cheap things that are not expandable (eg \x. expensive)
- expandable things that are not cheap (eg (f b) where b is CONLIKE)
so we must take the 'or' of the two.
+-}
-\begin{code}
emptyFloats :: Floats
emptyFloats = Floats nilOL FltLifted
@@ -489,8 +485,8 @@ getFloatBinds (SimplEnv {seFloats = Floats bs _})
isEmptyFloats :: SimplEnv -> Bool
isEmptyFloats (SimplEnv {seFloats = Floats bs _})
= isNilOL bs
-\end{code}
+{-
-- mapFloats commented out: used only in a commented-out bit of Simplify,
-- concerning ticks
--
@@ -502,11 +498,11 @@ isEmptyFloats (SimplEnv {seFloats = Floats bs _})
-- app (Rec bs) = Rec (map fun bs)
-%************************************************************************
-%* *
+************************************************************************
+* *
Substitution of Vars
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Global Ids in the substitution]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -518,8 +514,8 @@ for a LocalId version of g (with the same unique though):
... case X.g_34 of { (p,q) -> ...} ... }
So we want to look up the inner X.g_34 in the substitution, where we'll
find that it has been substituted by b. (Or conceivably cloned.)
+-}
-\begin{code}
substId :: SimplEnv -> InId -> SimplSR
-- Returns DoneEx only on a non-Var expression
substId (SimplEnv { seInScope = in_scope, seIdSubst = ids }) v
@@ -547,19 +543,18 @@ lookupRecBndr (SimplEnv { seInScope = in_scope, seIdSubst = ids }) v
Just (DoneId v) -> v
Just _ -> pprPanic "lookupRecBndr" (ppr v)
Nothing -> refine in_scope v
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\section{Substituting an Id binder}
-%* *
-%************************************************************************
+* *
+************************************************************************
These functions are in the monad only so that they can be made strict via seq.
+-}
-\begin{code}
simplBinders, simplLamBndrs
:: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
simplBinders env bndrs = mapAccumLM simplBinder env bndrs
@@ -656,9 +651,7 @@ substNonCoVarIdBndr env@(SimplEnv { seInScope = in_scope, seIdSubst = id_subst }
= extendVarEnv id_subst old_id (DoneId new_id)
| otherwise
= delVarEnv id_subst old_id
-\end{code}
-\begin{code}
------------------------------------
seqTyVar :: TyVar -> ()
seqTyVar b = b `seq` ()
@@ -671,9 +664,8 @@ seqId id = seqType (idType id) `seq`
seqIds :: [Id] -> ()
seqIds [] = ()
seqIds (id:ids) = seqId id `seq` seqIds ids
-\end{code}
-
+{-
Note [Arity robustness]
~~~~~~~~~~~~~~~~~~~~~~~
We *do* transfer the arity from the in_id of a let binding to the
@@ -719,9 +711,8 @@ cases where he really, really wanted a RULE for a recursive function
to apply in that function's own right-hand side.
See Note [Loop breaking and RULES] in OccAnal.
+-}
-
-\begin{code}
addBndrRules :: SimplEnv -> InBndr -> OutBndr -> (SimplEnv, OutBndr)
-- Rules are added back into the bin
addBndrRules env in_id out_id
@@ -732,16 +723,15 @@ addBndrRules env in_id out_id
old_rules = idSpecialisation in_id
new_rules = CoreSubst.substSpec subst out_id old_rules
final_id = out_id `setIdSpecialisation` new_rules
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Impedance matching to type substitution
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
getTvSubst :: SimplEnv -> TvSubst
getTvSubst (SimplEnv { seInScope = in_scope, seTvSubst = tv_env })
= mkTvSubst in_scope tv_env
@@ -813,5 +803,3 @@ substUnfolding :: SimplEnv -> Unfolding -> Unfolding
substUnfolding env unf = CoreSubst.substUnfolding (mkCoreSubst (text "subst-unfolding") env) unf
-- Do *not* short-cut in the case of an empty substitution
-- See Note [SimplEnv invariants]
-\end{code}
-
diff --git a/compiler/simplCore/SimplMonad.lhs b/compiler/simplCore/SimplMonad.hs
index ca14688583..451bf34f7c 100644
--- a/compiler/simplCore/SimplMonad.lhs
+++ b/compiler/simplCore/SimplMonad.hs
@@ -1,9 +1,9 @@
-%
-% (c) The AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The AQUA Project, Glasgow University, 1993-1998
+
\section[SimplMonad]{The simplifier Monad}
+-}
-\begin{code}
module SimplMonad (
-- The monad
SimplM,
@@ -31,18 +31,18 @@ import FastString
import MonadUtils
import ErrUtils
import Control.Monad ( when, liftM, ap )
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Monad plumbing}
-%* *
-%************************************************************************
+* *
+************************************************************************
For the simplifier monad, we want to {\em thread} a unique supply and a counter.
(Command-line switches move around through the explicitly-passed SimplEnv.)
+-}
-\begin{code}
newtype SimplM result
= SM { unSM :: SimplTopEnv -- Envt that does not change much
-> UniqSupply -- We thread the unique supply because
@@ -57,9 +57,7 @@ data SimplTopEnv
-- Zero means infinity!
, st_rules :: RuleBase
, st_fams :: (FamInstEnv, FamInstEnv) }
-\end{code}
-\begin{code}
initSmpl :: DynFlags -> RuleBase -> (FamInstEnv, FamInstEnv)
-> UniqSupply -- No init count; set to 0
-> Int -- Size of the bindings, used to limit
@@ -136,19 +134,18 @@ thenSmpl_ m k
traceSmpl :: String -> SDoc -> SimplM ()
traceSmpl herald doc
= do { dflags <- getDynFlags
- ; when (dopt Opt_D_dump_simpl_trace dflags) $ liftIO $
+ ; when (dopt Opt_D_dump_simpl_trace dflags) $ liftIO $
printInfoForUser dflags alwaysQualify $
hang (text herald) 2 doc }
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The unique supply}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance MonadUnique SimplM where
getUniqueSupplyM
= SM (\_st_env us sc -> case splitUniqSupply us of
@@ -179,16 +176,15 @@ getFamEnvs = SM (\st_env us sc -> return (st_fams st_env, us, sc))
newId :: FastString -> Type -> SimplM Id
newId fs ty = do uniq <- getUniqueM
return (mkSysLocal fs uniq ty)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Counting up what we've done}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
getSimplCount :: SimplM SimplCount
getSimplCount = SM (\_st_env us sc -> return (sc, us, sc))
@@ -220,4 +216,3 @@ freeTick :: Tick -> SimplM ()
freeTick t
= SM (\_st_env us sc -> let sc' = doFreeSimplTick t sc
in sc' `seq` return ((), us, sc'))
-\end{code}
diff --git a/compiler/simplCore/SimplUtils.lhs b/compiler/simplCore/SimplUtils.hs
index 1cfba43c5e..eec0f4b230 100644
--- a/compiler/simplCore/SimplUtils.lhs
+++ b/compiler/simplCore/SimplUtils.hs
@@ -1,9 +1,9 @@
-%
-% (c) The AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The AQUA Project, Glasgow University, 1993-1998
+
\section[SimplUtils]{The simplifier utilities}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module SimplUtils (
@@ -17,16 +17,16 @@ module SimplUtils (
simplEnvForGHCi, updModeForStableUnfoldings,
-- The continuation type
- SimplCont(..), DupFlag(..),
+ SimplCont(..), DupFlag(..),
isSimplified,
contIsDupable, contResultType, contInputType,
contIsTrivial, contArgs, dropArgs,
- pushSimplifiedArgs, countValArgs, countArgs,
+ pushSimplifiedArgs, countValArgs, countArgs,
mkBoringStop, mkRhsStop, mkLazyArgStop, contIsRhsOrArg,
- interestingCallContext, interestingArg,
+ interestingCallContext, interestingArg,
-- ArgInfo
- ArgInfo(..), ArgSpec(..), mkArgInfo, addArgTo, addCastTo,
+ ArgInfo(..), ArgSpec(..), mkArgInfo, addArgTo, addCastTo,
argInfoExpr, argInfoValArgs,
abstractFloats
@@ -62,14 +62,13 @@ import FastString
import Pair
import Control.Monad ( when )
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
The SimplCont type
-%* *
-%************************************************************************
+* *
+************************************************************************
A SimplCont allows the simplifier to traverse the expression in a
zipper-like fashion. The SimplCont represents the rest of the expression,
@@ -90,8 +89,8 @@ Key points:
* A SimplCont describes a context that *does not* bind
any variables. E.g. \x. [] is not a SimplCont
+-}
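A minimal illustration (a and b are arbitrary arguments): when the simplifier
walks into the function position of
    f a b
the expression in focus is just f, and the SimplCont records the rest,
roughly  ApplyTo _ a _ (ApplyTo _ b _ (Stop ...)),  i.e. the context
<hole> a b.  Such a context binds no variables, as the key point above
requires.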
-\begin{code}
data SimplCont
= Stop -- An empty context, or <hole>
OutType -- Type of the <hole>
@@ -210,8 +209,8 @@ instance Outputable DupFlag where
ppr OkToDup = ptext (sLit "ok")
ppr NoDup = ptext (sLit "nodup")
ppr Simplified = ptext (sLit "simpl")
-\end{code}
+{-
Note [DupFlag invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~
In both (ApplyTo dup _ env k)
@@ -221,8 +220,8 @@ the following invariants hold
(a) if dup = OkToDup, then continuation k is also ok-to-dup
(b) if dup = OkToDup or Simplified, the subst-env is empty
(and hence no need to re-simplify)
+-}
-\begin{code}
-------------------
mkBoringStop :: OutType -> SimplCont
mkBoringStop ty = Stop ty BoringCtxt
@@ -297,7 +296,7 @@ countArgs _ = 0
contArgs :: SimplCont -> (Bool, [ArgSummary], SimplCont)
-- Summarises value args, discards type args and coercions
--- The returned continuation of the call is only used to
+-- The returned continuation of the call is only used to
-- answer questions like "are you interesting?"
contArgs cont
| lone cont = (True, [], cont)
@@ -326,9 +325,8 @@ dropArgs :: Int -> SimplCont -> SimplCont
dropArgs 0 cont = cont
dropArgs n (ApplyTo _ _ _ cont) = dropArgs (n-1) cont
dropArgs n other = pprPanic "dropArgs" (ppr n <+> ppr other)
-\end{code}
-
+{-
Note [Interesting call context]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to avoid inlining an expression where there can't possibly be
@@ -361,9 +359,8 @@ since we can just eliminate this case instead (x is in WHNF). Similar
applies when x is bound to a lambda expression. Hence
contIsInteresting looks for case expressions with just a single
default case.
+-}
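For instance, a sketch of the non-interesting shape:
    case x of _ -> blah
Scrutinising x here gains nothing: the case with a lone default can simply be
eliminated (x is in WHNF), so this context is no reason to inline x.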
-
-\begin{code}
interestingCallContext :: SimplCont -> CallCtxt
-- See Note [Interesting call context]
interestingCallContext cont
@@ -511,14 +508,13 @@ interestingArgContext rules call_cont
interesting RuleArgCtxt = True
interesting _ = False
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
SimplifierMode
-%* *
-%************************************************************************
+* *
+************************************************************************
The SimplifierMode controls several switches; see its definition in
CoreMonad
@@ -526,8 +522,8 @@ CoreMonad
sm_inline :: Bool -- Whether inlining is enabled
sm_case_case :: Bool -- Whether case-of-case is enabled
sm_eta_expand :: Bool -- Whether eta-expansion is enabled
+-}
-\begin{code}
simplEnvForGHCi :: DynFlags -> SimplEnv
simplEnvForGHCi dflags
= mkSimplEnv $ SimplMode { sm_names = ["GHCi"]
@@ -553,8 +549,8 @@ updModeForStableUnfoldings inline_rule_act current_mode
where
phaseFromActivation (ActiveAfter n) = Phase n
phaseFromActivation _ = InitialPhase
-\end{code}
+{-
Note [Inlining in gentle mode]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Something is inlined if
@@ -670,8 +666,8 @@ the wrapper (initially, the worker's only call site!). But,
if the wrapper is sure to be called, the strictness analyser will
mark it 'demanded', so when the RHS is simplified, it'll get an ArgOf
continuation.
+-}
-\begin{code}
activeUnfolding :: SimplEnv -> Id -> Bool
activeUnfolding env
| not (sm_inline mode) = active_unfolding_minimal
@@ -733,15 +729,13 @@ activeRule env
| otherwise = isActive (sm_phase mode)
where
mode = getMode env
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
preInlineUnconditionally
-%* *
-%************************************************************************
+* *
+************************************************************************
preInlineUnconditionally
~~~~~~~~~~~~~~~~~~~~~~~~
@@ -851,8 +845,8 @@ Note [Do not inline CoVars unconditionally]
Coercion variables appear inside coercions, and the RHS of a let-binding
is a term (not a coercion) so we can't necessarily inline the latter in
the former.
+-}
-\begin{code}
preInlineUnconditionally :: DynFlags -> SimplEnv -> TopLevelFlag -> InId -> InExpr -> Bool
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
@@ -922,13 +916,12 @@ preInlineUnconditionally dflags env top_lvl bndr rhs
-- top level things, but then we become more leery about inlining
-- them.
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
postInlineUnconditionally
-%* *
-%************************************************************************
+* *
+************************************************************************
postInlineUnconditionally
~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -957,8 +950,8 @@ it's best to inline it anyway. We often get a=E; b=a from desugaring,
with both a and b marked NOINLINE. But that seems incompatible with
our new view that inlining is like a RULE, so I'm sticking to the 'active'
story for now.
+-}
-\begin{code}
postInlineUnconditionally
:: DynFlags -> SimplEnv -> TopLevelFlag
-> OutId -- The binder (an InId would be fine too)
@@ -1041,8 +1034,8 @@ postInlineUnconditionally dflags env top_lvl bndr occ_info rhs unfolding
where
active = isActive (sm_phase (getMode env)) (idInlineActivation bndr)
-- See Note [pre/postInlineUnconditionally in gentle mode]
-\end{code}
+{-
Note [Top level and postInlineUnconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't do postInlineUnconditionally for top-level things (even for
@@ -1089,13 +1082,13 @@ won't inline because 'e' is too big.
c.f. Note [Stable unfoldings and preInlineUnconditionally]
-%************************************************************************
-%* *
+************************************************************************
+* *
Rebuilding a lambda
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
mkLam :: [OutBndr] -> OutExpr -> SimplCont -> SimplM OutExpr
-- mkLam tries three things
-- a) eta reduction, if that gives a trivial expression
@@ -1138,9 +1131,8 @@ mkLam bndrs body cont
| otherwise
= return (mkLams bndrs body)
-\end{code}
-
+{-
Note [Eta expanding lambdas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general we *do* want to eta-expand lambdas. Consider
@@ -1191,13 +1183,13 @@ It does not make sense to transform
/\g. e `cast` g ==> (/\g.e) `cast` (/\g.g)
because the latter is not well-kinded.
-%************************************************************************
-%* *
+************************************************************************
+* *
Eta expansion
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tryEtaExpandRhs :: SimplEnv -> OutId -> OutExpr -> SimplM (Arity, OutExpr)
-- See Note [Eta-expanding at let bindings]
tryEtaExpandRhs env bndr rhs
@@ -1226,8 +1218,8 @@ tryEtaExpandRhs env bndr rhs
old_arity = exprArity rhs -- See Note [Do not eta-expand PAPs]
old_id_arity = idArity bndr
-\end{code}
+{-
Note [Eta-expanding at let bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We now eta expand at let-bindings, which is where the payoff comes.
@@ -1256,7 +1248,7 @@ Note [Do not eta-expand PAPs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used to have old_arity = manifestArity rhs, which meant that we
would eta-expand even PAPs. But this gives no particular advantage,
-and can lead to a massive blow-up in code size, exhibited by Trac #9020.
+and can lead to a massive blow-up in code size, exhibited by Trac #9020.
Suppose we have a PAP
foo :: IO ()
foo = returnIO ()
@@ -1276,11 +1268,11 @@ Does it matter not eta-expanding such functions? I'm not sure. Perhaps
strictness analysis will have less to bite on?
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Floating lets out of big lambdas}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Floating and type abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1356,9 +1348,8 @@ as we would normally do.
That's why the whole transformation is part of the same process that
floats let-bindings and constructor arguments out of RHSs. In particular,
it is guarded by the doFloatFromRhs call in simplLazyBind.
+-}
-
-\begin{code}
abstractFloats :: [OutTyVar] -> SimplEnv -> OutExpr -> SimplM ([OutBind], OutExpr)
abstractFloats main_tvs body_env body
= ASSERT( notNull body_floats )
@@ -1437,8 +1428,8 @@ abstractFloats main_tvs body_env body
-- where x* has an INLINE prag on it. Now, once x* is inlined,
-- the occurrences of x' will be just the occurrences originally
-- pinned on x.
-\end{code}
+{-
Note [Abstract over coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a coercion variable (g :: a ~ Int) is free in the RHS, then so is the
@@ -1468,11 +1459,11 @@ Historical note: if you use let-bindings instead of a substitution, beware of th
-- to appear many times. (NB: mkInlineMe eliminates
-- such notes on trivial RHSs, so do it manually.)
-%************************************************************************
-%* *
+************************************************************************
+* *
prepareAlts
-%* *
-%************************************************************************
+* *
+************************************************************************
prepareAlts tries these things:
@@ -1515,8 +1506,8 @@ h y = case y of
If we inline h into f, the default case of the inlined h can't happen.
If we don't notice this, we may end up filtering out *all* the cases
of the inner case y, which give us nowhere to go!
+-}
-\begin{code}
prepareAlts :: OutExpr -> OutId -> [InAlt] -> SimplM ([AltCon], [InAlt])
-- The returned alternatives can be empty, none are possible
prepareAlts scrut case_bndr' alts
@@ -1524,18 +1515,18 @@ prepareAlts scrut case_bndr' alts
-- OutId, it has maximum information; this is important.
-- Test simpl013 is an example
= do { us <- getUniquesM
- ; let (imposs_deflt_cons, refined_deflt, alts')
+ ; let (imposs_deflt_cons, refined_deflt, alts')
= filterAlts us (varType case_bndr') imposs_cons alts
; when refined_deflt $ tick (FillInCaseDefault case_bndr')
-
+
; alts'' <- combineIdenticalAlts case_bndr' alts'
; return (imposs_deflt_cons, alts'') }
where
imposs_cons = case scrut of
Var v -> otherCons (idUnfolding v)
_ -> []
-\end{code}
+{-
Note [Combine identical alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If several alternatives are identical, merge them into
@@ -1578,8 +1569,8 @@ NB: it's important that all this is done in [InAlt], *before* we work
on the alternatives themselves, because Simpify.simplAlt may zap the
occurrence info on the binders in the alternatives, which in turn
defeats combineIdenticalAlts (see Trac #7360).
+-}
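Shape of the transformation (a sketch; constructors A, B, C are invented),
valid only when the alternatives' binders are dead and the right-hand sides
are cheaply equal, as identical_to_alt1 below checks:
    case x of { A -> e; B -> e; C -> e }   ==>   case x of { _ -> e }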
-\begin{code}
combineIdenticalAlts :: OutId -> [InAlt] -> SimplM [InAlt]
-- See Note [Combine identical alternatives]
combineIdenticalAlts case_bndr ((_con1,bndrs1,rhs1) : con_alts)
@@ -1592,14 +1583,13 @@ combineIdenticalAlts case_bndr ((_con1,bndrs1,rhs1) : con_alts)
identical_to_alt1 (_con,bndrs,rhs) = all isDeadBinder bndrs && rhs `cheapEqExpr` rhs1
combineIdenticalAlts _ alts = return alts
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
mkCase
-%* *
-%************************************************************************
+* *
+************************************************************************
mkCase tries these things
@@ -1628,9 +1618,8 @@ mkCase tries these things
False -> False
and similar friends.
+-}
-
-\begin{code}
mkCase, mkCase1, mkCase2
:: DynFlags
-> OutExpr -> OutId
@@ -1720,8 +1709,8 @@ mkCase1 dflags scrut bndr alts_ty alts = mkCase2 dflags scrut bndr alts_ty alts
--------------------------------------------------
mkCase2 _dflags scrut bndr alts_ty alts
= return (Case scrut bndr alts_ty alts)
-\end{code}
+{-
Note [Dead binders]
~~~~~~~~~~~~~~~~~~~~
Note that dead-ness is maintained by the simplifier, so that it is
@@ -1787,5 +1776,4 @@ without getting changed to c1=I# c2.
I don't think this is worth fixing, even if I knew how. It'll
all come out in the next pass anyway.
-
-
+-}
diff --git a/compiler/simplCore/Simplify.lhs b/compiler/simplCore/Simplify.hs
index cc55529906..7611f56a4b 100644
--- a/compiler/simplCore/Simplify.lhs
+++ b/compiler/simplCore/Simplify.hs
@@ -1,9 +1,9 @@
-%
-% (c) The AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The AQUA Project, Glasgow University, 1993-1998
+
\section[Simplify]{The main module of the simplifier}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module Simplify ( simplTopBinds, simplExpr ) where
@@ -49,9 +49,8 @@ import FastString
import Pair
import Util
import ErrUtils
-\end{code}
-
+{-
The guts of the simplifier is in this module, but the driver loop for
the simplifier is in SimplCore.lhs.
@@ -205,13 +204,13 @@ we should eta expand wherever we find a (value) lambda? Then the eta
expansion at a let RHS can concentrate solely on the PAP case.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Bindings}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
simplTopBinds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simplTopBinds env0 binds0
@@ -238,19 +237,18 @@ simplTopBinds env0 binds0
simpl_bind env (NonRec b r) = simplRecOrTopPair env' TopLevel NonRecursive b b' r
where
(env', b') = addBndrRules env b (lookupRecBndr env b)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Lazy bindings}
-%* *
-%************************************************************************
+* *
+************************************************************************
simplRecBind is used for
* recursive bindings only
+-}
-\begin{code}
simplRecBind :: SimplEnv -> TopLevelFlag
-> [(InId, InExpr)]
-> SimplM SimplEnv
@@ -272,15 +270,15 @@ simplRecBind env0 top_lvl pairs0
go env ((old_bndr, new_bndr, rhs) : pairs)
= do { env' <- simplRecOrTopPair env top_lvl Recursive old_bndr new_bndr rhs
; go env' pairs }
-\end{code}
+{-
simplRecOrTopPair is used for
* recursive bindings (whether top level or not)
* top-level non-recursive bindings
It assumes the binder has already been simplified, but not its IdInfo.
+-}
-\begin{code}
simplRecOrTopPair :: SimplEnv
-> TopLevelFlag -> RecFlag
-> InId -> OutBndr -> InExpr -- Binder and rhs
@@ -302,9 +300,8 @@ simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs
= pprTrace "SimplBind" (ppr old_bndr) thing_inside
-- trace_bind emits a trace for each top-level binding, which
-- helps to locate the tracing for inlining and rule firing
-\end{code}
-
+{-
simplLazyBind is used for
* [simplRecOrTopPair] recursive bindings (whether top level or not)
* [simplRecOrTopPair] top-level non-recursive bindings
@@ -318,8 +315,8 @@ Nota bene:
3. It does not check for pre-inline-unconditionally;
that should have been done already.
+-}
-\begin{code}
simplLazyBind :: SimplEnv
-> TopLevelFlag -> RecFlag
-> InId -> OutId -- Binder, both pre-and post simpl
@@ -368,12 +365,12 @@ simplLazyBind env top_lvl is_rec bndr bndr1 rhs rhs_se
; return (env', rhs') }
; completeBind env' top_lvl bndr bndr1 rhs' }
-\end{code}
+{-
A specialised variant of simplNonRec used when the RHS is already simplified,
notably in knownCon. It uses case-binding where necessary.
+-}
-\begin{code}
simplNonRecX :: SimplEnv
-> InId -- Old binder
-> OutExpr -- Simplified RHS
@@ -409,8 +406,8 @@ completeNonRecX top_lvl env is_strict old_bndr new_bndr new_rhs
; return (addFloats env env1, rhs1) } -- Add the floats to the main env
else return (env, wrapFloats env1 rhs1) -- Wrap the floats around the RHS
; completeBind env2 NotTopLevel old_bndr new_bndr rhs2 }
-\end{code}
+{-
{- No, no, no! Do not try preInlineUnconditionally in completeNonRecX
Doing so risks exponential behaviour, because new_rhs has been simplified once already
In the cases described by the following comment, postInlineUnconditionally will
@@ -451,8 +448,8 @@ We also want to deal well cases like this
Here we want to make e1,e2 trivial and get
x1 = e1; x2 = e2; v = (f x1 `cast` co) v2
That's what the 'go' loop in prepareRhs does
+-}
-\begin{code}
prepareRhs :: TopLevelFlag -> SimplEnv -> OutId -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Adds new floats to the env iff that allows us to return a good RHS
prepareRhs top_lvl env id (Cast rhs co) -- Note [Float coercions]
@@ -491,9 +488,8 @@ prepareRhs top_lvl env0 _ rhs0
go _ env other
= return (False, env, other)
-\end{code}
-
+{-
Note [Float coercions]
~~~~~~~~~~~~~~~~~~~~~~
When we find the binding
@@ -542,9 +538,8 @@ But 'v' isn't in scope!
These strange casts can happen as a result of case-of-case
bar = case (case x of { T -> (# 2,3 #); F -> error "urk" }) of
(# p,q #) -> p+q
+-}
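The basic move of Note [Float coercions], as a hedged sketch (x' is an
invented fresh name): the cast is peeled off the right-hand side so the
simpler binding underneath can be floated or inlined further,
    x = e `cast` co    ==>    x' = e
                              x  = x' `cast` co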
-
-\begin{code}
makeTrivialArg :: SimplEnv -> ArgSpec -> SimplM (SimplEnv, ArgSpec)
makeTrivialArg env (ValArg e) = do { (env', e') <- makeTrivial NotTopLevel env e
; return (env', ValArg e') }
@@ -589,8 +584,8 @@ bindingOk :: TopLevelFlag -> CoreExpr -> Type -> Bool
bindingOk top_lvl _ expr_ty
| isTopLevel top_lvl = not (isUnLiftedType expr_ty)
| otherwise = True
-\end{code}
+{-
Note [Cannot trivialise]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
@@ -613,11 +608,11 @@ trivial):
We don't want to ANF-ise this.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Completing a lazy binding}
-%* *
-%************************************************************************
+* *
+************************************************************************
completeBind
* deals only with Ids, not TyVars
@@ -637,8 +632,8 @@ It does *not* attempt to do let-to-case. Why? Because it is used for
(so let-to-case is inappropriate).
Nor does it do the atomic-argument thing
+-}
-\begin{code}
completeBind :: SimplEnv
-> TopLevelFlag -- Flag stuck into unfolding
-> InId -- Old binder
@@ -782,8 +777,8 @@ simplUnfolding env top_lvl id new_rhs unf
act = idInlineActivation id
rule_env = updMode (updModeForStableUnfoldings act) env
-- See Note [Simplifying inside stable unfoldings] in SimplUtils
-\end{code}
+{-
Note [Force bottoming field]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to force bottoming, or the new unfolding holds
@@ -845,11 +840,11 @@ After inlining f at some of its call sites the original binding may
The solution here is a bit ad hoc...
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection[Simplify-simplExpr]{The main function: simplExpr}
-%* *
-%************************************************************************
+* *
+************************************************************************
The reason for this OutExprStuff stuff is that we want to float *after*
simplifying a RHS, not before. If we do so naively we get quadratic
@@ -887,9 +882,8 @@ whole round if we float first. This can cascade. Consider
Only in this second round can the \y be applied, and it
might do the same again.
+-}
-
-\begin{code}
simplExpr :: SimplEnv -> CoreExpr -> SimplM CoreExpr
simplExpr env expr = simplExprC env expr (mkBoringStop expr_out_ty)
where
@@ -1149,16 +1143,15 @@ simplTick env tickish expr cont
-- So we've moved a constant amount of work out of the scc to expose
-- the case. We only do this when the continuation is interesting: in
-- for now, it has to be another Case (maybe generalise this later).
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The main rebuilder}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
rebuild :: SimplEnv -> OutExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- At this point the substitution in the SimplEnv should be irrelevant
-- only the in-scope set and floats should matter
@@ -1178,16 +1171,15 @@ rebuild env expr cont
| otherwise -> do { arg' <- simplExpr (se `setInScope` env) arg
; rebuild env (App expr arg') cont }
TickIt t cont -> rebuild env (mkTick t expr) cont
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Lambdas}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
simplCast :: SimplEnv -> InExpr -> Coercion -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplCast env body co0 cont0
@@ -1253,14 +1245,13 @@ simplCast env body co0 cont0
arg_se' = arg_se `setInScope` env
add_coerce co _ cont = CoerceIt co cont
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Lambdas}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Zap unfolding when beta-reducing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1274,8 +1265,8 @@ stupid situation of
let b{Unf=Just x} = y
in ...b...
Here it'd be far better to drop the unfolding and use the actual RHS.
+-}
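A sketch of how that situation arises (y is an arbitrary argument):
beta-reducing
    (\b{Unf=Just x}. ...b...) y
naively yields the binding shown above, let b{Unf=Just x} = y in ...b...,
whose stale unfolding no longer describes b's actual right-hand side; dropping
the unfolding and using y itself is the better outcome.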
-\begin{code}
simplLam :: SimplEnv -> [InId] -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
@@ -1355,15 +1346,15 @@ simplNonRecE env bndr (rhs, rhs_se) (bndrs, body) cont
; let (env2, bndr2) = addBndrRules env1 bndr bndr1
; env3 <- simplLazyBind env2 NotTopLevel NonRecursive bndr bndr2 rhs rhs_se
; simplLam env3 bndrs body cont }
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Variables
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
simplVar :: SimplEnv -> InVar -> SimplM OutExpr
-- Look up an InVar in the environment
simplVar env var
@@ -1501,8 +1492,8 @@ rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_rules = rules })
-- Rules don't match
; Nothing -> rebuild env (argInfoExpr fun rev_args) cont -- No rules
} }
-\end{code}
+{-
Note [RULES apply to simplified arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very desirable to try RULES once the arguments have been simplified, because
@@ -1550,13 +1541,13 @@ discard the entire application and replace it with (error "foo"). Getting
all this at once is TOO HARD!
-%************************************************************************
-%* *
+************************************************************************
+* *
Rewrite rules
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
tryRules :: SimplEnv -> [CoreRule]
-> Id -> [OutExpr] -> SimplCont
-> SimplM (Maybe (CoreExpr, SimplCont))
@@ -1618,8 +1609,8 @@ tryRules env rules fn args call_cont
log_rule dflags flag hdr details
= liftIO . dumpSDoc dflags alwaysQualify flag "" $
sep [text hdr, nest 4 details]
-\end{code}
+{-
Note [Optimising tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an enumeration data type:
@@ -1654,11 +1645,11 @@ is recursive, and hence a loop breaker:
So it's up to the programmer: rules can cause divergence
-%************************************************************************
-%* *
+************************************************************************
+* *
Rebuilding a case expression
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Case elimination]
~~~~~~~~~~~~~~~~~~~~~~~
@@ -1754,7 +1745,7 @@ let-bound to (error "good").
Nevertheless, the paper "A semantics for imprecise exceptions" allows
this transformation. If you want to fix the evaluation order, use
'pseq'. See Trac #8900 for an example where the loss of this
-transformation bit us in practice.
+transformation bit us in practice.
See also Note [Empty case alternatives] in CoreSyn.
@@ -1828,8 +1819,8 @@ Why don't we drop the case? Because it's strict in v. It's technically
wrong to drop even unnecessary evaluations, and in practice they
may be a result of 'seq' so we *definitely* don't want to drop those.
I don't really know how to improve this situation.
+-}
-\begin{code}
---------------------------------------------------------
-- Eliminate the case if possible
@@ -1957,8 +1948,8 @@ reallyRebuildCase env scrut case_bndr alts cont
-- (which in any case is only build in simplAlts)
-- The case binder does *not* scope over the whole returned case-expression
; rebuild env' case_expr nodup_cont }
-\end{code}
+{-
simplCaseBinder checks whether the scrutinee is a variable, v. If so,
try to eliminate uses of v in the RHSs in favour of case_bndr; that
way, there's a chance that v will now only be used once, and hence
@@ -2039,8 +2030,8 @@ taking advantage of the `seq`.
At one point I did transformation in LiberateCase, but it's more
robust here. (Otherwise, there's a danger that we'll simply drop the
'seq' altogether, before LiberateCase gets to see it.)
+-}
-\begin{code}
simplAlts :: SimplEnv
-> OutExpr
-> InId -- Case binder
@@ -2183,8 +2174,8 @@ zapBndrOccInfo :: Bool -> Id -> Id
zapBndrOccInfo keep_occ_info pat_id
| keep_occ_info = pat_id
| otherwise = zapIdOccInfo pat_id
-\end{code}
+{-
Note [Add unfolding for scrutinee]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general it's unlikely that a variable scrutinee will appear
@@ -2220,11 +2211,11 @@ So instead we add the unfolding x -> Just a, and x -> Nothing in the
respective RHSs.
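Concretely (a sketch, with a as the pattern variable):
    case x of
      Just a  -> ...x...    -- here x gets the unfolding (Just a)
      Nothing -> ...x...    -- here x gets the unfolding Nothing
so later occurrences of x in each branch can exploit what the match revealed.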
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Known constructor}
-%* *
-%************************************************************************
+* *
+************************************************************************
We are a bit careful with occurrence info. Here's an example
@@ -2238,8 +2229,8 @@ and then
f (h v)
All this should happen in one sweep.
+-}
-\begin{code}
knownCon :: SimplEnv
-> OutExpr -- The scrutinee
-> DataCon -> [OutType] -> [OutExpr] -- The scrutinee (in pieces)
@@ -2304,16 +2295,15 @@ missingAlt :: SimplEnv -> Id -> [InAlt] -> SimplCont -> SimplM (SimplEnv, OutExp
missingAlt env case_bndr _ cont
= WARN( True, ptext (sLit "missingAlt") <+> ppr case_bndr )
return (env, mkImpossibleExpr (contResultType cont))
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Duplicating continuations}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
prepareCaseCont :: SimplEnv
-> [InAlt] -> SimplCont
-> SimplM (SimplEnv,
@@ -2346,8 +2336,8 @@ prepareCaseCont env alts cont
| otherwise = not (all is_bot_alt alts)
is_bot_alt (_,_,rhs) = exprIsBottom rhs
-\end{code}
+{-
Note [Bottom alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have
@@ -2358,8 +2348,8 @@ will disappear immediately. This is more direct than creating
join points and inlining them away; and in some cases we would
not even create the join points (see Note [Single-alternative case])
and we would keep the case-of-case which is silly. See Trac #4930.
+-}
-\begin{code}
mkDupableCont :: SimplEnv -> SimplCont
-> SimplM (SimplEnv, SimplCont, SimplCont)
@@ -2512,8 +2502,8 @@ mkDupableAlt env case_bndr (con, bndrs', rhs') = do
; env' <- addPolyBind NotTopLevel env (NonRec (join_bndr `setIdArity` join_arity) join_rhs)
; return (env', (con, bndrs', join_call)) }
-- See Note [Duplicated env]
-\end{code}
+{-
Note [Fusing case continuations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's important to fuse two successive case continuations when the
@@ -2846,3 +2836,4 @@ whether to use a real join point or just duplicate the continuation:
Hence: check whether the case binder's type is unlifted, because then
the outer case is *not* a seq.
+-}
diff --git a/compiler/simplStg/SimplStg.lhs b/compiler/simplStg/SimplStg.hs
index 4d33e3392e..b8804a47dd 100644
--- a/compiler/simplStg/SimplStg.lhs
+++ b/compiler/simplStg/SimplStg.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
+
\section[SimplStg]{Driver for simplifying @STG@ programs}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module SimplStg ( stg2stg ) where
@@ -25,9 +25,7 @@ import SrcLoc
import UniqSupply ( mkSplitUniqSupply, splitUniqSupply )
import Outputable
import Control.Monad
-\end{code}
-\begin{code}
stg2stg :: DynFlags -- includes spec of what stg-to-stg passes to do
-> Module -- module name (profiling only)
-> [StgBinding] -- input...
@@ -89,4 +87,3 @@ stg2stg dflags module_name binds
-- UniqueSupply for the next guy to use
-- cost-centres to be declared/registered (specialised)
-- add to description of what's happened (reverse order)
-\end{code}
diff --git a/compiler/simplStg/StgStats.lhs b/compiler/simplStg/StgStats.hs
index 2a776757da..4823baea3d 100644
--- a/compiler/simplStg/StgStats.lhs
+++ b/compiler/simplStg/StgStats.hs
@@ -1,6 +1,6 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[StgStats]{Gathers statistical information about programs}
@@ -19,8 +19,8 @@ The program gather statistics about
%\item number of top-level CAFs
\item number of constructors
\end{enumerate}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module StgStats ( showStgStats ) where
@@ -34,9 +34,7 @@ import Panic
import Data.Map (Map)
import qualified Data.Map as Map
-\end{code}
-\begin{code}
data CounterType
= Literals
| Applications
@@ -53,9 +51,7 @@ data CounterType
type Count = Int
type StatEnv = Map CounterType Count
-\end{code}
-\begin{code}
emptySE :: StatEnv
emptySE = Map.empty
@@ -70,15 +66,15 @@ countOne c = Map.singleton c 1
countN :: CounterType -> Int -> StatEnv
countN = Map.singleton
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Top-level list of bindings (a ``program'')}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
showStgStats :: [StgBinding] -> String
showStgStats prog
@@ -107,15 +103,15 @@ gatherStgStats :: [StgBinding] -> StatEnv
gatherStgStats binds
= combineSEs (map (statBinding True{-top-level-}) binds)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Bindings}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
statBinding :: Bool -- True <=> top-level; False <=> nested
-> StgBinding
-> StatEnv
@@ -140,15 +136,15 @@ statRhs top (_, StgRhsClosure _ _ fv u _ _ body)
Updatable -> UpdatableBinds top
SingleEntry -> SingleEntryBinds top
)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
statExpr :: StgExpr -> StatEnv
statExpr (StgApp _ _) = countOne Applications
@@ -176,5 +172,3 @@ statExpr (StgCase expr _ _ _ _ _ alts)
= combineSEs (map statExpr [ e | (_,_,_,e) <- alts ])
statExpr (StgLam {}) = panic "statExpr StgLam"
-\end{code}
-
diff --git a/compiler/simplStg/UnariseStg.lhs b/compiler/simplStg/UnariseStg.hs
index 1f121f71fd..303bfa74ee 100644
--- a/compiler/simplStg/UnariseStg.lhs
+++ b/compiler/simplStg/UnariseStg.hs
@@ -1,6 +1,6 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-2012
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-2012
+
Note [Unarisation]
~~~~~~~~~~~~~~~~~~
@@ -25,8 +25,8 @@ Because of unarisation, the arity that will be recorded in the generated info ta
for an Id may be larger than the idArity. Instead we record what we call the RepArity,
which is the Arity taking into account any expanded arguments, and corresponds to
the number of (possibly-void) *registers* arguments will arrive in.
+-}
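A small illustration (f and its type are assumptions): a single unboxed-tuple
argument is expanded into its components, so the info-table arity exceeds the
idArity,
    f :: (# Int, Bool #) -> Int    -- idArity = 1
    -- after unarisation the arguments arrive in two registers,
    -- as if  f :: Int -> Bool -> Int,  so RepArity = 2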
-\begin{code}
{-# LANGUAGE CPP #-}
module UnariseStg (unarise) where
@@ -69,13 +69,13 @@ unarise us binds = zipWith (\us -> unariseBinding us init_env) (listSplitUniqSup
unariseBinding :: UniqSupply -> UnariseEnv -> StgBinding -> StgBinding
unariseBinding us rho bind = case bind of
StgNonRec x rhs -> StgNonRec x (unariseRhs us rho rhs)
- StgRec xrhss -> StgRec $ zipWith (\us (x, rhs) -> (x, unariseRhs us rho rhs))
+ StgRec xrhss -> StgRec $ zipWith (\us (x, rhs) -> (x, unariseRhs us rho rhs))
(listSplitUniqSupply us) xrhss
unariseRhs :: UniqSupply -> UnariseEnv -> StgRhs -> StgRhs
unariseRhs us rho rhs = case rhs of
StgRhsClosure ccs b_info fvs update_flag srt args expr
- -> StgRhsClosure ccs b_info (unariseIds rho fvs) update_flag
+ -> StgRhsClosure ccs b_info (unariseIds rho fvs) update_flag
(unariseSRT rho srt) args' (unariseExpr us' rho' expr)
where (us', rho', args') = unariseIdBinders us rho args
StgRhsCon ccs con args
@@ -86,21 +86,21 @@ unariseExpr :: UniqSupply -> UnariseEnv -> StgExpr -> StgExpr
unariseExpr _ rho (StgApp f args)
| null args
, UbxTupleRep tys <- repType (idType f)
- = -- Particularly important where (##) is concerned
+ = -- Particularly important where (##) is concerned
-- See Note [Nullary unboxed tuple]
- StgConApp (tupleCon UnboxedTuple (length tys))
+ StgConApp (tupleCon UnboxedTuple (length tys))
(map StgVarArg (unariseId rho f))
| otherwise
= StgApp f (unariseArgs rho args)
-unariseExpr _ _ (StgLit l)
+unariseExpr _ _ (StgLit l)
= StgLit l
unariseExpr _ rho (StgConApp dc args)
| isUnboxedTupleCon dc = StgConApp (tupleCon UnboxedTuple (length args')) args'
| otherwise = StgConApp dc args'
- where
+ where
args' = unariseArgs rho args
unariseExpr _ rho (StgOpApp op args ty)
@@ -108,26 +108,26 @@ unariseExpr _ rho (StgOpApp op args ty)
unariseExpr us rho (StgLam xs e)
= StgLam xs' (unariseExpr us' rho' e)
- where
+ where
(us', rho', xs') = unariseIdBinders us rho xs
unariseExpr us rho (StgCase e case_lives alts_lives bndr srt alt_ty alts)
- = StgCase (unariseExpr us1 rho e) (unariseLives rho case_lives)
- (unariseLives rho alts_lives) bndr (unariseSRT rho srt)
+ = StgCase (unariseExpr us1 rho e) (unariseLives rho case_lives)
+ (unariseLives rho alts_lives) bndr (unariseSRT rho srt)
alt_ty' alts'
- where
+ where
(us1, us2) = splitUniqSupply us
(alt_ty', alts') = unariseAlts us2 rho alt_ty bndr (repType (idType bndr)) alts
unariseExpr us rho (StgLet bind e)
= StgLet (unariseBinding us1 rho bind) (unariseExpr us2 rho e)
- where
+ where
(us1, us2) = splitUniqSupply us
unariseExpr us rho (StgLetNoEscape live_in_let live_in_bind bind e)
- = StgLetNoEscape (unariseLives rho live_in_let) (unariseLives rho live_in_bind)
+ = StgLetNoEscape (unariseLives rho live_in_let) (unariseLives rho live_in_bind)
(unariseBinding us1 rho bind) (unariseExpr us2 rho e)
- where
+ where
(us1, us2) = splitUniqSupply us
unariseExpr us rho (StgSCC cc bump_entry push_cc e)
@@ -137,19 +137,19 @@ unariseExpr us rho (StgTick mod tick_n e)
------------------------
unariseAlts :: UniqSupply -> UnariseEnv -> AltType -> Id -> RepType -> [StgAlt] -> (AltType, [StgAlt])
-unariseAlts us rho alt_ty _ (UnaryRep _) alts
+unariseAlts us rho alt_ty _ (UnaryRep _) alts
= (alt_ty, zipWith (\us alt -> unariseAlt us rho alt) (listSplitUniqSupply us) alts)
unariseAlts us rho _ bndr (UbxTupleRep tys) ((DEFAULT, [], [], e) : _)
= (UbxTupAlt n, [(DataAlt (tupleCon UnboxedTuple n), ys, uses, unariseExpr us2' rho' e)])
- where
+ where
(us2', rho', ys) = unariseIdBinder us rho bndr
uses = replicate (length ys) (not (isDeadBinder bndr))
n = length tys
-unariseAlts us rho _ bndr (UbxTupleRep _) [(DataAlt _, ys, uses, e)]
+unariseAlts us rho _ bndr (UbxTupleRep _) [(DataAlt _, ys, uses, e)]
= (UbxTupAlt n, [(DataAlt (tupleCon UnboxedTuple n), ys', uses', unariseExpr us2' rho'' e)])
- where
+ where
(us2', rho', ys', uses') = unariseUsedIdBinders us rho ys uses
rho'' = extendVarEnv rho' bndr ys'
n = length ys'
@@ -159,9 +159,9 @@ unariseAlts _ _ _ _ (UbxTupleRep _) alts
--------------------------
unariseAlt :: UniqSupply -> UnariseEnv -> StgAlt -> StgAlt
-unariseAlt us rho (con, xs, uses, e)
+unariseAlt us rho (con, xs, uses, e)
= (con, xs', uses', unariseExpr us' rho' e)
- where
+ where
(us', rho', xs', uses') = unariseUsedIdBinders us rho xs uses
------------------------
@@ -184,9 +184,9 @@ unariseIds :: UnariseEnv -> [Id] -> [Id]
unariseIds rho = concatMap (unariseId rho)
unariseId :: UnariseEnv -> Id -> [Id]
-unariseId rho x
+unariseId rho x
| Just ys <- lookupVarEnv rho x
- = ASSERT2( case repType (idType x) of UbxTupleRep _ -> True; _ -> x == ubxTupleId0
+ = ASSERT2( case repType (idType x) of UbxTupleRep _ -> True; _ -> x == ubxTupleId0
, text "unariseId: not unboxed tuple" <+> ppr x )
ys
@@ -195,9 +195,9 @@ unariseId rho x
, text "unariseId: was unboxed tuple" <+> ppr x )
[x]
-unariseUsedIdBinders :: UniqSupply -> UnariseEnv -> [Id] -> [Bool]
+unariseUsedIdBinders :: UniqSupply -> UnariseEnv -> [Id] -> [Bool]
-> (UniqSupply, UnariseEnv, [Id], [Bool])
-unariseUsedIdBinders us rho xs uses
+unariseUsedIdBinders us rho xs uses
= case mapAccumL2 do_one us rho (zipEqual "unariseUsedIdBinders" xs uses) of
(us', rho', xs_usess) -> uncurry ((,,,) us' rho') (unzip (concat xs_usess))
where
@@ -220,4 +220,3 @@ unboxedTupleBindersFrom us x tys = zipWith (mkSysLocal fs) (uniqsFromSupply us)
concatMapVarSet :: (Var -> [Var]) -> VarSet -> VarSet
concatMapVarSet f xs = mkVarSet [x' | x <- varSetElems xs, x' <- f x]
-\end{code}
\ No newline at end of file
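Note [Unarisation] above is about replacing each unboxed-tuple binder with one
binder per component, guided by an environment (rho) from the original Id to its
expanded Ids, so that arguments arrive in individual registers. A cut-down sketch
of the flattening step only, over a toy term type rather than GHC's StgExpr (all
names here are illustrative; the real pass also rewrites binders and case
alternatives and threads a unique supply):

    import qualified Data.Map as Map

    data Arg  = Var String | Lit Int          deriving Show
    data Expr = App String [Arg] | Tup [Arg]  deriving Show

    -- rho maps a tuple-typed binder to the variables it was expanded into.
    type UnariseEnv = Map.Map String [String]

    unariseArg :: UnariseEnv -> Arg -> [Arg]
    unariseArg rho (Var x) = maybe [Var x] (map Var) (Map.lookup x rho)
    unariseArg _   a       = [a]

    unariseExpr :: UnariseEnv -> Expr -> Expr
    unariseExpr rho (App f args) = App f (concatMap (unariseArg rho) args)
    unariseExpr rho (Tup args)   = Tup   (concatMap (unariseArg rho) args)

    main :: IO ()
    main = print (unariseExpr (Map.fromList [("p", ["p_1", "p_2"])])
                              (App "f" [Var "p", Lit 0]))
    -- App "f" [Var "p_1",Var "p_2",Lit 0]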
diff --git a/compiler/specialise/Rules.lhs b/compiler/specialise/Rules.hs
index 2abf7fbdca..a768896763 100644
--- a/compiler/specialise/Rules.lhs
+++ b/compiler/specialise/Rules.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[CoreRules]{Transformation rules}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
-- | Functions for collecting together and applying rewrite rules to a module.
@@ -58,8 +58,8 @@ import Bag
import Util
import Data.List
import Data.Ord
-\end{code}
+{-
Note [Overall plumbing for rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* After the desugarer:
@@ -125,11 +125,11 @@ Note [Overall plumbing for rules]
matter.]
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection[specialisation-IdInfo]{Specialisation info about an @Id@}
-%* *
-%************************************************************************
+* *
+************************************************************************
A @CoreRule@ holds details of one rule for an @Id@, which
includes its specialisations.
@@ -158,8 +158,8 @@ might have a specialisation
[Int#] ===> (case pi' of Lift pi# -> pi#)
where pi' :: Lift Int# is the specialised version of pi.
+-}
-\begin{code}
mkRule :: Bool -> Bool -> RuleName -> Activation
-> Name -> [CoreBndr] -> [CoreExpr] -> CoreExpr -> CoreRule
-- ^ Used to make 'CoreRule' for an 'Id' defined in the module being
@@ -212,8 +212,8 @@ ruleCantMatch :: [Maybe Name] -> [Maybe Name] -> Bool
ruleCantMatch (Just n1 : ts) (Just n2 : as) = n1 /= n2 || ruleCantMatch ts as
ruleCantMatch (_ : ts) (_ : as) = ruleCantMatch ts as
ruleCantMatch _ _ = False
-\end{code}
+{-
Note [Care with roughTopName]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
@@ -231,9 +231,8 @@ However, suppose we have
where k is a *function* exported by M. We never really match
functions (lambdas) except by name, so in this case it seems like
a good idea to treat 'M.k' as a roughTopName of the call.
+-}
-
-\begin{code}
pprRulesForUser :: [CoreRule] -> SDoc
-- (a) tidy the rules
-- (b) sort them into order based on the rule name
@@ -245,16 +244,15 @@ pprRulesForUser rules
pprRules $
sortBy (comparing ru_name) $
tidyRules emptyTidyEnv rules
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
SpecInfo: the rules in an IdInfo
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Make a 'SpecInfo' containing a number of 'CoreRule's, suitable
-- for putting into an 'IdInfo'
mkSpecInfo :: [CoreRule] -> SpecInfo
@@ -285,8 +283,8 @@ getRules rule_base fn
= idCoreRules fn ++ imp_rules
where
imp_rules = lookupNameEnv rule_base (idName fn) `orElse` []
-\end{code}
+{-
Note [Where rules are found]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The rules for an Id come from two places:
@@ -307,13 +305,13 @@ but that isn't quite right:
the rules are kept in the global RuleBase
-%************************************************************************
-%* *
+************************************************************************
+* *
RuleBase
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Gathers a collection of 'CoreRule's. Maps (the name of) an 'Id' to its rules
type RuleBase = NameEnv [CoreRule]
-- The rules are are unordered;
@@ -339,16 +337,15 @@ extendRuleBase rule_base rule
pprRuleBase :: RuleBase -> SDoc
pprRuleBase rules = vcat [ pprRules (tidyRules emptyTidyEnv rs)
| rs <- nameEnvElts rules ]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Matching
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | The main rule matching function. Attempts to apply all (active)
-- supplied rules to this instance of an application in a given
-- context, returning the rule applied and the resulting expression if
@@ -427,8 +424,8 @@ isMoreSpecific (Rule { ru_bndrs = bndrs1, ru_args = args1 })
noBlackList :: Activation -> Bool
noBlackList _ = False -- Nothing is black listed
-\end{code}
+{-
Note [Extra args in rule matching]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we find a matching rule, we return (Just (rule, rhs)),
@@ -444,8 +441,8 @@ You might think it'd be cleaner for lookupRule to deal with the
leftover arguments, by applying 'rhs' to them, but the main call
in the Simplifier works better as it is. Reason: the 'args' passed
to lookupRule are the result of a lazy substitution
+-}
-\begin{code}
------------------------------------
matchRule :: DynFlags -> InScopeEnv -> (Activation -> Bool)
-> Id -> [CoreExpr] -> [Maybe Name]
@@ -534,8 +531,8 @@ matchN (in_scope, id_unf) tmpl_vars tmpl_es target_es
unbound var = pprPanic "Template variable unbound in rewrite rule"
(ppr var $$ ppr tmpl_vars $$ ppr tmpl_vars' $$ ppr tmpl_es $$ ppr target_es)
-\end{code}
+{-
Note [Template binders]
~~~~~~~~~~~~~~~~~~~~~~~
Consider the following match:
@@ -553,24 +550,24 @@ To achive this, we use rnBndrL to rename the template variables if
necessary; the renamed ones are the tmpl_vars'
-%************************************************************************
-%* *
+************************************************************************
+* *
The main matcher
-%* *
-%************************************************************************
+* *
+************************************************************************
---------------------------------------------
The inner workings of matching
---------------------------------------------
+-}
-\begin{code}
-- * The domain of the TvSubstEnv and IdSubstEnv are the template
-- variables passed into the match.
--
-- * The BindWrapper in a RuleSubst are the bindings floated out
-- from nested matches; see the Let case of match, below
--
-data RuleMatchEnv
+data RuleMatchEnv
= RV { rv_tmpls :: VarSet -- Template variables
, rv_lcl :: RnEnv2 -- Renamings for *local bindings*
-- (lambda/case)
@@ -863,8 +860,8 @@ match_ty renv subst ty1 ty2
where
tv_subst = rs_tv_subst subst
menv = ME { me_tmpls = rv_tmpls renv, me_env = rv_lcl renv }
-\end{code}
+{-
Note [Expanding variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Here is another Very Important rule: if the term being matched is a
@@ -1005,16 +1002,16 @@ That is why the 'lookupRnInScope' call in the (Var v2) case of 'match'
is so important.
-%************************************************************************
-%* *
+************************************************************************
+* *
Rule-check the program
-%* *
-%************************************************************************
+* *
+************************************************************************
We want to know what sites have rules that could have fired but didn't.
This pass runs over the tree (without changing it) and reports such.
+-}
-\begin{code}
-- | Report partial matches for rules beginning with the specified
-- string for the purposes of error reporting
ruleCheckProgram :: CompilerPhase -- ^ Rule activation test
@@ -1068,9 +1065,7 @@ ruleCheckApp :: RuleCheckEnv -> Expr CoreBndr -> [Arg CoreBndr] -> Bag SDoc
ruleCheckApp env (App f a) as = ruleCheck env a `unionBags` ruleCheckApp env f (a:as)
ruleCheckApp env (Var f) as = ruleCheckFun env f as
ruleCheckApp env other _ = ruleCheck env other
-\end{code}
-\begin{code}
ruleCheckFun :: RuleCheckEnv -> Id -> [CoreExpr] -> Bag SDoc
-- Produce a report for all rules matching the predicate
-- saying why it doesn't match the specified application
@@ -1101,7 +1096,7 @@ ruleAppCheck_help env fn args rules
= ptext (sLit "Rule") <+> doubleQuotes (ftext name)
rule_info dflags rule
- | Just _ <- matchRule dflags (emptyInScopeSet, rc_id_unf env)
+ | Just _ <- matchRule dflags (emptyInScopeSet, rc_id_unf env)
noBlackList fn args rough_args rule
= text "matches (which is very peculiar!)"
@@ -1128,5 +1123,3 @@ ruleAppCheck_help env fn args rules
, rv_tmpls = mkVarSet rule_bndrs
, rv_fltR = mkEmptySubst in_scope
, rv_unf = rc_id_unf env }
-\end{code}
-
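The Rules hunks above treat a rewrite rule as a set of template variables plus
argument templates (ru_bndrs, ru_args); matching instantiates the template
variables against the arguments of a call, and the rule's RHS is then returned
under that substitution. A first-order toy matcher showing just that core step
(GHC's matcher also handles types, local binders, lets and alpha-renaming via
RnEnv2; the Expr type and names below are made up for the example):

    import qualified Data.Map as Map

    data Expr = Var String | App Expr Expr | Lit Int  deriving (Eq, Show)

    type Subst = Map.Map String Expr

    -- Match a template against a target, instantiating template variables
    -- consistently (a variable already bound must map to an equal expression).
    match :: [String] -> Expr -> Expr -> Maybe Subst
    match tmpls = go Map.empty
      where
        go s (Var v) e
          | v `elem` tmpls = case Map.lookup v s of
              Nothing -> Just (Map.insert v e s)
              Just e'
                | e' == e   -> Just s
                | otherwise -> Nothing
        go s (App f a) (App f' a') = go s f f' >>= \s' -> go s' a a'
        go s t e
          | t == e    = Just s
          | otherwise = Nothing

    main :: IO ()
    main = print (match ["x"] (App (Var "f") (Var "x")) (App (Var "f") (Lit 3)))
    -- Just (fromList [("x",Lit 3)])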
diff --git a/compiler/specialise/SpecConstr.lhs b/compiler/specialise/SpecConstr.hs
index 6cc8b04f9a..11ba67e8d2 100644
--- a/compiler/specialise/SpecConstr.lhs
+++ b/compiler/specialise/SpecConstr.hs
@@ -1,14 +1,15 @@
+{-
ToDo [Oct 2013]
~~~~~~~~~~~~~~~
1. Nuke ForceSpecConstr for good (it is subsumed by GHC.Types.SPEC in ghc-prim)
2. Nuke NoSpecConstr
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[SpecConstr]{Specialise over constructors}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module SpecConstr(
@@ -65,8 +66,8 @@ type SpecConstrAnnotation = ()
import TyCon ( TyCon )
import GHC.Exts( SpecConstrAnnotation(..) )
#endif
-\end{code}
+{-
-----------------------------------------------------
Game plan
-----------------------------------------------------
@@ -653,13 +654,13 @@ But perhaps the first one isn't good. After all, we know that tpl_B2 is
a T (I# x) really, because T is strict and Int has one constructor. (We can't
unbox the strict fields, because T is polymorphic!)
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Top level wrapper stuff}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
specConstrProgram :: ModGuts -> CoreM ModGuts
specConstrProgram guts
= do
@@ -682,18 +683,17 @@ specConstrProgram guts
go env usg (bind:binds) = do (usg', bind') <- scTopBind env usg bind
binds' <- go env usg' binds
return (bind' : binds')
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Environment: goes downwards}
-%* *
-%************************************************************************
+* *
+************************************************************************
Note [Work-free values only in environment]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The sc_vals field keeps track of in-scope value bindings, so
+The sc_vals field keeps track of in-scope value bindings, so
that if we come across (case x of Just y ->...) we can reduce the
case from knowing that x is bound to a pair.
@@ -703,7 +703,7 @@ then we do NOT want to expand to
let y = expensive v in ...
because the x-binding still exists and we've now duplicated (expensive v).
-This seldom happens because let-bound constructor applications are
+This seldom happens because let-bound constructor applications are
ANF-ised, but it can happen as a result of on-the-fly transformations in
SpecConstr itself. Here is Trac #7865:
@@ -721,7 +721,7 @@ SpecConstr itself. Here is Trac #7865:
(GHC.Types.: @ GHC.Types.Int x_af7 wild_X6) (q'_afe, p'_afd)
} } in
-When processed knowing that xs_af8 was bound to a cons, we simplify to
+When processed knowing that xs_af8 was bound to a cons, we simplify to
a'_shr = (expensive x_af7, x_af7)
and we do NOT want to inline that at the occurrence of a'_shr in ds_sht.
(There are other occurrences of a'_shr.) No no no.
@@ -730,10 +730,10 @@ It would be possible to do some on-the-fly ANF-ising, so that a'_shr turned
into a work-free value again, thus
a1 = expensive x_af7
a'_shr = (a1, x_af7)
-but that's more work, so until its shown to be important I'm going to
+but that's more work, so until its shown to be important I'm going to
leave it for now.
+-}
-\begin{code}
data ScEnv = SCE { sc_dflags :: DynFlags,
sc_size :: Maybe Int, -- Size threshold
sc_count :: Maybe Int, -- Max # of specialisations for any one fn
@@ -869,7 +869,7 @@ extendBndr env bndr = (env { sc_subst = subst' }, bndr')
extendValEnv :: ScEnv -> Id -> Maybe Value -> ScEnv
extendValEnv env _ Nothing = env
-extendValEnv env id (Just cv)
+extendValEnv env id (Just cv)
| valueIsWorkFree cv -- Don't duplicate work!! Trac #7865
= env { sc_vals = extendVarEnv (sc_vals env) id cv }
extendValEnv env _ _ = env
@@ -959,8 +959,8 @@ forceSpecArgTy env ty
|| any (forceSpecArgTy env) tys
forceSpecArgTy _ _ = False
-\end{code}
+{-
Note [Add scrutinee to ValueEnv too]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
@@ -1003,13 +1003,13 @@ So when recursively specialising we divide the sc_count by the number of
copies we are making at this level, including the original.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Usage information: flows upwards}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data ScUsage
= SCU {
scu_calls :: CallEnv, -- Calls
@@ -1114,18 +1114,18 @@ setScrutOcc env usg (Var v) occ
| otherwise = usg
setScrutOcc _env usg _other _occ -- Catch-all
= usg
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The main recursive function}
-%* *
-%************************************************************************
+* *
+************************************************************************
The main recursive function gathers up usage information, and
creates specialised versions of functions.
+-}
-\begin{code}
scExpr, scExpr' :: ScEnv -> CoreExpr -> UniqSM (ScUsage, CoreExpr)
-- The unique supply is needed when we invent
-- a new name for the specialised function and its args
@@ -1236,8 +1236,8 @@ scExpr' env (Let (Rec prs) body)
; return (all_usg { scu_calls = scu_calls all_usg `delVarEnvList` bndrs' },
Let bind' body') }
-\end{code}
+{-
Note [Local let bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~
It is not uncommon to find this
@@ -1253,9 +1253,8 @@ in the *RHS* of the function. Here we look for call patterns in the
At one point I predicated this on the RHS mentioning the outer
recursive function, but that's not essential and might even be
harmful. I'm not sure.
+-}
-
-\begin{code}
scApp :: ScEnv -> (InExpr, [InExpr]) -> UniqSM (ScUsage, CoreExpr)
scApp env (Var fn, args) -- Function is a variable
@@ -1380,16 +1379,14 @@ specInfoBinds (RI { ri_fn = fn, ri_new_rhs = new_rhs }) specs
where
rules = [r | OS _ r _ _ <- specs]
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
The specialiser itself
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data RhsInfo
= RI { ri_fn :: OutId -- The binder
, ri_new_rhs :: OutExpr -- The specialised RHS (in current envt)
@@ -1597,24 +1594,24 @@ spec_one env fn arg_bndrs body (call_pat@(qvars, pats), rule_number)
-- changes (#4012).
rule_name = mkFastString ("SC:" ++ occNameString fn_occ ++ show rule_number)
spec_name = mkInternalName spec_uniq spec_occ fn_loc
--- ; pprTrace "{spec_one" (ppr (sc_count env) <+> ppr fn <+> ppr pats <+> text "-->" <+> ppr spec_name) $
+-- ; pprTrace "{spec_one" (ppr (sc_count env) <+> ppr fn <+> ppr pats <+> text "-->" <+> ppr spec_name) $
-- return ()
-- Specialise the body
; (spec_usg, spec_body) <- scExpr spec_env body
--- ; pprTrace "done spec_one}" (ppr fn) $
+-- ; pprTrace "done spec_one}" (ppr fn) $
-- return ()
-- And build the results
- ; let spec_id = mkLocalId spec_name (mkPiTypes spec_lam_args body_ty)
+ ; let spec_id = mkLocalId spec_name (mkPiTypes spec_lam_args body_ty)
-- See Note [Transfer strictness]
`setIdStrictness` spec_str
`setIdArity` count isId spec_lam_args
spec_str = calcSpecStrictness fn spec_lam_args pats
-- Conditionally use result of new worker-wrapper transform
(spec_lam_args, spec_call_args) = mkWorkerArgs (sc_dflags env) qvars NoOneShotInfo body_ty
- -- Usual w/w hack to avoid generating
+ -- Usual w/w hack to avoid generating
-- a spec_rhs of unlifted type and no args
spec_rhs = mkLams spec_lam_args spec_body
@@ -1646,12 +1643,12 @@ calcSpecStrictness fn qvars pats
go_one :: DmdEnv -> Demand -> CoreExpr -> DmdEnv
go_one env d (Var v) = extendVarEnv_C bothDmd env v d
- go_one env d e
+ go_one env d e
| Just ds <- splitProdDmd_maybe d -- NB: d does not have to be strict
, (Var _, args) <- collectArgs e = go env ds args
go_one env _ _ = env
-\end{code}
+{-
Note [spec_usg includes rhs_usg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In calls to 'specialise', the returned ScUsage must include the rhs_usg in
@@ -1706,11 +1703,11 @@ See Trac #3437 for a good example.
The function calcSpecStrictness performs the calculation.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{Argument analysis}
-%* *
-%************************************************************************
+* *
+************************************************************************
This code deals with analysing call-site arguments to see whether
they are constructor applications.
@@ -1729,8 +1726,8 @@ BUT phantom type synonyms can mess this reasoning up,
eg x::T b with type T b = Int
So we apply expandTypeSynonyms to the bound Ids.
See Trac # 5458. Yuk.
+-}
-\begin{code}
type CallPat = ([Var], [CoreExpr]) -- Quantified variables and arguments
callsToPats :: ScEnv -> [OneSpec] -> [ArgOcc] -> [Call] -> UniqSM (Bool, [CallPat])
@@ -1952,10 +1949,7 @@ argsToPats env in_scope val_env args occs
= do { stuff <- zipWithM (argToPat env in_scope val_env) args occs
; let (interesting_s, args') = unzip stuff
; return (or interesting_s, args') }
-\end{code}
-
-\begin{code}
isValue :: ValueEnv -> CoreExpr -> Maybe Value
isValue _env (Lit lit)
| litIsLifted lit = Nothing
@@ -2026,8 +2020,8 @@ samePat (vs1, as1) (vs2, as2)
bad (Let {}) = True
bad (Lam {}) = True
bad _other = False
-\end{code}
+{-
Note [Ignore type differences]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not want to generate specialisations where the call patterns
@@ -2035,4 +2029,4 @@ differ only in their type arguments! Not only is it utterly useless,
but it also means that (with polymorphic recursion) we can generate
an infinite number of specialisations. Example is Data.Sequence.adjustTree,
I think.
-
+-}
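SpecConstr, as the notes above explain, makes a specialised copy of a recursive
function for call patterns whose arguments are constructor applications, plus a
rewrite rule that redirects such calls, so the specialised body no longer has to
take the constructor apart at every iteration. A hand-written before/after of
that transformation on an ordinary Haskell function (only the shape of the
result is shown; the real pass works on Core and generates the SC rule itself):

    -- Before: every recursive call rebuilds a pair that the next iteration
    -- immediately scrutinises.
    loop :: (Int, Int) -> Int
    loop (0, acc) = acc
    loop (n, acc) = loop (n - 1, acc + n)

    -- After (written by hand): a worker specialised for the call pattern
    -- "argument is a pair", plus a wrapper standing in for the generated
    -- rule  loop (n, acc) ==> loop_spec n acc.
    loop_spec :: Int -> Int -> Int
    loop_spec 0 acc = acc
    loop_spec n acc = loop_spec (n - 1) (acc + n)

    loop' :: (Int, Int) -> Int
    loop' (n, acc) = loop_spec n acc

    main :: IO ()
    main = print (loop (10, 0), loop' (10, 0))  -- (55,55)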
diff --git a/compiler/specialise/Specialise.lhs b/compiler/specialise/Specialise.hs
index bc04e063ef..de1bf08a31 100644
--- a/compiler/specialise/Specialise.lhs
+++ b/compiler/specialise/Specialise.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
+
\section[Specialise]{Stamping out overloading, and (optionally) polymorphism}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module Specialise ( specProgram, specUnfolding ) where
@@ -44,13 +44,13 @@ import Control.Monad
import Data.Map (Map)
import qualified Data.Map as Map
import qualified FiniteMap as Map
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[notes-Specialise]{Implementation notes [SLPJ, Aug 18 1993]}
-%* *
-%************************************************************************
+* *
+************************************************************************
These notes describe how we implement specialisation to eliminate
overloading.
@@ -511,11 +511,11 @@ like
f :: Eq [(a,b)] => ...
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsubsection{The new specialiser}
-%* *
-%************************************************************************
+* *
+************************************************************************
Our basic game plan is this. For let(rec) bound function
f :: (C a, D c) => (a,b,c,d) -> Bool
@@ -557,20 +557,20 @@ method from it! Even if it didn't, not a great deal is saved.
We do, however, generate polymorphic, but not overloaded, specialisations:
f :: Eq a => [a] -> b -> b -> b
- {#- SPECIALISE f :: [Int] -> b -> b -> b #-}
+ ... SPECIALISE f :: [Int] -> b -> b -> b ...
Hence, the invariant is this:
*** no specialised version is overloaded ***
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsubsection{The exported function}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
specProgram :: ModGuts -> CoreM ModGuts
specProgram guts@(ModGuts { mg_module = this_mod
, mg_rules = local_rules
@@ -693,8 +693,8 @@ wantSpecImport dflags unf
-- so perhaps it never will. Moreover it may have calls
-- inside it that we want to specialise
| otherwise -> False -- Stable, not INLINE, hence INLINEABLE
-\end{code}
+{-
Note [Specialise imported INLINABLE things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What imported functions do we specialise? The basic set is
@@ -740,14 +740,14 @@ And if the call is to the same type, one specialisation is enough.
Avoiding this recursive specialisation loop is the reason for the
'done' VarSet passed to specImports and specImport.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsubsection{@specExpr@: the main function}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-data SpecEnv
+data SpecEnv
= SE { se_subst :: CoreSubst.Subst
-- We carry a substitution down:
-- a) we must clone any binding that might float outwards,
@@ -756,14 +756,14 @@ data SpecEnv
-- the RHS of specialised bindings (no type-let!)
- , se_interesting :: VarSet
+ , se_interesting :: VarSet
-- Dict Ids that we know something about
-- and hence may be worth specialising against
-- See Note [Interesting dictionary arguments]
}
emptySpecEnv :: SpecEnv
-emptySpecEnv = SE { se_subst = CoreSubst.emptySubst, se_interesting = emptyVarSet}
+emptySpecEnv = SE { se_subst = CoreSubst.emptySubst, se_interesting = emptyVarSet}
specVar :: SpecEnv -> Id -> CoreExpr
specVar env v = CoreSubst.lookupIdSubst (text "specVar") (se_subst env) v
@@ -852,7 +852,7 @@ specCase env scrut' case_bndr [(con, args, rhs)]
| sc_arg' <- sc_args' ]
-- Extend the substitution for RHS to map the *original* binders
- -- to their floated verions.
+ -- to their floated verions.
mb_sc_flts :: [Maybe DictId]
mb_sc_flts = map (lookupVarEnv clone_env) args'
clone_env = zipVarEnv sc_args' sc_args_flt
@@ -905,8 +905,8 @@ specCase env scrut case_bndr alts
return ((con, args', wrapDictBindsE dumped_dbs rhs'), free_uds)
where
(env_rhs, args') = substBndrs env_alt args
-\end{code}
+{-
Note [Floating dictionaries out of cases]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
@@ -938,13 +938,13 @@ we transform to
The "_flt" things are the floated binds; we use the current substitution
to substitute sc -> sc_flt in the RHS
-%************************************************************************
-%* *
+************************************************************************
+* *
Dealing with a binding
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
specBind :: SpecEnv -- Use this for RHSs
-> CoreBind
-> UsageDetails -- Info on how the scope of the binding
@@ -1120,9 +1120,9 @@ specCalls mb_mod env rules_for_me calls_for_me fn rhs
already_covered :: DynFlags -> [CoreExpr] -> Bool
already_covered dflags args -- Note [Specialisations already covered]
- = isJust (lookupRule dflags
+ = isJust (lookupRule dflags
(CoreSubst.substInScope (se_subst env), realIdUnfolding)
- (const True)
+ (const True)
fn args rules_for_me)
mk_ty_args :: [Maybe Type] -> [TyVar] -> [CoreExpr]
@@ -1271,8 +1271,8 @@ bindAuxiliaryDicts env@(SE { se_subst = subst, se_interesting = interesting })
-- For the former, note that we bind the *original* dict in the substitution,
-- overriding any d->dx_id binding put there by substBndrs
go _ _ = pprPanic "bindAuxiliaryDicts" (ppr orig_dict_ids $$ ppr call_ds $$ ppr inst_dict_ids)
-\end{code}
+{-
Note [Make the new dictionaries interesting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Important! We're going to substitute dx_id1 for d
@@ -1485,7 +1485,7 @@ It's a silly exapmle, but we get
where choose doesn't have any dict arguments. Thus far I have not
tried to fix this (wait till there's a real example).
-Mind you, then 'choose' will be inlined (since RHS is trivial) so
+Mind you, then 'choose' will be inlined (since RHS is trivial) so
it doesn't matter. This comes up with single-method classes
class C a where { op :: a -> a }
@@ -1541,7 +1541,7 @@ all they should be inlined, right? Two reasons:
$wreplicateM_ = ...
Now an importing module has a specialised call to replicateM_, say
(replicateM_ dMonadIO). We certainly want to specialise $wreplicateM_!
- This particular example had a huge effect on the call to replicateM_
+ This particular example had a huge effect on the call to replicateM_
in nofib/shootout/n-body.
Why (b): discard INLINEABLE pragmas? See Trac #4874 for persuasive examples.
@@ -1565,13 +1565,13 @@ for examples involving specialisation, which is the dominant use of
INLINABLE. See Trac #4874.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsubsection{UsageDetails and suchlike}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data UsageDetails
= MkUD {
ud_binds :: !(Bag DictBind),
@@ -1730,8 +1730,8 @@ mkCallUDs' env f args
EqPred {} -> True
IrredPred {} -> True -- Things like (D []) where D is a
-- Constraint-ranged family; Trac #7785
-\end{code}
+{-
Note [Type determines value]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Only specialise if all overloading is on non-IP *class* params,
@@ -1762,7 +1762,7 @@ because the code for the specialised f is not improved at all, because
d is lambda-bound. We simply get junk specialisations.
What is "interesting"? Just that it has *some* structure. But what about
-variables?
+variables?
* A variable might be imported, in which case its unfolding
will tell us whether it has useful structure
@@ -1772,17 +1772,16 @@ variables?
(cloneIdBndr). Moreover, we make up some new bindings, and it's a
nuisance to give them unfoldings. So we keep track of the
"interesting" dictionaries as a VarSet in SpecEnv.
- We have to take care to put any new interesting dictionary
+ We have to take care to put any new interesting dictionary
bindings in the set.
We accidentally lost accurate tracking of local variables for a long
-time, because cloned variables don't have unfoldings. But makes a
-massive difference in a few cases, eg Trac #5113. For nofib as a
+time, because cloned variables don't have unfoldings. But makes a
+massive difference in a few cases, eg Trac #5113. For nofib as a
whole it's only a small win: 2.2% improvement in allocation for ansi,
1.2% for bspt, but mostly 0.0! Average 0.1% increase in binary size.
+-}
-
-\begin{code}
interestingDict :: SpecEnv -> CoreExpr -> Bool
-- A dictionary argument is interesting if it has *some* structure
interestingDict env (Var v) = hasSomeUnfolding (idUnfolding v)
@@ -1795,9 +1794,7 @@ interestingDict env (App fn (Coercion _)) = interestingDict env fn
interestingDict env (Tick _ a) = interestingDict env a
interestingDict env (Cast e _) = interestingDict env e
interestingDict _ _ = True
-\end{code}
-\begin{code}
plusUDs :: UsageDetails -> UsageDetails -> UsageDetails
plusUDs (MkUD {ud_binds = db1, ud_calls = calls1})
(MkUD {ud_binds = db2, ud_calls = calls2})
@@ -1946,16 +1943,15 @@ deleteCallsMentioning bs calls
deleteCallsFor :: [Id] -> CallDetails -> CallDetails
-- Remove calls *for* bs
deleteCallsFor bs calls = delVarEnvList calls bs
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Boring helper functions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
newtype SpecM a = SpecM (State SpecState a)
data SpecState = SpecState {
@@ -2010,7 +2006,7 @@ mapAndCombineSM f (x:xs) = do (y, uds1) <- f x
return (y:ys, uds1 `plusUDs` uds2)
extendTvSubstList :: SpecEnv -> [(TyVar,Type)] -> SpecEnv
-extendTvSubstList env tv_binds
+extendTvSubstList env tv_binds
= env { se_subst = CoreSubst.extendTvSubstList (se_subst env) tv_binds }
substTy :: SpecEnv -> Type -> Type
@@ -2033,7 +2029,7 @@ cloneBindSM :: SpecEnv -> CoreBind -> SpecM (SpecEnv, SpecEnv, CoreBind)
cloneBindSM env@(SE { se_subst = subst, se_interesting = interesting }) (NonRec bndr rhs)
= do { us <- getUniqueSupplyM
; let (subst', bndr') = CoreSubst.cloneIdBndr subst us bndr
- interesting' | interestingDict env rhs
+ interesting' | interestingDict env rhs
= interesting `extendVarSet` bndr'
| otherwise = interesting
; return (env, env { se_subst = subst', se_interesting = interesting' }
@@ -2043,7 +2039,7 @@ cloneBindSM env@(SE { se_subst = subst, se_interesting = interesting }) (Rec pai
= do { us <- getUniqueSupplyM
; let (subst', bndrs') = CoreSubst.cloneRecIdBndrs subst us (map fst pairs)
env' = env { se_subst = subst'
- , se_interesting = interesting `extendVarSetList`
+ , se_interesting = interesting `extendVarSetList`
[ v | (v,r) <- pairs, interestingDict env r ] }
; return (env', env', Rec (bndrs' `zip` map snd pairs)) }
@@ -2063,9 +2059,8 @@ newSpecIdSM old_id new_ty
new_occ = mkSpecOcc (nameOccName name)
new_id = mkUserLocal new_occ uniq new_ty (getSrcSpan name)
; return new_id }
-\end{code}
-
+{-
Old (but interesting) stuff about unboxed bindings
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -2133,4 +2128,4 @@ Answer: When they at the top-level (where it is necessary) or when
inlining would duplicate work (or possibly code depending on
options). However, the _Lifting will still be eliminated if the
strictness analyser deems the lifted binding strict.
-
+-}
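The Specialise notes above are about making a copy of an overloaded function at
a particular instance type so that the dictionary argument disappears and class
methods can be resolved statically. In dictionary-passing style the idea looks
like this (EqDict is a toy stand-in for a real class dictionary; the pass also
emits a RULE rewriting calls of the overloaded function at that type to the
specialised copy, which is not shown):

    -- An "overloaded" function in explicit dictionary-passing style.
    data EqDict a = EqDict { eq :: a -> a -> Bool }

    elemBy :: EqDict a -> a -> [a] -> Bool
    elemBy d x = any (eq d x)

    eqInt :: EqDict Int
    eqInt = EqDict (==)

    -- The specialised copy: the dictionary argument is gone, so the equality
    -- test is known statically and can be inlined and simplified further.
    elemInt :: Int -> [Int] -> Bool
    elemInt x = any (x ==)

    main :: IO ()
    main = print (elemBy eqInt 3 [1, 2, 3], elemInt 3 [1, 2, 3])  -- (True,True)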
diff --git a/compiler/stgSyn/CoreToStg.lhs b/compiler/stgSyn/CoreToStg.hs
index 7807d895dc..5b22e67eaf 100644
--- a/compiler/stgSyn/CoreToStg.lhs
+++ b/compiler/stgSyn/CoreToStg.hs
@@ -1,4 +1,3 @@
-\begin{code}
{-# LANGUAGE CPP #-}
--
@@ -1192,4 +1191,3 @@ stgArity :: Id -> HowBound -> Arity
stgArity _ (LetBound _ arity) = arity
stgArity f ImportBound = idArity f
stgArity _ LambdaBound = 0
-\end{code}
diff --git a/compiler/stgSyn/StgLint.lhs b/compiler/stgSyn/StgLint.hs
index a0fdf78d34..5bd25e3116 100644
--- a/compiler/stgSyn/StgLint.lhs
+++ b/compiler/stgSyn/StgLint.hs
@@ -1,9 +1,9 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
+
\section[StgLint]{A ``lint'' pass to check for Stg correctness}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module StgLint ( lintStgBindings ) where
@@ -23,7 +23,7 @@ import ErrUtils ( MsgDoc, Severity(..), mkLocMessage )
import TypeRep
import Type
import TyCon
-import Util
+import Util
import SrcLoc
import Outputable
import FastString
@@ -34,8 +34,8 @@ import Control.Monad
import Data.Function
#include "HsVersions.h"
-\end{code}
+{-
Checks for
(a) *some* type errors
(b) locally-defined variables used but not defined
@@ -52,15 +52,15 @@ for Stg code that is currently perfectly acceptable for code
generation. Solution: don't use it! (KSW 2000-05).
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{``lint'' for various constructs}
-%* *
-%************************************************************************
+* *
+************************************************************************
@lintStgBindings@ is the top-level interface function.
+-}
-\begin{code}
lintStgBindings :: String -> [StgBinding] -> [StgBinding]
lintStgBindings whodunnit binds
@@ -82,10 +82,7 @@ lintStgBindings whodunnit binds
binders <- lintStgBinds bind
addInScopeVars binders $
lint_binds binds
-\end{code}
-
-\begin{code}
lintStgArg :: StgArg -> LintM (Maybe Type)
lintStgArg (StgLitArg lit) = return (Just (literalType lit))
lintStgArg (StgVarArg v) = lintStgVar v
@@ -93,9 +90,7 @@ lintStgArg (StgVarArg v) = lintStgVar v
lintStgVar :: Id -> LintM (Maybe Kind)
lintStgVar v = do checkInScope v
return (Just (idType v))
-\end{code}
-\begin{code}
lintStgBinds :: StgBinding -> LintM [Id] -- Returns the binders
lintStgBinds (StgNonRec binder rhs) = do
lint_binds_help (binder,rhs)
@@ -131,9 +126,7 @@ lint_binds_help (binder, rhs)
return ()
where
binder_ty = idType binder
-\end{code}
-\begin{code}
lintStgRhs :: StgRhs -> LintM (Maybe Type) -- Just ty => type is exact
lintStgRhs (StgRhsClosure _ _ _ _ _ [] expr)
@@ -150,9 +143,7 @@ lintStgRhs (StgRhsCon _ con args) = runMaybeT $ do
MaybeT $ checkFunApp con_ty arg_tys (mkRhsConMsg con_ty arg_tys)
where
con_ty = dataConRepType con
-\end{code}
-\begin{code}
lintStgExpr :: StgExpr -> LintM (Maybe Type) -- Just ty => type is exact
lintStgExpr (StgLit l) = return (Just (literalType l))
@@ -274,16 +265,15 @@ lintAlt scrut_ty (DataAlt con, args, _, rhs) = do
-- We give it its own copy, so it isn't overloaded.
elem _ [] = False
elem x (y:ys) = x==y || elem x ys
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[lint-monad]{The Lint monad}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
newtype LintM a = LintM
{ unLintM :: [LintLocInfo] -- Locations
-> IdSet -- Local vars in scope
@@ -312,9 +302,7 @@ pp_binders bs
where
pp_binder b
= hsep [ppr b, dcolon, ppr (idType b)]
-\end{code}
-\begin{code}
initL :: LintM a -> Maybe MsgDoc
initL (LintM m)
= case (m [] emptyVarSet emptyBag) of { (_, errs) ->
@@ -345,9 +333,7 @@ thenL_ :: LintM a -> LintM b -> LintM b
thenL_ m k = LintM $ \loc scope errs
-> case unLintM m loc scope errs of
(_, errs') -> unLintM k loc scope errs'
-\end{code}
-\begin{code}
checkL :: Bool -> MsgDoc -> LintM ()
checkL True _ = return ()
checkL False msg = addErrL msg
@@ -382,15 +368,15 @@ addInScopeVars ids m = LintM $ \loc scope errs
-- then id
-- else pprTrace "Shadowed vars:" (ppr (varSetElems shadowed))) $
unLintM m loc (scope `unionVarSet` new_set) errs
-\end{code}
+{-
Checking function applications: we only check that the type has the
right *number* of arrows, we don't actually compare the types. This
is because we can't expect the types to be equal - the type
applications and type lambdas that we use to calculate accurate types
have long since disappeared.
+-}
-\begin{code}
checkFunApp :: Type -- The function type
-> [Type] -- The arg type(s)
-> MsgDoc -- Error message
@@ -410,9 +396,9 @@ checkFunApp fun_ty arg_tys msg
cfa accurate fun_ty [] -- Args have run out; that's fine
= (if accurate then Just fun_ty else Nothing, Nothing)
- cfa accurate fun_ty arg_tys@(arg_ty':arg_tys')
+ cfa accurate fun_ty arg_tys@(arg_ty':arg_tys')
| Just (arg_ty, res_ty) <- splitFunTy_maybe fun_ty
- = if accurate && not (arg_ty `stgEqType` arg_ty')
+ = if accurate && not (arg_ty `stgEqType` arg_ty')
then (Nothing, Just msg) -- Arg type mismatch
else cfa accurate res_ty arg_tys'
@@ -421,7 +407,7 @@ checkFunApp fun_ty arg_tys msg
| Just (tc,tc_args) <- splitTyConApp_maybe fun_ty
, isNewTyCon tc
- = if length tc_args < tyConArity tc
+ = if length tc_args < tyConArity tc
then WARN( True, text "cfa: unsaturated newtype" <+> ppr fun_ty $$ msg )
(Nothing, Nothing) -- This is odd, but I've seen it
else cfa False (newTyConInstRhs tc tc_args) arg_tys
@@ -432,9 +418,7 @@ checkFunApp fun_ty arg_tys msg
| otherwise
= (Nothing, Nothing)
-\end{code}
-\begin{code}
stgEqType :: Type -> Type -> Bool
-- Compare types, but crudely because we have discarded
-- both casts and type applications, so types might look
@@ -443,7 +427,7 @@ stgEqType :: Type -> Type -> Bool
--
-- Fundamentally this is a losing battle because of unsafeCoerce
-stgEqType orig_ty1 orig_ty2
+stgEqType orig_ty1 orig_ty2
= gos (repType orig_ty1) (repType orig_ty2)
where
gos :: RepType -> RepType -> Bool
@@ -456,18 +440,18 @@ stgEqType orig_ty1 orig_ty2
go ty1 ty2
| Just (tc1, tc_args1) <- splitTyConApp_maybe ty1
, Just (tc2, tc_args2) <- splitTyConApp_maybe ty2
- , let res = if tc1 == tc2
+ , let res = if tc1 == tc2
then equalLength tc_args1 tc_args2 && and (zipWith (gos `on` repType) tc_args1 tc_args2)
- else -- TyCons don't match; but don't bleat if either is a
- -- family TyCon because a coercion might have made it
+ else -- TyCons don't match; but don't bleat if either is a
+ -- family TyCon because a coercion might have made it
-- equal to something else
(isFamilyTyCon tc1 || isFamilyTyCon tc2)
= if res then True
- else
- pprTrace "stgEqType: unequal" (vcat [ppr ty1, ppr ty2])
+ else
+ pprTrace "stgEqType: unequal" (vcat [ppr ty1, ppr ty2])
False
- | otherwise = True -- Conservatively say "fine".
+ | otherwise = True -- Conservatively say "fine".
-- Type variables in particular
checkInScope :: Id -> LintM ()
@@ -482,9 +466,7 @@ checkTys ty1 ty2 msg = LintM $ \loc _scope errs
-> if (ty1 `stgEqType` ty2)
then ((), errs)
else ((), addErr errs msg loc)
-\end{code}
-\begin{code}
_mkCaseAltMsg :: [StgAlt] -> MsgDoc
_mkCaseAltMsg _alts
= ($$) (text "In some case alternatives, type of alternatives not all same:")
@@ -551,4 +533,3 @@ mkUnLiftedTyMsg binder rhs
ptext (sLit "has unlifted type") <+> quotes (ppr (idType binder)))
$$
(ptext (sLit "RHS:") <+> ppr rhs)
-\end{code}
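As the comment above checkFunApp says, StgLint only checks that a function type
has the right *number* of arrows, because the type applications and lambdas
needed for accurate types are long gone at this stage. A toy version of that
arrow-counting check (the real checkFunApp also looks through newtypes, stops
complaining once precision is lost, and reports via the Lint monad; the Ty type
below is invented for the example):

    data Ty = TyCon String | Fun Ty Ty  deriving Show

    checkFunApp :: Ty -> [Ty] -> Either String Ty
    checkFunApp fun_ty []            = Right fun_ty     -- args ran out: fine
    checkFunApp (Fun _ res) (_:args) = checkFunApp res args
    checkFunApp fun_ty args          =
      Left ("not enough arrows: " ++ show fun_ty ++ " applied to "
             ++ show (length args) ++ " more argument(s)")

    main :: IO ()
    main = do
      print (checkFunApp (Fun (TyCon "Int") (Fun (TyCon "Bool") (TyCon "Char")))
                         [TyCon "Int", TyCon "Bool"])   -- Right (TyCon "Char")
      print (checkFunApp (TyCon "Int") [TyCon "Bool"])  -- Left "not enough arrows: ..."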
diff --git a/compiler/stgSyn/StgSyn.lhs b/compiler/stgSyn/StgSyn.hs
index 2ecd573133..7577e837a8 100644
--- a/compiler/stgSyn/StgSyn.lhs
+++ b/compiler/stgSyn/StgSyn.hs
@@ -1,14 +1,14 @@
-%
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[StgSyn]{Shared term graph (STG) syntax for spineless-tagless code generation}
This data type represents programs just before code generation (conversion to
@Cmm@): basically, what we have is a stylised form of @CoreSyntax@, the style
being one that happens to be ideally suited to spineless tagless code
generation.
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module StgSyn (
@@ -69,13 +69,13 @@ import UniqSet
import Unique ( Unique )
import Util
import VarSet ( IdSet, isEmptyVarSet )
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{@GenStgBinding@}
-%* *
-%************************************************************************
+* *
+************************************************************************
As usual, expressions are interesting; other things are boring. Here
are the boring things [except note the @GenStgRhs@], parameterised
@@ -83,20 +83,20 @@ with respect to binder and occurrence information (just as in
@CoreSyn@):
There is one SRT for each group of bindings.
+-}
-\begin{code}
data GenStgBinding bndr occ
= StgNonRec bndr (GenStgRhs bndr occ)
| StgRec [(bndr, GenStgRhs bndr occ)]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{@GenStgArg@}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data GenStgArg occ
= StgVarArg occ
| StgLitArg Literal
@@ -142,22 +142,22 @@ isAddrRep _ = False
stgArgType :: StgArg -> Type
stgArgType (StgVarArg v) = idType v
stgArgType (StgLitArg lit) = literalType lit
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{STG expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
The @GenStgExpr@ data type is parameterised on binder and occurrence
info, as before.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsubsection{@GenStgExpr@ application}
-%* *
-%************************************************************************
+* *
+************************************************************************
An application is of a function to a list of atoms [not expressions].
Operationally, we want to push the arguments on the stack and call the
@@ -166,24 +166,26 @@ their closures first.)
There is no constructor for a lone variable; it would appear as
@StgApp var [] _@.
-\begin{code}
+-}
+
type GenStgLiveVars occ = UniqSet occ
data GenStgExpr bndr occ
= StgApp
occ -- function
[GenStgArg occ] -- arguments; may be empty
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{@StgConApp@ and @StgPrimApp@---saturated applications}
-%* *
-%************************************************************************
+* *
+************************************************************************
There are a specialised forms of application, for constructors,
primitives, and literals.
-\begin{code}
+-}
+
| StgLit Literal
-- StgConApp is vital for returning unboxed tuples
@@ -196,32 +198,32 @@ primitives, and literals.
Type -- Result type
-- We need to know this so that we can
-- assign result registers
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{@StgLam@}
-%* *
-%************************************************************************
+* *
+************************************************************************
StgLam is used *only* during CoreToStg's work. Before CoreToStg has
finished it encodes (\x -> e) as (let f = \x -> e in f)
+-}
-\begin{code}
| StgLam
[bndr]
StgExpr -- Body of lambda
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{@GenStgExpr@: case-expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
This has the same boxed/unboxed business as Core case expressions.
-\begin{code}
+-}
+
| StgCase
(GenStgExpr bndr occ)
-- the thing to examine
@@ -248,13 +250,13 @@ This has the same boxed/unboxed business as Core case expressions.
[GenStgAlt bndr occ]
-- The DEFAULT case is always *first*
-- if it is there at all
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{@GenStgExpr@: @let(rec)@-expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
The various forms of let(rec)-expression encode most of the
interesting things we want to do.
@@ -341,7 +343,8 @@ in e
\end{enumerate}
And so the code for let(rec)-things:
-\begin{code}
+-}
+
| StgLet
(GenStgBinding bndr occ) -- right hand sides (see below)
(GenStgExpr bndr occ) -- body
@@ -358,50 +361,51 @@ And so the code for let(rec)-things:
(GenStgBinding bndr occ) -- right hand sides (see below)
(GenStgExpr bndr occ) -- body
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{@GenStgExpr@: @scc@ expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
For @scc@ expressions we introduce a new STG construct.
+-}
-\begin{code}
| StgSCC
CostCentre -- label of SCC expression
!Bool -- bump the entry count?
!Bool -- push the cost centre?
(GenStgExpr bndr occ) -- scc expression
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{@GenStgExpr@: @hpc@ expressions}
-%* *
-%************************************************************************
+* *
+************************************************************************
Finally for @hpc@ expressions we introduce a new STG construct.
+-}
-\begin{code}
| StgTick
Module -- the module of the source of this tick
Int -- tick number
(GenStgExpr bndr occ) -- sub expression
-- END of GenStgExpr
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{STG right-hand sides}
-%* *
-%************************************************************************
+* *
+************************************************************************
Here's the rest of the interesting stuff for @StgLet@s; the first
flavour is for closures:
-\begin{code}
+-}
+
data GenStgRhs bndr occ
= StgRhsClosure
CostCentreStack -- CCS to be attached (default is CurrentCCS)
@@ -413,7 +417,8 @@ data GenStgRhs bndr occ
[bndr] -- arguments; if empty, then not a function;
-- as above, order is important.
(GenStgExpr bndr occ) -- body
-\end{code}
+
+{-
An example may be in order. Consider:
\begin{verbatim}
let t = \x -> \y -> ... x ... y ... p ... q in e
@@ -427,7 +432,8 @@ offsets from @Node@ into the closure, and the code ptr for the closure
will be exactly that in parentheses above.
The second flavour of right-hand-side is for constructors (simple but important):
-\begin{code}
+-}
+
| StgRhsCon
CostCentreStack -- CCS to be attached (default is CurrentCCS).
-- Top-level (static) ones will end up with
@@ -456,10 +462,9 @@ rhsHasCafRefs (StgRhsCon _ _ args)
stgArgHasCafRefs :: GenStgArg Id -> Bool
stgArgHasCafRefs (StgVarArg id) = mayHaveCafRefs (idCafInfo id)
stgArgHasCafRefs _ = False
-\end{code}
-Here's the @StgBinderInfo@ type, and its combining op:
-\begin{code}
+-- Here's the @StgBinderInfo@ type, and its combining op:
+
data StgBinderInfo
= NoStgBinderInfo
| SatCallsOnly -- All occurrences are *saturated* *function* calls
@@ -484,13 +489,13 @@ combineStgBinderInfo _ _ = NoStgBinderInfo
pp_binder_info :: StgBinderInfo -> SDoc
pp_binder_info NoStgBinderInfo = empty
pp_binder_info SatCallsOnly = ptext (sLit "sat-only")
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Stg-case-alternatives]{STG case alternatives}
-%* *
-%************************************************************************
+* *
+************************************************************************
Very like in @CoreSyntax@ (except no type-world stuff).
@@ -502,8 +507,8 @@ constructor might not have all the constructors visible. So
mkStgAlgAlts (in CoreToStg) ensures that it gets the TyCon from the
constructors or literals (which are guaranteed to have the Real McCoy)
rather than from the scrutinee type.
+-}
-\begin{code}
type GenStgAlt bndr occ
= (AltCon, -- alts: data constructor,
[bndr], -- constructor's parameters,
@@ -518,30 +523,30 @@ data AltType
| UbxTupAlt Int -- Unboxed tuple of this arity
| AlgAlt TyCon -- Algebraic data type; the AltCons will be DataAlts
| PrimAlt TyCon -- Primitive data type; the AltCons will be LitAlts
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Stg]{The Plain STG parameterisation}
-%* *
-%************************************************************************
+* *
+************************************************************************
This happens to be the only one we use at the moment.
+-}
-\begin{code}
type StgBinding = GenStgBinding Id Id
type StgArg = GenStgArg Id
type StgLiveVars = GenStgLiveVars Id
type StgExpr = GenStgExpr Id Id
type StgRhs = GenStgRhs Id Id
type StgAlt = GenStgAlt Id Id
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection[UpdateFlag-datatype]{@UpdateFlag@}
-%* *
-%************************************************************************
+* *
+************************************************************************
This is also used in @LambdaFormInfo@ in the @ClosureInfo@ module.
@@ -550,8 +555,8 @@ updated or blackholed. An @Updatable@ closure should be updated after
evaluation (and may be blackholed during evaluation). A @SingleEntry@
closure will only be entered once, and so need not be updated but may
safely be blackholed.
+-}
-\begin{code}
data UpdateFlag = ReEntrant | Updatable | SingleEntry
instance Outputable UpdateFlag where
@@ -564,19 +569,19 @@ isUpdatable :: UpdateFlag -> Bool
isUpdatable ReEntrant = False
isUpdatable SingleEntry = False
isUpdatable Updatable = True
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{StgOp}
-%* *
-%************************************************************************
+* *
+************************************************************************
An StgOp allows us to group together PrimOps and ForeignCalls.
It's quite useful to move these around together, notably
in StgOpApp and COpStmt.
+-}
-\begin{code}
data StgOp
= StgPrimOp PrimOp
@@ -586,14 +591,13 @@ data StgOp
-- The Unique is occasionally needed by the C pretty-printer
-- (which lacks a unique supply), notably when generating a
-- typedef for foreign-export-dynamic
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection[Static Reference Tables]{@SRT@}
-%* *
-%************************************************************************
+* *
+************************************************************************
There is one SRT per top-level function group. Each local binding and
case expression within this binding group has a subrange of the whole
@@ -601,8 +605,8 @@ SRT, expressed as an offset and length.
In CoreToStg we collect the list of CafRefs at each SRT site, which is later
converted into the length and offset form by the SRT pass.
+-}
-\begin{code}
data SRT
= NoSRT
| SRTEntries IdSet
@@ -619,18 +623,18 @@ pprSRT :: SRT -> SDoc
pprSRT (NoSRT) = ptext (sLit "_no_srt_")
pprSRT (SRTEntries ids) = text "SRT:" <> ppr ids
pprSRT (SRT off _ _) = parens (ppr off <> comma <> text "*bitmap*")
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Stg-pretty-printing]{Pretty-printing}
-%* *
-%************************************************************************
+* *
+************************************************************************
Robin Popplestone asked for semi-colon separators on STG binds; here's
hoping he likes terminators instead... Ditto for case alternatives.
+-}
-\begin{code}
pprGenStgBinding :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> GenStgBinding bndr bdee -> SDoc
@@ -814,5 +818,3 @@ pprStgRhs (StgRhsCon cc con args)
pprMaybeSRT :: SRT -> SDoc
pprMaybeSRT (NoSRT) = empty
pprMaybeSRT srt = ptext (sLit "srt:") <> pprSRT srt
-\end{code}
-
diff --git a/compiler/typecheck/Inst.lhs b/compiler/typecheck/Inst.lhs
index 11bacc81bc..7b4a7075e1 100644
--- a/compiler/typecheck/Inst.lhs
+++ b/compiler/typecheck/Inst.lhs
@@ -411,15 +411,6 @@ getOverlapFlag overlap_mode
final_oflag = setOverlapModeMaybe default_oflag overlap_mode
; return final_oflag }
-tcGetInstEnvs :: TcM InstEnvs
--- Gets both the external-package inst-env
--- and the home-pkg inst env (includes module being compiled)
-tcGetInstEnvs = do { eps <- getEps
- ; env <- getGblEnv
- ; return (InstEnvs (eps_inst_env eps)
- (tcg_inst_env env)
- (tcg_visible_orphan_mods env))}
-
tcGetInsts :: TcM [ClsInst]
-- Gets the local class instances.
tcGetInsts = fmap tcg_insts getGblEnv
@@ -498,9 +489,9 @@ addLocalInst (home_ie, my_insts) ispec
global_ie
| isJust (tcg_sig_of tcg_env) = emptyInstEnv
| otherwise = eps_inst_env eps
- inst_envs = InstEnvs global_ie
- home_ie'
- (tcg_visible_orphan_mods tcg_env)
+ inst_envs = InstEnvs { ie_global = global_ie
+ , ie_local = home_ie'
+ , ie_visible = tcg_visible_orphan_mods tcg_env }
(matches, _, _) = lookupInstEnv inst_envs cls tys
dups = filter (identicalInstHead ispec) (map fst matches)
diff --git a/compiler/typecheck/TcEnv.lhs b/compiler/typecheck/TcEnv.lhs
index 765ac4d071..c4a3f2f0d3 100644
--- a/compiler/typecheck/TcEnv.lhs
+++ b/compiler/typecheck/TcEnv.lhs
@@ -21,7 +21,7 @@ module TcEnv(
tcLookupField, tcLookupTyCon, tcLookupClass,
tcLookupDataCon, tcLookupPatSyn, tcLookupConLike,
tcLookupLocatedGlobalId, tcLookupLocatedTyCon,
- tcLookupLocatedClass, tcLookupInstance, tcLookupAxiom,
+ tcLookupLocatedClass, tcLookupAxiom,
-- Local environment
tcExtendKindEnv, tcExtendKindEnv2,
@@ -38,8 +38,11 @@ module TcEnv(
tcExtendRecEnv, -- For knot-tying
+ -- Instances
+ tcLookupInstance, tcGetInstEnvs,
+
-- Rules
- tcExtendRules,
+ tcExtendRules,
-- Defaults
tcGetDefaultTys,
@@ -225,12 +228,14 @@ tcLookupInstance cls tys
extractTyVar (TyVarTy tv) = tv
extractTyVar _ = panic "TcEnv.tcLookupInstance: extractTyVar"
- -- NB: duplicated to prevent circular dependence on Inst
- tcGetInstEnvs = do { eps <- getEps
- ; env <- getGblEnv
- ; return (InstEnvs (eps_inst_env eps)
- (tcg_inst_env env)
- (tcg_visible_orphan_mods env)) }
+tcGetInstEnvs :: TcM InstEnvs
+-- Gets both the external-package inst-env
+-- and the home-pkg inst env (includes module being compiled)
+tcGetInstEnvs = do { eps <- getEps
+ ; env <- getGblEnv
+ ; return (InstEnvs { ie_global = eps_inst_env eps
+ , ie_local = tcg_inst_env env
+ , ie_visible = tcg_visible_orphan_mods env }) }
\end{code}
\begin{code}
diff --git a/compiler/typecheck/TcGenDeriv.lhs b/compiler/typecheck/TcGenDeriv.lhs
index 9b5ef8bbfe..13d8e836f6 100644
--- a/compiler/typecheck/TcGenDeriv.lhs
+++ b/compiler/typecheck/TcGenDeriv.lhs
@@ -65,6 +65,7 @@ import Pair
import Bag
import Fingerprint
import TcEnv (InstInfo)
+import StaticFlags( opt_PprStyle_Debug )
import ListSetOps ( assocMaybe )
import Data.List ( partition, intersperse )
@@ -1323,18 +1324,19 @@ we generate
\begin{code}
gen_Data_binds :: DynFlags
- -> SrcSpan
- -> TyCon
+ -> SrcSpan
+ -> TyCon -- For data families, this is the
+ -- *representation* TyCon
-> (LHsBinds RdrName, -- The method bindings
BagDerivStuff) -- Auxiliary bindings
-gen_Data_binds dflags loc tycon
+gen_Data_binds dflags loc rep_tc
= (listToBag [gfoldl_bind, gunfold_bind, toCon_bind, dataTypeOf_bind]
`unionBags` gcast_binds,
-- Auxiliary definitions: the data type and constructors
listToBag ( DerivHsBind (genDataTyCon)
: map (DerivHsBind . genDataDataCon) data_cons))
where
- data_cons = tyConDataCons tycon
+ data_cons = tyConDataCons rep_tc
n_cons = length data_cons
one_constr = n_cons == 1
@@ -1343,11 +1345,11 @@ gen_Data_binds dflags loc tycon
= (mkHsVarBind loc rdr_name rhs,
L loc (TypeSig [L loc rdr_name] sig_ty PlaceHolder))
where
- rdr_name = mk_data_type_name tycon
+ rdr_name = mk_data_type_name rep_tc
sig_ty = nlHsTyVar dataType_RDR
- constrs = [nlHsVar (mk_constr_name con) | con <- tyConDataCons tycon]
+ constrs = [nlHsVar (mk_constr_name con) | con <- tyConDataCons rep_tc]
rhs = nlHsVar mkDataType_RDR
- `nlHsApp` nlHsLit (mkHsString (showSDocOneLine dflags (ppr tycon)))
+ `nlHsApp` nlHsLit (mkHsString (showSDocOneLine dflags (ppr rep_tc)))
`nlHsApp` nlList constrs
genDataDataCon :: DataCon -> (LHsBind RdrName, LSig RdrName)
@@ -1418,10 +1420,25 @@ gen_Data_binds dflags loc tycon
loc
dataTypeOf_RDR
[nlWildPat]
- (nlHsVar (mk_data_type_name tycon))
+ (nlHsVar (mk_data_type_name rep_tc))
------------ gcast1/2
- tycon_kind = tyConKind tycon
+ -- Make the binding dataCast1 x = gcast1 x -- if T :: * -> *
+ -- or dataCast2 x = gcast2 x -- if T :: * -> * -> *
+ -- (or nothing if T has neither of these two kinds)
+
+ -- But care is needed for data families:
+ -- If we have data family D a
+ -- data instance D (a,b,c) = A | B deriving( Data )
+ -- and we want instance ... => Data (D [(a,b,c)]) where ...
+ -- then we need dataCast1 x = gcast1 x
+ -- because D :: * -> *
+ -- even though rep_tc has kind * -> * -> * -> *
+ -- Hence looking for the kind of fam_tc not rep_tc
+ -- See Trac #4896
+ tycon_kind = case tyConFamInst_maybe rep_tc of
+ Just (fam_tc, _) -> tyConKind fam_tc
+ Nothing -> tyConKind rep_tc
gcast_binds | tycon_kind `tcEqKind` kind1 = mk_gcast dataCast1_RDR gcast1_RDR
| tycon_kind `tcEqKind` kind2 = mk_gcast dataCast2_RDR gcast2_RDR
| otherwise = emptyBag
@@ -2278,6 +2295,11 @@ f_Pat = nlVarPat f_RDR
k_Pat = nlVarPat k_RDR
z_Pat = nlVarPat z_RDR
+minusInt_RDR, tagToEnum_RDR, error_RDR :: RdrName
+minusInt_RDR = getRdrName (primOpId IntSubOp )
+tagToEnum_RDR = getRdrName (primOpId TagToEnumOp)
+error_RDR = getRdrName eRROR_ID
+
con2tag_RDR, tag2con_RDR, maxtag_RDR :: TyCon -> RdrName
-- Generates Orig s RdrName, for the binding positions
con2tag_RDR tycon = mk_tc_deriv_name tycon mkCon2TagOcc
@@ -2288,13 +2310,40 @@ mk_tc_deriv_name :: TyCon -> (OccName -> OccName) -> RdrName
mk_tc_deriv_name tycon occ_fun = mkAuxBinderName (tyConName tycon) occ_fun
mkAuxBinderName :: Name -> (OccName -> OccName) -> RdrName
-mkAuxBinderName parent occ_fun = mkRdrUnqual (occ_fun (nameOccName parent))
--- Was: mkDerivedRdrName name occ_fun, which made an original name
--- But: (a) that does not work well for standalone-deriving
--- (b) an unqualified name is just fine, provided it can't clash with user code
+-- ^ Make a top-level binder name for an auxiliary binding for a parent name
+-- See Note [Auxiliary binders]
+mkAuxBinderName parent occ_fun
+ = mkRdrUnqual (occ_fun uniq_parent_occ)
+ where
+ uniq_parent_occ = mkOccName (occNameSpace parent_occ) uniq_string
-minusInt_RDR, tagToEnum_RDR, error_RDR :: RdrName
-minusInt_RDR = getRdrName (primOpId IntSubOp )
-tagToEnum_RDR = getRdrName (primOpId TagToEnumOp)
-error_RDR = getRdrName eRROR_ID
+ uniq_string
+ | opt_PprStyle_Debug = showSDocSimple (ppr parent_occ <> underscore <> ppr parent_uniq)
+ | otherwise = show parent_uniq
+ -- The debug thing is just to generate longer, but perhaps more perspicuous, names
+
+ parent_uniq = nameUnique parent
+ parent_occ = nameOccName parent
\end{code}
+
+Note [Auxiliary binders]
+~~~~~~~~~~~~~~~~~~~~~~~~
+We often want to make a top-level auxiliary binding. E.g. for comparison we have
+
+ instance Ord T where
+ compare a b = $con2tag a `compare` $con2tag b
+
+ $con2tag :: T -> Int
+ $con2tag = ...code....
+
+Of course these top-level bindings should all have distinct names, and we are
+generating RdrNames here. We can't just use the TyCon or DataCon to distinguish,
+because with standalone deriving two imported TyCons might both be called T!
+(See Trac #7947.)
+
+So we use the *unique* from the parent name (T in this example) as part of the
+OccName we generate for the new binding.
+
+In the past we used mkDerivedRdrName name occ_fun, which made an original name.
+But: (a) that does not work well for standalone deriving
+ (b) an unqualified name is just fine, provided it can't clash with user code
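
To make the unique-based naming concrete, here is a minimal, self-contained sketch
(toy types and hypothetical names, not the GHC API) showing that two parents which
print identically but carry different uniques produce distinct auxiliary binder names:

-- Toy model: a Name is an OccName string plus a machine-generated unique.
data Name = Name { nameOcc :: String, nameUnique :: Int } deriving Show

-- Build the auxiliary binder's name from the parent's *unique*, so two
-- imported parents that are both called "T" cannot collide (cf. Trac #7947).
mkAuxName :: Name -> (String -> String) -> String
mkAuxName parent occFun = occFun (show (nameUnique parent))

mkCon2TagOcc :: String -> String
mkCon2TagOcc occ = "$con2tag_" ++ occ

main :: IO ()
main = do
  let t1 = Name "T" 101   -- one imported T
      t2 = Name "T" 202   -- a different imported T, same textual name
  putStrLn (mkAuxName t1 mkCon2TagOcc)   -- "$con2tag_101"
  putStrLn (mkAuxName t2 mkCon2TagOcc)   -- "$con2tag_202"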
diff --git a/compiler/typecheck/TcInteract.lhs b/compiler/typecheck/TcInteract.lhs
index ebac3102a8..b3ca7d8f90 100644
--- a/compiler/typecheck/TcInteract.lhs
+++ b/compiler/typecheck/TcInteract.lhs
@@ -1040,8 +1040,8 @@ outer type constructors match.
Note [Delicate equality kick-out]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-When adding an work-item CTyEqCan (a ~ xi), we kick out an inert
-CTyEqCan (b ~ phi) when
+When adding a fully-rewritten work-item CTyEqCan (a ~ xi), we kick
+out an inert CTyEqCan (b ~ phi) when
a) the work item can rewrite the inert item
diff --git a/compiler/typecheck/TcRnDriver.lhs b/compiler/typecheck/TcRnDriver.lhs
index 901f8f1410..29086c6ebe 100644
--- a/compiler/typecheck/TcRnDriver.lhs
+++ b/compiler/typecheck/TcRnDriver.lhs
@@ -79,7 +79,6 @@ import DataCon
import Type
import Class
import CoAxiom
-import Inst ( tcGetInstEnvs )
import Annotations
import Data.List ( sortBy )
import Data.Ord
@@ -1974,7 +1973,7 @@ tcRnGetInfo hsc_env name
lookupInsts :: TyThing -> TcM ([ClsInst],[FamInst])
lookupInsts (ATyCon tc)
- = do { InstEnvs pkg_ie home_ie vis_mods <- tcGetInstEnvs
+ = do { InstEnvs { ie_global = pkg_ie, ie_local = home_ie, ie_visible = vis_mods } <- tcGetInstEnvs
; (pkg_fie, home_fie) <- tcGetFamInstEnvs
-- Load all instances for all classes that are
-- in the type environment (which are all the ones
diff --git a/compiler/typecheck/TcRnTypes.lhs b/compiler/typecheck/TcRnTypes.lhs
index 18a455644b..69d0ce5396 100644
--- a/compiler/typecheck/TcRnTypes.lhs
+++ b/compiler/typecheck/TcRnTypes.lhs
@@ -278,6 +278,7 @@ data TcGblEnv
-- ^ The set of orphan modules which are transitively reachable from
-- direct imports. We use this to figure out if an orphan instance
-- in the global InstEnv should be considered visible.
+ -- See Note [Instance lookup and orphan instances] in InstEnv
-- Now a bunch of things about this module that are simply
-- accumulated, but never consulted until the end.
diff --git a/compiler/types/InstEnv.hs b/compiler/types/InstEnv.hs
index 19fec2b179..e9eb1dd001 100644
--- a/compiler/types/InstEnv.hs
+++ b/compiler/types/InstEnv.hs
@@ -19,7 +19,7 @@ module InstEnv (
IsOrphan(..), isOrphan, notOrphan,
- InstEnvs(..), InstEnv,
+ InstEnvs(..), VisibleOrphanModules, InstEnv,
emptyInstEnv, extendInstEnv, deleteFromInstEnv, identicalInstHead,
extendInstEnvList, lookupUniqueInstEnv, lookupInstEnv', lookupInstEnv, instEnvElts,
memberInstEnv, instIsVisible,
@@ -55,39 +55,11 @@ import Data.Monoid
{-
************************************************************************
* *
-\subsection{The key types}
+ ClsInst: the data type for type-class instances
* *
************************************************************************
-}
--- | Is this instance an orphan? If it is not an orphan, contains an 'OccName'
--- witnessing the instance's non-orphanhood.
-data IsOrphan = IsOrphan | NotOrphan OccName
- deriving (Data, Typeable)
-
--- | Returns true if 'IsOrphan' is orphan.
-isOrphan :: IsOrphan -> Bool
-isOrphan IsOrphan = True
-isOrphan _ = False
-
--- | Returns true if 'IsOrphan' is not an orphan.
-notOrphan :: IsOrphan -> Bool
-notOrphan NotOrphan{} = True
-notOrphan _ = False
-
-instance Binary IsOrphan where
- put_ bh IsOrphan = putByte bh 0
- put_ bh (NotOrphan n) = do
- putByte bh 1
- put_ bh n
- get bh = do
- h <- getByte bh
- case h of
- 0 -> return IsOrphan
- _ -> do
- n <- get bh
- return $ NotOrphan n
-
data ClsInst
= ClsInst { -- Used for "rough matching"; see Note [Rough-match field]
-- INVARIANT: is_tcs = roughMatchTcs is_tys
@@ -242,10 +214,9 @@ mkLocalInstance :: DFunId -> OverlapFlag
-> [TyVar] -> Class -> [Type]
-> ClsInst
-- Used for local instances, where we can safely pull on the DFunId
--- TODO: what is the difference between source_tvs and tvs?
-mkLocalInstance dfun oflag source_tvs cls tys
+mkLocalInstance dfun oflag tvs cls tys
= ClsInst { is_flag = oflag, is_dfun = dfun
- , is_tvs = source_tvs
+ , is_tvs = tvs
, is_cls = cls, is_cls_nm = cls_name
, is_tys = tys, is_tcs = roughMatchTcs tys
, is_orphan = orph
@@ -256,11 +227,11 @@ mkLocalInstance dfun oflag source_tvs cls tys
this_mod = ASSERT( isExternalName dfun_name ) nameModule dfun_name
is_local name = nameIsLocalOrFrom this_mod name
- -- Compute orphanhood. See Note [Orphans] in IfaceSyn
- (tvs, fds) = classTvsFds cls
+ -- Compute orphanhood. See Note [Orphans] in InstEnv
+ (cls_tvs, fds) = classTvsFds cls
arg_names = [filterNameSet is_local (orphNamesOfType ty) | ty <- tys]
- -- See Note [When exactly is an instance decl an orphan?] in IfaceSyn
+ -- See Note [When exactly is an instance decl an orphan?]
orph | is_local cls_name = NotOrphan (nameOccName cls_name)
| all notOrphan mb_ns = ASSERT( not (null mb_ns) ) head mb_ns
| otherwise = IsOrphan
@@ -272,7 +243,7 @@ mkLocalInstance dfun oflag source_tvs cls tys
-- that is not in the "determined" arguments
mb_ns | null fds = [choose_one arg_names]
| otherwise = map do_one fds
- do_one (_ltvs, rtvs) = choose_one [ns | (tv,ns) <- tvs `zip` arg_names
+ do_one (_ltvs, rtvs) = choose_one [ns | (tv,ns) <- cls_tvs `zip` arg_names
, not (tv `elem` rtvs)]
choose_one :: [NameSet] -> IsOrphan
@@ -313,127 +284,115 @@ instanceCantMatch (Just t : ts) (Just a : as) = t/=a || instanceCantMatch ts as
instanceCantMatch _ _ = False -- Safe
{-
-Note [Overlapping instances]
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Overlap is permitted, but only in such a way that one can make
-a unique choice when looking up. That is, overlap is only permitted if
-one template matches the other, or vice versa. So this is ok:
-
- [a] [Int]
-
-but this is not
-
- (Int,a) (b,Int)
-
-If overlap is permitted, the list is kept most specific first, so that
-the first lookup is the right choice.
-
-
-For now we just use association lists.
-
-\subsection{Avoiding a problem with overlapping}
+************************************************************************
+* *
+ Orphans
+* *
+************************************************************************
+-}
-Consider this little program:
+-- | Is this instance an orphan? If it is not an orphan, contains an 'OccName'
+-- witnessing the instance's non-orphanhood.
+-- See Note [Orphans]
+data IsOrphan
+ = IsOrphan
+ | NotOrphan OccName -- The OccName 'n' witnesses the instance's non-orphanhood
+ -- In that case, the instance is fingerprinted as part
+ -- of 'n's definition
+ deriving (Data, Typeable)
-\begin{pseudocode}
- class C a where c :: a
- class C a => D a where d :: a
+-- | Returns true if 'IsOrphan' is orphan.
+isOrphan :: IsOrphan -> Bool
+isOrphan IsOrphan = True
+isOrphan _ = False
- instance C Int where c = 17
- instance D Int where d = 13
+-- | Returns true if 'IsOrphan' is not an orphan.
+notOrphan :: IsOrphan -> Bool
+notOrphan NotOrphan{} = True
+notOrphan _ = False
- instance C a => C [a] where c = [c]
- instance ({- C [a], -} D a) => D [a] where d = c
+instance Binary IsOrphan where
+ put_ bh IsOrphan = putByte bh 0
+ put_ bh (NotOrphan n) = do
+ putByte bh 1
+ put_ bh n
+ get bh = do
+ h <- getByte bh
+ case h of
+ 0 -> return IsOrphan
+ _ -> do
+ n <- get bh
+ return $ NotOrphan n
- instance C [Int] where c = [37]
+{-
+Note [Orphans]
+~~~~~~~~~~~~~~
+Class instances, rules, and family instances are divided into orphans
+and non-orphans. Roughly speaking, an instance/rule is an orphan if
+its left hand side mentions nothing defined in this module. Orphan-hood
+has two major consequences
- main = print (d :: [Int])
-\end{pseudocode}
+ * A module that contains orphans is called an "orphan module". If
+ the module being compiled depends (transitively) on an orphan
+ module M, then M.hi is read in regardless of whether M is otherwise
+ needed. This is to ensure that we don't miss any instance decls in
+ M. But it's painful, because it means we need to keep track of all
+ the orphan modules below us.
-What do you think `main' prints (assuming we have overlapping instances, and
-all that turned on)? Well, the instance for `D' at type `[a]' is defined to
-be `c' at the same type, and we've got an instance of `C' at `[Int]', so the
-answer is `[37]', right? (the generic `C [a]' instance shouldn't apply because
-the `C [Int]' instance is more specific).
+ * A non-orphan is not finger-printed separately. Instead, for
+ fingerprinting purposes it is treated as part of the entity it
+ mentions on the LHS. For example
+ data T = T1 | T2
+ instance Eq T where ....
+ The instance (Eq T) is incorporated as part of T's fingerprint.
-Ghc-4.04 gives `[37]', while ghc-4.06 gives `[17]', so 4.06 is wrong. That
-was easy ;-) Let's just consult hugs for good measure. Wait - if I use old
-hugs (pre-September99), I get `[17]', and stranger yet, if I use hugs98, it
-doesn't even compile! What's going on!?
+ In contrast, orphans are all fingerprinted together in the
+ mi_orph_hash field of the ModIface.
-What hugs complains about is the `D [a]' instance decl.
+ See MkIface.addFingerprints.
-\begin{pseudocode}
- ERROR "mj.hs" (line 10): Cannot build superclass instance
- *** Instance : D [a]
- *** Context supplied : D a
- *** Required superclass : C [a]
-\end{pseudocode}
+Orphan-hood is computed
+ * For class instances:
+ when we make a ClsInst
+ (because it is needed during instance lookup)
-You might wonder what hugs is complaining about. It's saying that you
-need to add `C [a]' to the context of the `D [a]' instance (as appears
-in comments). But there's that `C [a]' instance decl one line above
-that says that I can reduce the need for a `C [a]' instance to the
-need for a `C a' instance, and in this case, I already have the
-necessary `C a' instance (since we have `D a' explicitly in the
-context, and `C' is a superclass of `D').
+ * For rules and family instances:
+ when we generate an IfaceRule (MkIface.coreRuleToIfaceRule)
+ or IfaceFamInst (MkIface.instanceToIfaceInst)
-Unfortunately, the above reasoning indicates a premature commitment to the
-generic `C [a]' instance. I.e., it prematurely rules out the more specific
-instance `C [Int]'. This is the mistake that ghc-4.06 makes. The fix is to
-add the context that hugs suggests (uncomment the `C [a]'), effectively
-deferring the decision about which instance to use.
+Note [When exactly is an instance decl an orphan?]
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ (see MkIface.instanceToIfaceInst, which implements this)
+Roughly speaking, an instance is an orphan if its head (after the =>)
+mentions nothing defined in this module.
-Now, interestingly enough, 4.04 has this same bug, but it's covered up
-in this case by a little known `optimization' that was disabled in
-4.06. Ghc-4.04 silently inserts any missing superclass context into
-an instance declaration. In this case, it silently inserts the `C
-[a]', and everything happens to work out.
+Functional dependencies complicate the situation though. Consider
-(See `basicTypes/MkId:mkDictFunId' for the code in question. Search for
-`Mark Jones', although Mark claims no credit for the `optimization' in
-question, and would rather it stopped being called the `Mark Jones
-optimization' ;-)
+ module M where { class C a b | a -> b }
-So, what's the fix? I think hugs has it right. Here's why. Let's try
-something else out with ghc-4.04. Let's add the following line:
+and suppose we are compiling module X:
- d' :: D a => [a]
- d' = c
+ module X where
+ import M
+ data T = ...
+ instance C Int T where ...
-Everyone raise their hand who thinks that `d :: [Int]' should give a
-different answer from `d' :: [Int]'. Well, in ghc-4.04, it does. The
-`optimization' only applies to instance decls, not to regular
-bindings, giving inconsistent behavior.
+This instance is an orphan, because when compiling a third module Y we
+might get a constraint (C Int v), and we'd want to improve v to T. So
+we must make sure X's instances are loaded, even if we do not directly
+use anything from X.
-Old hugs had this same bug. Here's how we fixed it: like GHC, the
-list of instances for a given class is ordered, so that more specific
-instances come before more generic ones. For example, the instance
-list for C might contain:
- ..., C Int, ..., C a, ...
-When we go to look for a `C Int' instance we'll get that one first.
-But what if we go looking for a `C b' (`b' is unconstrained)? We'll
-pass the `C Int' instance, and keep going. But if `b' is
-unconstrained, then we don't know yet if the more specific instance
-will eventually apply. GHC keeps going, and matches on the generic `C
-a'. The fix is to, at each step, check to see if there's a reverse
-match, and if so, abort the search. This prevents hugs from
-prematurely chosing a generic instance when a more specific one
-exists.
+More precisely, an instance is an orphan iff
---Jeff
+ If there are no fundeps, then at least one of the names in
+ the instance head is locally defined.
-BUT NOTE [Nov 2001]: we must actually *unify* not reverse-match in
-this test. Suppose the instance envt had
- ..., forall a b. C a a b, ..., forall a b c. C a b c, ...
-(still most specific first)
-Now suppose we are looking for (C x y Int), where x and y are unconstrained.
- C x y Int doesn't match the template {a,b} C a a b
-but neither does
- C a a b match the template {x,y} C x y Int
-But still x and y might subsequently be unified so they *do* match.
+ If there are fundeps, then for every fundep, at least one of the
+ names free in a *non-determined* part of the instance head is
+ defined in this module.
-Simple story: unify, don't match.
+(Note that these conditions hold trivially if the class is locally
+defined.)
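
Read as a decision procedure, the two conditions look roughly like the following
sketch (toy types and hypothetical names; it only mirrors the shape of the
choose_one/do_one logic in mkLocalInstance above, it is not that code):

data IsOrphan = IsOrphan | NotOrphan String deriving Show

computeOrphan :: (String -> Bool)        -- is this name defined in the module being compiled?
              -> String                  -- class name
              -> [String]                -- the class's type variables
              -> [([String], [String])]  -- fundeps: (determining tyvars, determined tyvars)
              -> [[String]]              -- names free in each instance-head argument
              -> IsOrphan
computeOrphan isLocal clsName clsTyVars fundeps argFreeNames
  | isLocal clsName      = NotOrphan clsName   -- locally-defined class: never an orphan
  | all notOrphan per_fd = head per_fd
  | otherwise            = IsOrphan
  where
    notOrphan (NotOrphan _) = True
    notOrphan _             = False
    -- One check for the whole head if there are no fundeps, otherwise one
    -- check per fundep, looking only at the non-determined arguments.
    per_fd
      | null fundeps = [chooseOne argFreeNames]
      | otherwise    = [ chooseOne [ ns | (tv, ns) <- zip clsTyVars argFreeNames
                                        , tv `notElem` determined ]
                       | (_, determined) <- fundeps ]
    chooseOne nss = case filter isLocal (concat nss) of
                      (n:_) -> NotOrphan n
                      []    -> IsOrphan

main :: IO ()
main = do
  let isLocal n = n == "T"   -- module X defines only T
  -- class C a b | a -> b (defined in M);  instance C Int T (in X):
  print (computeOrphan isLocal "C" ["a", "b"] [(["a"], ["b"])] [["Int"], ["T"]])
  -- prints IsOrphan: the only local name, T, occurs only in the determined argument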
************************************************************************
@@ -462,11 +421,18 @@ type InstEnv = UniqFM ClsInstEnv -- Maps Class to instances for that clas
-- transitively reachable orphan modules (according to what modules have been
-- directly imported) used to test orphan instance visibility.
data InstEnvs = InstEnvs {
- ie_global :: InstEnv,
- ie_local :: InstEnv,
- ie_visible :: VisibleOrphanModules
+ ie_global :: InstEnv, -- External-package instances
+ ie_local :: InstEnv, -- Home-package instances
+ ie_visible :: VisibleOrphanModules -- Set of all orphan modules transitively
+ -- reachable from the module being compiled
+ -- See Note [Instance lookup and orphan instances]
}
+-- | Set of visible orphan modules, according to what modules have been directly
+-- imported. This is based on the dep_orphs field, which records
+-- transitively reachable orphan modules (modules that define orphan instances).
+type VisibleOrphanModules = ModuleSet
+
newtype ClsInstEnv
= ClsIE [ClsInst] -- The instances for a particular class, in any order
@@ -490,22 +456,24 @@ instEnvElts ie = [elt | ClsIE elts <- eltsUFM ie, elt <- elts]
-- | Test if an instance is visible, by checking that its origin module
-- is in 'VisibleOrphanModules'.
+-- See Note [Instance lookup and orphan instances]
instIsVisible :: VisibleOrphanModules -> ClsInst -> Bool
instIsVisible vis_mods ispec
-- NB: Instances from the interactive package always are visible. We can't
-- add interactive modules to the set since we keep creating new ones
-- as a GHCi session progresses.
- | isInteractiveModule mod = True
+ | isInteractiveModule mod = True
| IsOrphan <- is_orphan ispec = mod `elemModuleSet` vis_mods
- | otherwise = True
- where mod = nameModule (idName (is_dfun ispec))
+ | otherwise = True
+ where
+ mod = nameModule (idName (is_dfun ispec))
classInstances :: InstEnvs -> Class -> [ClsInst]
-classInstances (InstEnvs pkg_ie home_ie vis_mods) cls
- = filter (instIsVisible vis_mods) (get home_ie ++ get pkg_ie)
+classInstances (InstEnvs { ie_global = pkg_ie, ie_local = home_ie, ie_visible = vis_mods }) cls
+ = get home_ie ++ get pkg_ie
where
get env = case lookupUFM env cls of
- Just (ClsIE insts) -> insts
+ Just (ClsIE insts) -> filter (instIsVisible vis_mods) insts
Nothing -> []
-- | Collects the names of concrete types and type constructors that make
@@ -562,6 +530,32 @@ identicalInstHead (ClsInst { is_cls_nm = cls_nm1, is_tcs = rough1, is_tvs = tvs1
the env is kept ordered, the first match must be the only one. The
thing we are looking up can have an arbitrary "flexi" part.
+Note [Instance lookup and orphan instances]
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Suppose we are compiling a module M, and we have a zillion packages
+loaded, and we are looking up an instance for C (T W). If we find a
+match in module 'X' from package 'p', should it be "in scope"; that is,
+
+ is p:X in the transitive closure of modules imported from M?
+
+The difficulty is that the "zillion packages" might include ones loaded
+through earlier invocations of the GHC API, or earlier module loads in GHCi.
+They might not be in the dependencies of M itself; and if not, the instances
+in them should not be visible. Trac #2182, #8427.
+
+There are two cases:
+ * If the instance is *not an orphan*, then module X defines C, T, or W.
+ And in order for those types to be involved in typechecking M, it
+ must be that X is in the transitive closure of M's imports. So we
+ can use the instance.
+
+ * If the instance *is an orphan*, the above reasoning does not apply.
+ So we keep track of the set of orphan modules transitively below M;
+ this is the ie_visible field of InstEnvs, of type VisibleOrphanModules.
+
+ If module p:X is in this set, then we can use the instance, otherwise
+ we can't.
+
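A minimal sketch of the visibility test this note motivates (toy types and
illustrative module names; the real check is instIsVisible): non-orphan instances
are always usable, orphan instances only when their defining module is among the
visible orphan modules.

import qualified Data.Set as Set

data IsOrphan = IsOrphan | NotOrphan String

-- Toy version of the check: an orphan instance is usable only if its
-- defining module is in the set of visible orphan modules.
instVisible :: Set.Set String -> String -> IsOrphan -> Bool
instVisible _        _      (NotOrphan _) = True
instVisible visOrphs defMod IsOrphan      = defMod `Set.member` visOrphs

main :: IO ()
main = do
  let vis = Set.fromList ["p:X"]   -- orphan modules transitively below M
  print (instVisible vis "p:X" IsOrphan)        -- True:  orphan, but reachable from M
  print (instVisible vis "q:Y" IsOrphan)        -- False: orphan from an unrelated package
  print (instVisible vis "q:Y" (NotOrphan "C")) -- True:  non-orphans are always visible
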
Note [Rules for instance lookup]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
These functions implement the carefully-written rules in the user
@@ -608,6 +602,128 @@ of the target constraint (C ty1 .. tyn). The search works like this.
* If only one candidate remains, pick it. Otherwise if all remaining
candidates are incoherent, pick an arbitrary candidate. Otherwise fail.
+
+Note [Overlapping instances] (NB: these notes are quite old)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Overlap is permitted, but only in such a way that one can make
+a unique choice when looking up. That is, overlap is only permitted if
+one template matches the other, or vice versa. So this is ok:
+
+ [a] [Int]
+
+but this is not
+
+ (Int,a) (b,Int)
+
+If overlap is permitted, the list is kept most specific first, so that
+the first lookup is the right choice.
+
+
+For now we just use association lists.
+
+\subsection{Avoiding a problem with overlapping}
+
+Consider this little program:
+
+\begin{pseudocode}
+ class C a where c :: a
+ class C a => D a where d :: a
+
+ instance C Int where c = 17
+ instance D Int where d = 13
+
+ instance C a => C [a] where c = [c]
+ instance ({- C [a], -} D a) => D [a] where d = c
+
+ instance C [Int] where c = [37]
+
+ main = print (d :: [Int])
+\end{pseudocode}
+
+What do you think `main' prints (assuming we have overlapping instances, and
+all that turned on)? Well, the instance for `D' at type `[a]' is defined to
+be `c' at the same type, and we've got an instance of `C' at `[Int]', so the
+answer is `[37]', right? (the generic `C [a]' instance shouldn't apply because
+the `C [Int]' instance is more specific).
+
+Ghc-4.04 gives `[37]', while ghc-4.06 gives `[17]', so 4.06 is wrong. That
+was easy ;-) Let's just consult hugs for good measure. Wait - if I use old
+hugs (pre-September99), I get `[17]', and stranger yet, if I use hugs98, it
+doesn't even compile! What's going on!?
+
+What hugs complains about is the `D [a]' instance decl.
+
+\begin{pseudocode}
+ ERROR "mj.hs" (line 10): Cannot build superclass instance
+ *** Instance : D [a]
+ *** Context supplied : D a
+ *** Required superclass : C [a]
+\end{pseudocode}
+
+You might wonder what hugs is complaining about. It's saying that you
+need to add `C [a]' to the context of the `D [a]' instance (as appears
+in comments). But there's that `C [a]' instance decl one line above
+that says that I can reduce the need for a `C [a]' instance to the
+need for a `C a' instance, and in this case, I already have the
+necessary `C a' instance (since we have `D a' explicitly in the
+context, and `C' is a superclass of `D').
+
+Unfortunately, the above reasoning indicates a premature commitment to the
+generic `C [a]' instance. I.e., it prematurely rules out the more specific
+instance `C [Int]'. This is the mistake that ghc-4.06 makes. The fix is to
+add the context that hugs suggests (uncomment the `C [a]'), effectively
+deferring the decision about which instance to use.
+
+Now, interestingly enough, 4.04 has this same bug, but it's covered up
+in this case by a little known `optimization' that was disabled in
+4.06. Ghc-4.04 silently inserts any missing superclass context into
+an instance declaration. In this case, it silently inserts the `C
+[a]', and everything happens to work out.
+
+(See `basicTypes/MkId:mkDictFunId' for the code in question. Search for
+`Mark Jones', although Mark claims no credit for the `optimization' in
+question, and would rather it stopped being called the `Mark Jones
+optimization' ;-)
+
+So, what's the fix? I think hugs has it right. Here's why. Let's try
+something else out with ghc-4.04. Let's add the following line:
+
+ d' :: D a => [a]
+ d' = c
+
+Everyone raise their hand who thinks that `d :: [Int]' should give a
+different answer from `d' :: [Int]'. Well, in ghc-4.04, it does. The
+`optimization' only applies to instance decls, not to regular
+bindings, giving inconsistent behavior.
+
+Old hugs had this same bug. Here's how we fixed it: like GHC, the
+list of instances for a given class is ordered, so that more specific
+instances come before more generic ones. For example, the instance
+list for C might contain:
+ ..., C Int, ..., C a, ...
+When we go to look for a `C Int' instance we'll get that one first.
+But what if we go looking for a `C b' (`b' is unconstrained)? We'll
+pass the `C Int' instance, and keep going. But if `b' is
+unconstrained, then we don't know yet if the more specific instance
+will eventually apply. GHC keeps going, and matches on the generic `C
+a'. The fix is to, at each step, check to see if there's a reverse
+match, and if so, abort the search. This prevents hugs from
+prematurely choosing a generic instance when a more specific one
+exists.
+
+--Jeff
+
+BUT NOTE [Nov 2001]: we must actually *unify* not reverse-match in
+this test. Suppose the instance envt had
+ ..., forall a b. C a a b, ..., forall a b c. C a b c, ...
+(still most specific first)
+Now suppose we are looking for (C x y Int), where x and y are unconstrained.
+ C x y Int doesn't match the template {a,b} C a a b
+but neither does
+ C a a b match the template {x,y} C x y Int
+But still x and y might subsequently be unified so they *do* match.
+
+Simple story: unify, don't match.
-}
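
To see why one-way matching is not enough, here is a small, self-contained sketch
(a toy term type with hypothetical names, not GHC's actual matcher) replaying the
example above: neither head matches the other one-way, yet the two heads unify, so
the more specific instance may still become relevant once x and y are instantiated.

import Control.Monad (foldM)
import qualified Data.Map as M

-- Toy term language: just enough to replay  C a a b  versus  C x y Int.
data Ty = V String | T String [Ty] deriving (Eq, Show)

-- One-way matching: only variables of the first argument (the template) bind.
match :: Ty -> Ty -> Maybe (M.Map String Ty)
match = go M.empty
  where
    go s (V v) t = case M.lookup v s of
                     Nothing -> Just (M.insert v t s)
                     Just t' -> if t' == t then Just s else Nothing
    go s (T c as) (T c' bs)
      | c == c', length as == length bs
      = foldM (\s' (x, y) -> go s' x y) s (zip as bs)
    go _ _ _ = Nothing

-- Two-way unification (no occurs check; fine for this tiny example).
unify :: Ty -> Ty -> Maybe (M.Map String Ty)
unify = go M.empty
  where
    go s a b = case (walk s a, walk s b) of
      (V v, t) -> Just (M.insert v t s)
      (t, V v) -> Just (M.insert v t s)
      (T c as, T c' bs) | c == c', length as == length bs
                        -> foldM (\s' (x, y) -> go s' x y) s (zip as bs)
      _ -> Nothing
    walk s t@(V v) = maybe t (walk s) (M.lookup v s)
    walk _ t       = t

main :: IO ()
main = do
  let tmpl   = T "C" [V "a", V "a", V "b"]       -- forall a b.  C a a b
      target = T "C" [V "x", V "y", T "Int" []]  -- looking up:  C x y Int
  print (match tmpl target)   -- Nothing: the template does not match the target
  print (match target tmpl)   -- Nothing: nor the other way around
  print (unify tmpl target)   -- Just ...: yet the two heads do unify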
type DFunInstType = Maybe Type
@@ -686,7 +802,8 @@ lookupInstEnv' ie vis_mods cls tys
find ms us (item@(ClsInst { is_tcs = mb_tcs, is_tvs = tpl_tvs
, is_tys = tpl_tys, is_flag = oflag }) : rest)
| not (instIsVisible vis_mods item)
- = find ms us rest
+ = find ms us rest -- See Note [Instance lookup and orphan instances]
+
-- Fast check for no match, uses the "rough match" fields
| instanceCantMatch rough_tcs mb_tcs
= find ms us rest
@@ -726,7 +843,7 @@ lookupInstEnv :: InstEnvs -- External and home package inst-env
-> Class -> [Type] -- What we are looking for
-> ClsInstLookupResult
-- ^ See Note [Rules for instance lookup]
-lookupInstEnv (InstEnvs pkg_ie home_ie vis_mods) cls tys
+lookupInstEnv (InstEnvs { ie_global = pkg_ie, ie_local = home_ie, ie_visible = vis_mods }) cls tys
= (final_matches, final_unifs, safe_fail)
where
(home_matches, home_unifs) = lookupInstEnv' home_ie vis_mods cls tys
diff --git a/compiler/utils/Bag.lhs b/compiler/utils/Bag.hs
index 65c5b39df1..95feaed9f8 100644
--- a/compiler/utils/Bag.lhs
+++ b/compiler/utils/Bag.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
Bag: an unordered collection with duplicates
+-}
-\begin{code}
{-# LANGUAGE DeriveDataTypeable, ScopedTypeVariables #-}
module Bag (
@@ -32,10 +32,7 @@ import Data.List ( partition )
infixr 3 `consBag`
infixl 3 `snocBag`
-\end{code}
-
-\begin{code}
data Bag a
= EmptyBag
| UnitBag a
@@ -257,9 +254,7 @@ listToBag vs = ListBag vs
bagToList :: Bag a -> [a]
bagToList b = foldrBag (:) [] b
-\end{code}
-\begin{code}
instance (Outputable a) => Outputable (Bag a) where
ppr bag = braces (pprWithCommas ppr (bagToList bag))
@@ -269,5 +264,3 @@ instance Data a => Data (Bag a) where
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "Bag"
dataCast1 x = gcast1 x
-\end{code}
-
diff --git a/compiler/utils/Digraph.lhs b/compiler/utils/Digraph.hs
index 35782bac6e..8f5df0ce05 100644
--- a/compiler/utils/Digraph.lhs
+++ b/compiler/utils/Digraph.hs
@@ -1,8 +1,5 @@
-%
-% (c) The University of Glasgow 2006
-%
+-- (c) The University of Glasgow 2006
-\begin{code}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module Digraph(
Graph, graphFromVerticesAndAdjacency, graphFromEdgedVertices,
@@ -58,13 +55,13 @@ import Data.Ord
import Data.Array.ST
import qualified Data.Map as Map
import qualified Data.Set as Set
-\end{code}
-%************************************************************************
-%* *
-%* Graphs and Graph Construction
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* Graphs and Graph Construction
+* *
+************************************************************************
Note [Nodes, keys, vertices]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -75,8 +72,8 @@ Note [Nodes, keys, vertices]
* Digraph then maps each 'key' to a Vertex (Int) which is
arranged densely in 0.n
+-}
-\begin{code}
data Graph node = Graph {
gr_int_graph :: IntGraph,
gr_vertex_to_node :: Vertex -> node,
@@ -151,15 +148,15 @@ reduceNodesIntoVertices nodes key_extractor = (bounds, (!) vertex_map, key_verte
LT -> find a (mid - 1)
EQ -> Just mid
GT -> find (mid + 1) b
-\end{code}
-%************************************************************************
-%* *
-%* SCC
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* SCC
+* *
+************************************************************************
+-}
-\begin{code}
type WorkItem key payload
= (Node key payload, -- Tip of the path
[payload]) -- Rest of the path;
@@ -208,15 +205,15 @@ findCycle graph
new_work :: [key] -> [payload] -> [WorkItem key payload]
new_work deps path = [ (n, path) | Just n <- map (`Map.lookup` env) deps ]
-\end{code}
-%************************************************************************
-%* *
-%* SCC
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* SCC
+* *
+************************************************************************
+-}
-\begin{code}
data SCC vertex = AcyclicSCC vertex
| CyclicSCC [vertex]
@@ -234,19 +231,19 @@ flattenSCC (CyclicSCC vs) = vs
instance Outputable a => Outputable (SCC a) where
ppr (AcyclicSCC v) = text "NONREC" $$ (nest 3 (ppr v))
ppr (CyclicSCC vs) = text "REC" $$ (nest 3 (vcat (map ppr vs)))
-\end{code}
-%************************************************************************
-%* *
-%* Strongly Connected Component wrappers for Graph
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* Strongly Connected Component wrappers for Graph
+* *
+************************************************************************
Note: the components are returned topologically sorted: later components
depend on earlier ones, but not vice versa i.e. later components only have
edges going from them to earlier ones.
+-}
-\begin{code}
stronglyConnCompG :: Graph node -> [SCC node]
stronglyConnCompG graph = decodeSccs graph forest
where forest = {-# SCC "Digraph.scc" #-} scc (gr_int_graph graph)
@@ -278,15 +275,15 @@ stronglyConnCompFromEdgedVerticesR
=> [Node key payload]
-> [SCC (Node key payload)]
stronglyConnCompFromEdgedVerticesR = stronglyConnCompG . graphFromEdgedVertices
-\end{code}
-%************************************************************************
-%* *
-%* Misc wrappers for Graph
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* Misc wrappers for Graph
+* *
+************************************************************************
+-}
-\begin{code}
topologicalSortG :: Graph node -> [node]
topologicalSortG graph = map (gr_vertex_to_node graph) result
where result = {-# SCC "Digraph.topSort" #-} topSort (gr_int_graph graph)
@@ -340,15 +337,14 @@ emptyG g = graphEmpty (gr_int_graph g)
componentsG :: Graph node -> [[node]]
componentsG graph = map (map (gr_vertex_to_node graph) . flattenTree) $ components (gr_int_graph graph)
-\end{code}
-%************************************************************************
-%* *
-%* Showing Graphs
-%* *
-%************************************************************************
-
-\begin{code}
+{-
+************************************************************************
+* *
+* Showing Graphs
+* *
+************************************************************************
+-}
instance Outputable node => Outputable (Graph node) where
ppr graph = vcat [
@@ -359,23 +355,20 @@ instance Outputable node => Outputable (Graph node) where
instance Outputable node => Outputable (Edge node) where
ppr (Edge from to) = ppr from <+> text "->" <+> ppr to
-\end{code}
-
-%************************************************************************
-%* *
-%* IntGraphs
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* IntGraphs
+* *
+************************************************************************
+-}
-\begin{code}
type Vertex = Int
type Table a = Array Vertex a
type IntGraph = Table [Vertex]
type Bounds = (Vertex, Vertex)
type IntEdge = (Vertex, Vertex)
-\end{code}
-\begin{code}
vertices :: IntGraph -> [Vertex]
vertices = indices
@@ -405,15 +398,14 @@ graphEmpty :: IntGraph -> Bool
graphEmpty g = lo > hi
where (lo, hi) = bounds g
-\end{code}
-
-%************************************************************************
-%* *
-%* Trees and forests
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* Trees and forests
+* *
+************************************************************************
+-}
-\begin{code}
data Tree a = Node a (Forest a)
type Forest a = [Tree a]
@@ -422,9 +414,7 @@ mapTree f (Node x ts) = Node (f x) (map (mapTree f) ts)
flattenTree :: Tree a -> [a]
flattenTree (Node x ts) = x : concatMap flattenTree ts
-\end{code}
-\begin{code}
instance Show a => Show (Tree a) where
showsPrec _ t s = showTree t ++ s
@@ -451,16 +441,15 @@ draw (Node x ts) = grp this (space (length this)) (stLoop ts)
grp fst rst = zipWith (++) (fst:repeat rst)
[s1,s2,s3,s4,s5,s6] = ["- ", "--", "-+", " |", " `", " +"]
-\end{code}
+{-
+************************************************************************
+* *
+* Depth first search
+* *
+************************************************************************
+-}
-%************************************************************************
-%* *
-%* Depth first search
-%* *
-%************************************************************************
-
-\begin{code}
type Set s = STArray s Vertex Bool
mkEmpty :: Bounds -> ST s (Set s)
@@ -471,9 +460,7 @@ contains m v = readArray m v
include :: Set s -> Vertex -> ST s ()
include m v = writeArray m v True
-\end{code}
-\begin{code}
dff :: IntGraph -> Forest Vertex
dff g = dfs g (vertices g)
@@ -498,20 +485,19 @@ chop m (Node v ts : us)
chop m ts >>= \as ->
chop m us >>= \bs ->
return (Node v as : bs)
-\end{code}
-
-%************************************************************************
-%* *
-%* Algorithms
-%* *
-%************************************************************************
+{-
+************************************************************************
+* *
+* Algorithms
+* *
+************************************************************************
------------------------------------------------------------
-- Algorithm 1: depth first search numbering
------------------------------------------------------------
+-}
-\begin{code}
preorder :: Tree a -> [a]
preorder (Node a ts) = a : preorderF ts
@@ -523,13 +509,13 @@ tabulate bnds vs = array bnds (zip vs [1..])
preArr :: Bounds -> Forest Vertex -> Table Int
preArr bnds = tabulate bnds . preorderF
-\end{code}
+{-
------------------------------------------------------------
-- Algorithm 2: topological sorting
------------------------------------------------------------
+-}
-\begin{code}
postorder :: Tree a -> [a] -> [a]
postorder (Node a ts) = postorderF ts . (a :)
@@ -541,34 +527,34 @@ postOrd g = postorderF (dff g) []
topSort :: IntGraph -> [Vertex]
topSort = reverse . postOrd
-\end{code}
+{-
------------------------------------------------------------
-- Algorithm 3: connected components
------------------------------------------------------------
+-}
-\begin{code}
components :: IntGraph -> Forest Vertex
components = dff . undirected
undirected :: IntGraph -> IntGraph
undirected g = buildG (bounds g) (edges g ++ reverseE g)
-\end{code}
+{-
------------------------------------------------------------
-- Algorithm 4: strongly connected components
------------------------------------------------------------
+-}
-\begin{code}
scc :: IntGraph -> Forest Vertex
scc g = dfs g (reverse (postOrd (transpose g)))
-\end{code}
+{-
------------------------------------------------------------
-- Algorithm 5: Classifying edges
------------------------------------------------------------
+-}
-\begin{code}
back :: IntGraph -> Table Int -> IntGraph
back g post = mapT select g
where select v ws = [ w | w <- ws, post!v < post!w ]
@@ -580,25 +566,25 @@ cross g pre post = mapT select g
forward :: IntGraph -> IntGraph -> Table Int -> IntGraph
forward g tree pre = mapT select g
where select v ws = [ w | w <- ws, pre!v < pre!w ] \\ tree!v
-\end{code}
+{-
------------------------------------------------------------
-- Algorithm 6: Finding reachable vertices
------------------------------------------------------------
+-}
-\begin{code}
reachable :: IntGraph -> [Vertex] -> [Vertex]
reachable g vs = preorderF (dfs g vs)
path :: IntGraph -> Vertex -> Vertex -> Bool
path g v w = w `elem` (reachable g [v])
-\end{code}
+{-
------------------------------------------------------------
-- Algorithm 7: Biconnected components
------------------------------------------------------------
+-}
-\begin{code}
bcc :: IntGraph -> Forest [Vertex]
bcc g = (concat . map bicomps . map (do_label g dnum)) forest
where forest = dff g
@@ -620,8 +606,8 @@ collect (Node (v,dv,lv) ts) = (lv, Node (v:vs) cs)
vs = concat [ ws | (lw, Node ws _) <- collected, lw<dv]
cs = concat [ if lw<dv then us else [Node (v:ws) us]
| (lw, Node ws us) <- collected ]
-\end{code}
+{-
------------------------------------------------------------
-- Algorithm 8: Total ordering on groups of vertices
------------------------------------------------------------
@@ -637,8 +623,7 @@ We proceed by iteratively removing elements with no outgoing edges
and their associated edges from the graph.
This probably isn't very efficient and certainly isn't very clever.
-
-\begin{code}
+-}
vertexGroups :: IntGraph -> [[Vertex]]
vertexGroups g = runST (mkEmpty (bounds g) >>= \provided -> vertexGroupsS provided g next_vertices)
@@ -665,4 +650,3 @@ vertexGroupsS provided g to_provide
vertexReady :: Set s -> IntGraph -> Vertex -> ST s Bool
vertexReady provided g v = liftM2 (&&) (liftM not $ provided `contains` v) (allM (provided `contains`) (g!v))
-\end{code}
diff --git a/compiler/utils/FastBool.lhs b/compiler/utils/FastBool.hs
index 9aa1a75b37..9e88376f0a 100644
--- a/compiler/utils/FastBool.lhs
+++ b/compiler/utils/FastBool.hs
@@ -1,9 +1,9 @@
-%
-% (c) The University of Glasgow, 2000-2006
-%
+{-
+(c) The University of Glasgow, 2000-2006
+
\section{Fast booleans}
+-}
-\begin{code}
{-# LANGUAGE CPP, MagicHash #-}
module FastBool (
@@ -68,5 +68,3 @@ fastBool :: Bool -> FastBool
isFastTrue :: FastBool -> Bool
fastOr :: FastBool -> FastBool -> FastBool
fastAnd :: FastBool -> FastBool -> FastBool
-
-\end{code}
diff --git a/compiler/utils/FastFunctions.lhs b/compiler/utils/FastFunctions.hs
index 854bd13b11..140e42949a 100644
--- a/compiler/utils/FastFunctions.lhs
+++ b/compiler/utils/FastFunctions.hs
@@ -1,9 +1,10 @@
+{-
-% (c) The University of Glasgow, 2000-2006
-%
+(c) The University of Glasgow, 2000-2006
+
\section{Fast functions}
+-}
-\begin{code}
{-# LANGUAGE CPP, MagicHash, UnboxedTuples #-}
module FastFunctions (
@@ -43,5 +44,3 @@ global a = unsafePerformIO (newIORef a)
indexWord8OffFastPtr :: FastPtr Word8 -> FastInt -> Word8
indexWord8OffFastPtrAsFastChar :: FastPtr Word8 -> FastInt -> FastChar
indexWord8OffFastPtrAsFastInt :: FastPtr Word8 -> FastInt -> FastInt
-
-\end{code}
diff --git a/compiler/utils/FastMutInt.lhs b/compiler/utils/FastMutInt.hs
index e866aa5d38..4cde1216ed 100644
--- a/compiler/utils/FastMutInt.lhs
+++ b/compiler/utils/FastMutInt.hs
@@ -1,4 +1,3 @@
-\begin{code}
{-# LANGUAGE CPP, BangPatterns, MagicHash, UnboxedTuples #-}
{-# OPTIONS_GHC -O #-}
-- We always optimise this, otherwise performance of a non-optimised
@@ -32,9 +31,7 @@ writeFastMutInt :: FastMutInt -> Int -> IO ()
newFastMutPtr :: IO FastMutPtr
readFastMutPtr :: FastMutPtr -> IO (Ptr a)
writeFastMutPtr :: FastMutPtr -> Ptr a -> IO ()
-\end{code}
-\begin{code}
data FastMutInt = FastMutInt (MutableByteArray# RealWorld)
newFastMutInt = IO $ \s ->
@@ -64,5 +61,3 @@ readFastMutPtr (FastMutPtr arr) = IO $ \s ->
writeFastMutPtr (FastMutPtr arr) (Ptr i) = IO $ \s ->
case writeAddrArray# arr 0# i s of { s ->
(# s, () #) }
-\end{code}
-
diff --git a/compiler/utils/FastString.lhs b/compiler/utils/FastString.hs
index c1f7973e76..9607d24823 100644
--- a/compiler/utils/FastString.lhs
+++ b/compiler/utils/FastString.hs
@@ -1,7 +1,5 @@
-%
-% (c) The University of Glasgow, 1997-2006
-%
-\begin{code}
+-- (c) The University of Glasgow, 1997-2006
+
{-# LANGUAGE BangPatterns, CPP, DeriveDataTypeable, MagicHash, UnboxedTuples #-}
{-# OPTIONS_GHC -O -funbox-strict-fields #-}
-- We always optimise this, otherwise performance of a non-optimised
@@ -640,4 +638,3 @@ fsLit x = mkFastString x
forall x . sLit (unpackCString# x) = mkLitString# x #-}
{-# RULES "fslit"
forall x . fsLit (unpackCString# x) = mkFastString# x #-}
-\end{code}
diff --git a/compiler/utils/FastTypes.lhs b/compiler/utils/FastTypes.hs
index 6b1517c484..a5c1aa9637 100644
--- a/compiler/utils/FastTypes.lhs
+++ b/compiler/utils/FastTypes.hs
@@ -1,9 +1,9 @@
-%
-% (c) The University of Glasgow, 2000-2006
-%
+{-
+(c) The University of Glasgow, 2000-2006
+
\section{Fast integers, etc... booleans moved to FastBool for using panic}
+-}
-\begin{code}
{-# LANGUAGE CPP, MagicHash #-}
--Even if the optimizer could handle boxed arithmetic equally well,
@@ -136,5 +136,3 @@ eqFastChar :: FastChar -> FastChar -> Bool
pBox :: FastPtr a -> Ptr a
pUnbox :: Ptr a -> FastPtr a
castFastPtr :: FastPtr a -> FastPtr b
-
-\end{code}
diff --git a/compiler/utils/FiniteMap.lhs b/compiler/utils/FiniteMap.hs
index b52f28c324..dccfca10a9 100644
--- a/compiler/utils/FiniteMap.lhs
+++ b/compiler/utils/FiniteMap.hs
@@ -1,6 +1,5 @@
-Some extra functions to extend Data.Map
+-- Some extra functions to extend Data.Map
-\begin{code}
module FiniteMap (
insertList,
insertListWith,
@@ -28,5 +27,3 @@ foldRight :: (elt -> a -> a) -> a -> Map key elt -> a
foldRight = Map.fold
foldRightWithKey :: (key -> elt -> a -> a) -> a -> Map key elt -> a
foldRightWithKey = Map.foldrWithKey
-\end{code}
-
diff --git a/compiler/utils/ListSetOps.lhs b/compiler/utils/ListSetOps.hs
index 6247dc67f6..54faa4f600 100644
--- a/compiler/utils/ListSetOps.lhs
+++ b/compiler/utils/ListSetOps.hs
@@ -1,10 +1,10 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+
\section[ListSetOps]{Set-like operations on lists}
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module ListSetOps (
@@ -29,8 +29,8 @@ import UniqFM
import Util
import Data.List
-\end{code}
+{-
---------
#ifndef DEBUG
getNth :: [a] -> Int -> a
@@ -41,20 +41,21 @@ getNth xs n = ASSERT2( xs `lengthAtLeast` n, ppr n $$ ppr xs )
xs !! n
#endif
----------
-\begin{code}
+-}
+
getNth :: Outputable a => [a] -> Int -> a
getNth xs n = ASSERT2( xs `lengthExceeds` n, ppr n $$ ppr xs )
xs !! n
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
Treating lists as sets
Assumes the lists contain no duplicates, but are unordered
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
insertList :: Eq a => a -> [a] -> [a]
-- Assumes the arg list contains no dups; guarantees the result has no dups
insertList x xs | isIn "insert" x xs = xs
@@ -62,25 +63,24 @@ insertList x xs | isIn "insert" x xs = xs
unionLists :: (Outputable a, Eq a) => [a] -> [a] -> [a]
-- Assumes that the arguments contain no duplicates
-unionLists xs ys
+unionLists xs ys
= WARN(length xs > 100 || length ys > 100, ppr xs $$ ppr ys)
[x | x <- xs, isn'tIn "unionLists" x ys] ++ ys
minusList :: (Eq a) => [a] -> [a] -> [a]
-- Everything in the first list that is not in the second list:
minusList xs ys = [ x | x <- xs, isn'tIn "minusList" x ys]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-assoc]{Association lists}
-%* *
-%************************************************************************
+* *
+************************************************************************
Inefficient finite maps based on association lists and equality.
+-}
-\begin{code}
-- A finite mapping based on equality and association lists
type Assoc a b = [(a,b)]
@@ -104,15 +104,15 @@ assocMaybe alist key
where
lookup [] = Nothing
lookup ((tv,ty):rest) = if key == tv then Just ty else lookup rest
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-dups]{Duplicate-handling}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
hasNoDups :: (Eq a) => [a] -> Bool
hasNoDups xs = f [] xs
@@ -123,9 +123,7 @@ hasNoDups xs = f [] xs
else f (x:seen_so_far) xs
is_elem = isIn "hasNoDups"
-\end{code}
-\begin{code}
equivClasses :: (a -> a -> Ordering) -- Comparison
-> [a]
-> [[a]]
@@ -135,16 +133,16 @@ equivClasses _ stuff@[_] = [stuff]
equivClasses cmp items = runs eq (sortBy cmp items)
where
eq a b = case cmp a b of { EQ -> True; _ -> False }
-\end{code}
+{-
The first cases in @equivClasses@ above are just to cut to the point
more quickly...
@runs@ groups a list into a list of lists, each sublist being a run of
identical elements of the input list. It is passed a predicate @p@ which
tells when two elements are equal.
+-}
-\begin{code}
runs :: (a -> a -> Bool) -- Equality
-> [a]
-> [[a]]
@@ -152,9 +150,7 @@ runs :: (a -> a -> Bool) -- Equality
runs _ [] = []
runs p (x:xs) = case (span (p x) xs) of
(first, rest) -> (x:first) : (runs p rest)
-\end{code}
-\begin{code}
removeDups :: (a -> a -> Ordering) -- Comparison function
-> [a]
-> ([a], -- List with no duplicates
@@ -176,10 +172,7 @@ findDupsEq _ [] = []
findDupsEq eq (x:xs) | null eq_xs = findDupsEq eq xs
| otherwise = (x:eq_xs) : findDupsEq eq neq_xs
where (eq_xs, neq_xs) = partition (eq x) xs
-\end{code}
-
-\begin{code}
equivClassesByUniq :: (a -> Unique) -> [a] -> [[a]]
-- NB: it's *very* important that if we have the input list [a,b,c],
-- where a,b,c all have the same unique, then we get back the list
@@ -192,5 +185,3 @@ equivClassesByUniq get_uniq xs
where
add a ufm = addToUFM_C tack_on ufm (get_uniq a) [a]
tack_on old new = new++old
-\end{code}
-
diff --git a/compiler/utils/Maybes.lhs b/compiler/utils/Maybes.hs
index 8052b1d848..fc8e3199ae 100644
--- a/compiler/utils/Maybes.lhs
+++ b/compiler/utils/Maybes.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module Maybes (
module Data.Maybe,
@@ -25,15 +24,15 @@ import Control.Monad
import Data.Maybe
infixr 4 `orElse`
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Maybe type]{The @Maybe@ type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
firstJust :: Maybe a -> Maybe a -> Maybe a
firstJust a b = firstJusts [a, b]
@@ -54,15 +53,14 @@ whenIsJust Nothing _ = return ()
-- | Flipped version of @fromMaybe@, useful for chaining.
orElse :: Maybe a -> a -> a
orElse = flip fromMaybe
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[MaybeT type]{The @MaybeT@ monad transformer}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
newtype MaybeT m a = MaybeT {runMaybeT :: m (Maybe a)}
@@ -78,16 +76,14 @@ instance Monad m => Monad (MaybeT m) where
x >>= f = MaybeT $ runMaybeT x >>= maybe (return Nothing) (runMaybeT . f)
fail _ = MaybeT $ return Nothing
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[MaybeErr type]{The @MaybeErr@ type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
data MaybeErr err val = Succeeded val | Failed err
instance Functor (MaybeErr err) where
@@ -108,4 +104,3 @@ isSuccess (Failed {}) = False
failME :: err -> MaybeErr err val
failME e = Failed e
-\end{code}
diff --git a/compiler/utils/OrdList.lhs b/compiler/utils/OrdList.hs
index 42abb51696..ad72ca1d45 100644
--- a/compiler/utils/OrdList.lhs
+++ b/compiler/utils/OrdList.hs
@@ -1,14 +1,14 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The AQUA Project, Glasgow University, 1993-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The AQUA Project, Glasgow University, 1993-1998
+
This is useful, general stuff for the Native Code Generator.
Provide trees (of instructions), so that lists of instructions
can be appended in linear time.
+-}
-\begin{code}
module OrdList (
OrdList,
nilOL, isNilOL, unitOL, appOL, consOL, snocOL, concatOL,
@@ -96,4 +96,3 @@ foldlOL k z (Many xs) = foldl k z xs
toOL :: [a] -> OrdList a
toOL [] = None
toOL xs = Many xs
-\end{code}
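
The OrdList header above promises linear-time (constant-time per append) concatenation by keeping a tree of chunks; a minimal sketch of that idea follows. The constructor set here is trimmed to four cases, so names and coverage differ slightly from the real module.

-- appOL builds a Two node in O(1); fromOL flattens once, left to right.
data OrdList a = None | One a | Two (OrdList a) (OrdList a) | Many [a]

nilOL :: OrdList a
nilOL = None

unitOL :: a -> OrdList a
unitOL = One

appOL :: OrdList a -> OrdList a -> OrdList a
appOL None b = b
appOL a None = a
appOL a b    = Two a b

fromOL :: OrdList a -> [a]
fromOL ol = go ol []
  where
    go None      acc = acc
    go (One x)   acc = x : acc
    go (Two l r) acc = go l (go r acc)
    go (Many xs) acc = xs ++ acc

main :: IO ()
main = print (fromOL (unitOL 1 `appOL` Many [2, 3] `appOL` unitOL (4 :: Int)))
-- prints [1,2,3,4]
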
diff --git a/compiler/utils/Outputable.lhs b/compiler/utils/Outputable.hs
index a4ba48c609..488094a498 100644
--- a/compiler/utils/Outputable.lhs
+++ b/compiler/utils/Outputable.hs
@@ -1,9 +1,8 @@
-%
-% (c) The University of Glasgow 2006-2012
-% (c) The GRASP Project, Glasgow University, 1992-1998
-%
+{-
+(c) The University of Glasgow 2006-2012
+(c) The GRASP Project, Glasgow University, 1992-1998
+-}
-\begin{code}
-- | This module defines classes and functions for pretty-printing. It also
-- exports a number of helpful debugging and other utilities such as 'trace' and 'panic'.
--
@@ -105,17 +104,14 @@ import Text.Printf
import GHC.Fingerprint
import GHC.Show ( showMultiLineString )
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The @PprStyle@ data type}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
data PprStyle
= PprUser PrintUnqualified Depth
@@ -246,8 +242,8 @@ mkUserStyle :: PrintUnqualified -> Depth -> PprStyle
mkUserStyle unqual depth
| opt_PprStyle_Debug = PprDebug
| otherwise = PprUser unqual depth
-\end{code}
+{-
Orthogonal to the above printing styles are (possibly) some
command-line flags that affect printing (often carried with the
style). The most likely ones are variations on how much type info is
@@ -256,13 +252,13 @@ shown.
The following test decides whether or not we are actually generating
code (either C or assembly), or generating interface files.
-%************************************************************************
-%* *
+************************************************************************
+* *
\subsection{The @SDoc@ data type}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
newtype SDoc = SDoc { runSDoc :: SDocContext -> Doc }
data SDocContext = SDC
@@ -294,7 +290,7 @@ pprDeeper d = SDoc $ \ctx -> case ctx of
pprDeeperList :: ([SDoc] -> SDoc) -> [SDoc] -> SDoc
-- Truncate a list that is longer than the current depth
-pprDeeperList f ds
+pprDeeperList f ds
| null ds = f []
| otherwise = SDoc work
where
@@ -324,9 +320,7 @@ sdocWithDynFlags f = SDoc $ \ctx -> runSDoc (f (sdocDynFlags ctx)) ctx
sdocWithPlatform :: (Platform -> SDoc) -> SDoc
sdocWithPlatform f = sdocWithDynFlags (f . targetPlatform)
-\end{code}
-\begin{code}
qualName :: PprStyle -> QueryQualifyName
qualName (PprUser q _) mod occ = queryQualifyName q mod occ
qualName (PprDump q) mod occ = queryQualifyName q mod occ
@@ -372,9 +366,6 @@ ifPprDebug d = SDoc $ \ctx ->
case ctx of
SDC{sdocStyle=PprDebug} -> runSDoc d ctx
_ -> Pretty.empty
-\end{code}
-
-\begin{code}
printForUser :: DynFlags -> Handle -> PrintUnqualified -> SDoc -> IO ()
printForUser dflags handle unqual doc
@@ -452,9 +443,7 @@ showSDocDumpOneLine dflags d
irrelevantNCols :: Int
-- Used for OneLineMode and LeftMode when number of cols isn't used
irrelevantNCols = 1
-\end{code}
-\begin{code}
docToSDoc :: Doc -> SDoc
docToSDoc d = SDoc (\_ -> d)
@@ -485,7 +474,7 @@ float n = docToSDoc $ Pretty.float n
double n = docToSDoc $ Pretty.double n
rational n = docToSDoc $ Pretty.rational n
-parens, braces, brackets, quotes, quote,
+parens, braces, brackets, quotes, quote,
paBrackets, doubleQuotes, angleBrackets :: SDoc -> SDoc
parens d = SDoc $ Pretty.parens . runSDoc d
@@ -655,16 +644,14 @@ bold = coloured colBold
keyword :: SDoc -> SDoc
keyword = bold
-\end{code}
-
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Outputable-class]{The @Outputable@ class}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Class designating that some type has an 'SDoc' representation
class Outputable a where
ppr :: a -> SDoc
@@ -675,9 +662,7 @@ class Outputable a where
ppr = pprPrec 0
pprPrec _ = ppr
-\end{code}
-\begin{code}
instance Outputable Char where
ppr c = text [c]
@@ -779,15 +764,15 @@ instance (Outputable elt) => Outputable (IM.IntMap elt) where
instance Outputable Fingerprint where
ppr (Fingerprint w1 w2) = text (printf "%016x%016x" w1 w2)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The @OutputableBndr@ class}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | 'BindingSite' is used to tell the thing that prints binder what
-- language construct is binding the identifier. This can be used
-- to decide how much info to print.
@@ -800,18 +785,18 @@ class Outputable a => OutputableBndr a where
pprBndr _b x = ppr x
pprPrefixOcc, pprInfixOcc :: a -> SDoc
- -- Print an occurrence of the name, suitable either in the
+ -- Print an occurrence of the name, suitable either in the
-- prefix position of an application, thus (f a b) or ((+) x)
-- or infix position, thus (a `f` b) or (x + y)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Random printing helpers}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- We have 31-bit Chars and will simply use Show instances of Char and String.
-- | Special combinator for showing character literals.
@@ -849,15 +834,15 @@ pprInfixVar is_operator pp_v
---------------------
pprFastFilePath :: FastString -> SDoc
pprFastFilePath path = text $ normalise $ unpackFS path
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Other helper functions}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
pprWithCommas :: (a -> SDoc) -- ^ The pretty printing function to use
-> [a] -- ^ The things to be pretty printed
-> SDoc -- ^ 'SDoc' where the things have been pretty printed,
@@ -885,16 +870,15 @@ quotedListWithOr :: [SDoc] -> SDoc
-- [x,y,z] ==> `x', `y' or `z'
quotedListWithOr xs@(_:_:_) = quotedList (init xs) <+> ptext (sLit "or") <+> quotes (last xs)
quotedListWithOr xs = quotedList xs
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Printing numbers verbally}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
intWithCommas :: Integral a => a -> SDoc
-- Prints a big integer with commas, eg 345,821
intWithCommas n
@@ -982,16 +966,14 @@ plural _ = char 's'
isOrAre :: [a] -> SDoc
isOrAre [_] = ptext (sLit "is")
isOrAre _ = ptext (sLit "are")
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Error handling}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
pprPanic :: String -> SDoc -> a
-- ^ Throw an exception saying "bug in GHC"
@@ -1043,5 +1025,3 @@ pprDebugAndThen dflags cont heading pretty_msg
= cont (showSDocDump dflags doc)
where
doc = sep [heading, nest 2 pretty_msg]
-\end{code}
-
diff --git a/compiler/utils/Outputable.lhs-boot b/compiler/utils/Outputable.hs-boot
index e013307ef9..1c15a6982a 100644
--- a/compiler/utils/Outputable.lhs-boot
+++ b/compiler/utils/Outputable.hs-boot
@@ -1,7 +1,3 @@
-
-\begin{code}
module Outputable where
data SDoc
-\end{code}
-
diff --git a/compiler/utils/Pair.lhs b/compiler/utils/Pair.hs
index 529ba669ea..f2d39de48e 100644
--- a/compiler/utils/Pair.lhs
+++ b/compiler/utils/Pair.hs
@@ -1,8 +1,8 @@
-
+{-
A simple homogeneous pair type with useful Functor, Applicative, and
Traversable instances.
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
module Pair ( Pair(..), unPair, toPair, swap ) where
@@ -48,4 +48,3 @@ toPair (x,y) = Pair x y
swap :: Pair a -> Pair a
swap (Pair x y) = Pair y x
-\end{code}
diff --git a/compiler/utils/Panic.lhs b/compiler/utils/Panic.hs
index 23bf01cafe..bfb9df3ad3 100644
--- a/compiler/utils/Panic.lhs
+++ b/compiler/utils/Panic.hs
@@ -1,13 +1,13 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The GRASP Project, Glasgow University, 1992-2000
-%
+{-
+(c) The University of Glasgow 2006
+(c) The GRASP Project, Glasgow University, 1992-2000
+
Defines basic functions for printing error messages.
It's hard to put these functions anywhere else without causing
some unnecessary loops in the module dependency graph.
+-}
-\begin{code}
{-# LANGUAGE CPP, DeriveDataTypeable, ScopedTypeVariables #-}
module Panic (
@@ -305,5 +305,3 @@ popInterruptTargetThread =
modifyMVar_ interruptTargetThread $
\tids -> return $! case tids of [] -> []
(_:ts) -> ts
-
-\end{code}
diff --git a/compiler/utils/Pretty.lhs b/compiler/utils/Pretty.hs
index 0357c8cfba..5e441838fc 100644
--- a/compiler/utils/Pretty.lhs
+++ b/compiler/utils/Pretty.hs
@@ -1,15 +1,16 @@
-%*********************************************************************************
-%* *
-%* John Hughes's and Simon Peyton Jones's Pretty Printer Combinators *
-%* *
-%* based on "The Design of a Pretty-printing Library" *
-%* in Advanced Functional Programming, *
-%* Johan Jeuring and Erik Meijer (eds), LNCS 925 *
-%* http://www.cs.chalmers.se/~rjmh/Papers/pretty.ps *
-%* *
-%* Heavily modified by Simon Peyton Jones, Dec 96 *
-%* *
-%*********************************************************************************
+{-
+*********************************************************************************
+* *
+* John Hughes's and Simon Peyton Jones's Pretty Printer Combinators *
+* *
+* based on "The Design of a Pretty-printing Library" *
+* in Advanced Functional Programming, *
+* Johan Jeuring and Erik Meijer (eds), LNCS 925 *
+* http://www.cs.chalmers.se/~rjmh/Papers/pretty.ps *
+* *
+* Heavily modified by Simon Peyton Jones, Dec 96 *
+* *
+*********************************************************************************
Version 3.0 28 May 1997
* Cured massive performance bug. If you write
@@ -148,10 +149,8 @@ Relative to John's original paper, there are the following new features:
6. Numerous implementation tidy-ups
Use of unboxed data types to speed up the implementation
+-}
-
-
-\begin{code}
{-# LANGUAGE BangPatterns, CPP, MagicHash #-}
module Pretty (
@@ -194,26 +193,20 @@ import GHC.Ptr ( Ptr(..) )
infixl 6 <>
infixl 6 <+>
infixl 5 $$, $+$
-\end{code}
-
-
-\begin{code}
-- Disable ASSERT checks; they are expensive!
#define LOCAL_ASSERT(x)
-\end{code}
-
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{The interface}
-%* *
-%*********************************************************
+* *
+*********************************************************
The primitive @Doc@ values
+-}
-\begin{code}
empty :: Doc
isEmpty :: Doc -> Bool
-- | Some text, but without any width. Use for non-printing text
@@ -234,11 +227,9 @@ integer :: Integer -> Doc
float :: Float -> Doc
double :: Double -> Doc
rational :: Rational -> Doc
-\end{code}
-Combining @Doc@ values
+-- Combining @Doc@ values
-\begin{code}
(<>) :: Doc -> Doc -> Doc -- Beside
hcat :: [Doc] -> Doc -- List version of <>
(<+>) :: Doc -> Doc -> Doc -- Beside, separated by space
@@ -254,18 +245,14 @@ fcat :: [Doc] -> Doc -- ``Paragraph fill'' version of cat
fsep :: [Doc] -> Doc -- ``Paragraph fill'' version of sep
nest :: Int -> Doc -> Doc -- Nested
-\end{code}
-GHC-specific ones.
+-- GHC-specific ones.
-\begin{code}
hang :: Doc -> Int -> Doc -> Doc
punctuate :: Doc -> [Doc] -> [Doc] -- punctuate p [d1, ... dn] = [d1 <> p, d2 <> p, ... dn-1 <> p, dn]
-\end{code}
-Displaying @Doc@ values.
+-- Displaying @Doc@ values.
-\begin{code}
instance Show Doc where
showsPrec _ doc cont = showDocPlus PageMode 100 doc cont
@@ -281,14 +268,13 @@ data Mode = PageMode -- Normal
| ZigZagMode -- With zig-zag cuts
| LeftMode -- No indentation, infinitely long lines
| OneLineMode -- All on one line
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{The @Doc@ calculus}
-%* *
-%*********************************************************
+* *
+*********************************************************
The @Doc@ combinators satisfy the following laws:
\begin{verbatim}
@@ -363,13 +349,13 @@ But it doesn't work, for if x=empty, we would have
-%*********************************************************
-%* *
+*********************************************************
+* *
\subsection{Simple derived definitions}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
semi = char ';'
colon = char ':'
comma = char ','
@@ -411,18 +397,18 @@ punctuate p (d:ds) = go d ds
where
go d [] = [d]
go d (e:es) = (d <> p) : go e es
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{The @Doc@ data type}
-%* *
-%*********************************************************
+* *
+*********************************************************
A @Doc@ represents a {\em set} of layouts. A @Doc@ with
no occurrences of @Union@ or @NoDoc@ represents just one layout.
-\begin{code}
+-}
+
data Doc
= Empty -- empty
| NilAbove Doc -- text "" $$ x
@@ -453,8 +439,8 @@ space_text :: TextDetails
space_text = Chr ' '
nl_text :: TextDetails
nl_text = Chr '\n'
-\end{code}
+{-
Here are the invariants:
\begin{itemize}
\item
@@ -486,8 +472,8 @@ is longer than the first line of any layout in the right argument.
this invariant means that the right argument must have at least two
lines.
\end{itemize}
+-}
-\begin{code}
-- Arg of a NilAbove is always an RDoc
nilAbove_ :: Doc -> Doc
nilAbove_ p = LOCAL_ASSERT( _ok p ) NilAbove p
@@ -517,8 +503,8 @@ union_ p q = Union (LOCAL_ASSERT( _ok p ) p) (LOCAL_ASSERT( _ok q ) q)
_ok (NilAbove _) = True
_ok (Union _ _) = True
_ok _ = False
-\end{code}
+{-
Notice the difference between
* NoDoc (no documents)
* Empty (one empty document; no height and no width)
@@ -527,13 +513,13 @@ Notice the difference between
-%*********************************************************
-%* *
+*********************************************************
+* *
\subsection{@empty@, @text@, @nest@, @union@}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
empty = Empty
isEmpty Empty = True
@@ -574,16 +560,15 @@ mkNest k p = nest_ k p
mkUnion :: Doc -> Doc -> Doc
mkUnion Empty _ = Empty
mkUnion p q = p `union_` q
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Vertical composition @$$@}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-
-\begin{code}
p $$ q = Above p False q
($+$) :: Doc -> Doc -> Doc
p $+$ q = Above p True q
@@ -612,9 +597,7 @@ aboveNest (TextBeside s sl p) g k q = textBeside_ s sl rest
Empty -> nilAboveNest g k1 q
_ -> aboveNest p g k1 q
aboveNest _ _ _ _ = panic "aboveNest: Unhandled case"
-\end{code}
-\begin{code}
nilAboveNest :: Bool -> FastInt -> RDoc -> RDoc
-- Specification: text s <> nilaboveNest g k q
-- = text s <> (text "" $g$ nest k q)
@@ -626,16 +609,15 @@ nilAboveNest g k q | (not g) && (k ># _ILIT(0)) -- No newline i
= textBeside_ (Str (spaces k)) k q
| otherwise -- Put them really above
= nilAbove_ (mkNest k q)
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Horizontal composition @<>@}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
p <> q = Beside p False q
p <+> q = Beside p True q
@@ -658,9 +640,7 @@ beside (TextBeside s sl p) g q = textBeside_ s sl $! rest
rest = case p of
Empty -> nilBeside g q
_ -> beside p g q
-\end{code}
-\begin{code}
nilBeside :: Bool -> RDoc -> RDoc
-- Specification: text "" <> nilBeside g p
-- = text "" <g> p
@@ -669,15 +649,15 @@ nilBeside _ Empty = Empty -- Hence the text "" in the spec
nilBeside g (Nest _ p) = nilBeside g p
nilBeside g p | g = textBeside_ space_text (_ILIT(1)) p
| otherwise = p
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Separate, @sep@, Hughes version}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
-- Specification: sep ps = oneLiner (hsep ps)
-- `union`
-- vcat ps
@@ -722,15 +702,15 @@ sepNB g Empty k ys = oneLiner (nilBeside g (reduceDoc rest))
| otherwise = hcat ys
sepNB g p k ys = sep1 g p k ys
-\end{code}
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{@fill@}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
fsep = fill True
fcat = fill False
@@ -771,16 +751,15 @@ fillNB g Empty k (y:ys) = nilBeside g (fill1 g (oneLiner (reduceDoc y)) k1 ys
| otherwise = k
fillNB g p k ys = fill1 g p k ys
-\end{code}
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Selecting the best layout}
-%* *
-%*********************************************************
+* *
+*********************************************************
+-}
-\begin{code}
best :: Int -- Line length
-> Int -- Ribbon length
-> RDoc
@@ -830,12 +809,12 @@ fits _ Empty = True
fits _ (NilAbove _) = True
fits n (TextBeside _ sl p) = fits (n -# sl) p
fits _ _ = panic "fits: Unhandled case"
-\end{code}
+{-
@first@ and @nonEmptySet@ are similar to @nicest@ and @fits@, only simpler.
@first@ returns its first argument if it is non-empty, otherwise its second.
+-}
-\begin{code}
first :: Doc -> Doc -> Doc
first p q | nonEmptySet p = p
| otherwise = q
@@ -848,11 +827,9 @@ nonEmptySet (NilAbove _) = True -- NoDoc always in first line
nonEmptySet (TextBeside _ _ p) = nonEmptySet p
nonEmptySet (Nest _ p) = nonEmptySet p
nonEmptySet _ = panic "nonEmptySet: Unhandled case"
-\end{code}
-@oneLiner@ returns the one-line members of the given set of @Doc@s.
+-- @oneLiner@ returns the one-line members of the given set of @Doc@s.
-\begin{code}
oneLiner :: Doc -> Doc
oneLiner NoDoc = NoDoc
oneLiner Empty = Empty
@@ -861,18 +838,15 @@ oneLiner (TextBeside s sl p) = textBeside_ s sl (oneLiner p)
oneLiner (Nest k p) = nest_ k (oneLiner p)
oneLiner (p `Union` _) = oneLiner p
oneLiner _ = panic "oneLiner: Unhandled case"
-\end{code}
-
-
-%*********************************************************
-%* *
+{-
+*********************************************************
+* *
\subsection{Displaying the best layout}
-%* *
-%*********************************************************
-
+* *
+*********************************************************
+-}
-\begin{code}
showDocPlus :: Mode -> Int -> Doc -> String -> String
showDocPlus mode cols doc rest = fullRender mode cols 1.5 string_txt rest doc
@@ -885,9 +859,6 @@ string_txt (Str s1) s2 = s1 ++ s2
string_txt (PStr s1) s2 = unpackFS s1 ++ s2
string_txt (ZStr s1) s2 = zString s1 ++ s2
string_txt (LStr s1 _) s2 = unpackLitString s1 ++ s2
-\end{code}
-
-\begin{code}
fullRender OneLineMode _ _ txt end doc
= lay (reduceDoc doc)
@@ -977,9 +948,6 @@ spaces :: Int# -> String
spaces n | n <=# _ILIT(0) = ""
| otherwise = ' ' : spaces (n -# _ILIT(1))
-\end{code}
-
-\begin{code}
printDoc :: Mode -> Int -> Handle -> Doc -> IO ()
-- printDoc adds a newline to the end
printDoc mode cols hdl doc = printDoc_ mode cols hdl (doc $$ text "")
@@ -1054,4 +1022,3 @@ layLeft b (TextBeside s _ p) = put b s >> layLeft b p
put b (ZStr s) = bPutFZS b s
put b (LStr s l) = bPutLitString b s l
layLeft _ _ = panic "layLeft: Unhandled case"
-\end{code}
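
The interface and laws above carry over essentially unchanged to the external pretty package, so a behaviour sketch can be run against Text.PrettyPrint (a descendant of this module shipped as a GHC boot library) rather than the compiler-internal Pretty.

import Text.PrettyPrint (render, text, nest, sep, ($$), (<+>))

main :: IO ()
main = do
  putStrLn (render (text "f" <+> text "x"))            -- "f x"
  putStrLn (render (text "where" $$ nest 2 (text "x = 1" $$ text "y = 2")))
  -- where
  --   x = 1
  --   y = 2
  putStrLn (render (sep (map text (words "a b c"))))   -- fits on one line: "a b c"
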
diff --git a/compiler/utils/StringBuffer.lhs b/compiler/utils/StringBuffer.hs
index 9e6e6c1824..570282da57 100644
--- a/compiler/utils/StringBuffer.lhs
+++ b/compiler/utils/StringBuffer.hs
@@ -1,11 +1,11 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The University of Glasgow, 1997-2006
-%
+{-
+(c) The University of Glasgow 2006
+(c) The University of Glasgow, 1997-2006
+
Buffers for scanning string input stored in external arrays.
+-}
-\begin{code}
{-# LANGUAGE BangPatterns, CPP, MagicHash, UnboxedTuples #-}
{-# OPTIONS_GHC -O -funbox-strict-fields #-}
-- We always optimise this, otherwise performance of a non-optimised
@@ -255,5 +255,3 @@ parseUnsignedInteger (StringBuffer buf _ cur) len radix char_to_int
| otherwise = case byteOff i of
char -> go (i + 1) (x * radix + toInteger (char_to_int char))
in go 0 0
-
-\end{code}
diff --git a/compiler/utils/UniqFM.lhs b/compiler/utils/UniqFM.hs
index f0f903522b..8f962d4f5e 100644
--- a/compiler/utils/UniqFM.lhs
+++ b/compiler/utils/UniqFM.hs
@@ -1,7 +1,7 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The AQUA Project, Glasgow University, 1994-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The AQUA Project, Glasgow University, 1994-1998
+
UniqFM: Specialised finite maps, for things with @Uniques@.
@@ -18,8 +18,8 @@ The @UniqFM@ interface maps directly to Data.IntMap, only
``Data.IntMap.union'' is left-biased and ``plusUFM'' right-biased
and ``addToUFM\_C'' and ``Data.IntMap.insertWith'' differ in the order
of arguments of combining function.
+-}
-\begin{code}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveTraversable #-}
@@ -81,15 +81,15 @@ import Data.Data
#if __GLASGOW_HASKELL__ < 709
import Data.Monoid
#endif
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The signature of the module}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
emptyUFM :: UniqFM elt
isNullUFM :: UniqFM elt -> Bool
unitUFM :: Uniquable key => key -> elt -> UniqFM elt
@@ -190,27 +190,26 @@ eltsUFM :: UniqFM elt -> [elt]
ufmToSet_Directly :: UniqFM elt -> S.IntSet
ufmToList :: UniqFM elt -> [(Unique, elt)]
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Monoid interface}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Monoid (UniqFM a) where
mempty = emptyUFM
mappend = plusUFM
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Implementation using ``Data.IntMap''}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
newtype UniqFM ele = UFM (M.IntMap ele)
deriving (Data, Eq, Foldable.Foldable, Functor, Traversable.Traversable,
Typeable)
@@ -294,15 +293,14 @@ joinUFM eltJoin l (OldFact old) (NewFact new) = foldUFM_Directly add (NoChange,
(SomeChange, v') -> (SomeChange, addToUFM_Directly joinmap k v')
(NoChange, _) -> (ch, joinmap)
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Output-ery}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
instance Outputable a => Outputable (UniqFM a) where
ppr ufm = pprUniqFM ppr ufm
@@ -311,4 +309,3 @@ pprUniqFM ppr_elt ufm
= brackets $ fsep $ punctuate comma $
[ ppr uq <+> ptext (sLit ":->") <+> ppr_elt elt
| (uq, elt) <- ufmToList ufm ]
-\end{code}
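
The UniqFM header above notes that Data.IntMap.union is left-biased while plusUFM is right-biased, and that addToUFM_C and Data.IntMap.insertWith take their combining function's arguments in opposite orders. A small sketch against Data.IntMap itself makes the biases concrete; the plusUFM/addToUFM_C correspondences in the comments are paraphrased from that header, not imported from GHC.

import qualified Data.IntMap as M

main :: IO ()
main = do
  let a = M.fromList [(1, "from a")]
      b = M.fromList [(1, "from b")]
  print (M.union a b)                   -- fromList [(1,"from a")]: left-biased, first map wins
  print (M.union b a)                   -- fromList [(1,"from b")]: the right-biased order plusUFM provides
  print (M.insertWith (++) 1 "new " a)  -- insertWith's function sees (new, old): fromList [(1,"new from a")]
  -- addToUFM_C's combining function sees (old, new) instead, hence helpers like
  -- `tack_on old new = new ++ old` in equivClassesByUniq earlier in this patch.
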
diff --git a/compiler/utils/UniqSet.lhs b/compiler/utils/UniqSet.hs
index fae5ddabb6..5a82303157 100644
--- a/compiler/utils/UniqSet.lhs
+++ b/compiler/utils/UniqSet.hs
@@ -1,14 +1,14 @@
-%
-% (c) The University of Glasgow 2006
-% (c) The AQUA Project, Glasgow University, 1994-1998
-%
+{-
+(c) The University of Glasgow 2006
+(c) The AQUA Project, Glasgow University, 1994-1998
+
\section[UniqSet]{Specialised sets, for things with @Uniques@}
Based on @UniqFMs@ (as you would expect).
Basically, the things need to be in class @Uniquable@.
+-}
-\begin{code}
module UniqSet (
-- * Unique set type
UniqSet, -- type synonym for UniqFM a
@@ -37,15 +37,14 @@ module UniqSet (
import UniqFM
import Unique
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{The signature of the module}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
emptyUniqSet :: UniqSet a
unitUniqSet :: Uniquable a => a -> UniqSet a
mkUniqSet :: Uniquable a => [a] -> UniqSet a
@@ -74,15 +73,14 @@ sizeUniqSet :: UniqSet a -> Int
isEmptyUniqSet :: UniqSet a -> Bool
lookupUniqSet :: Uniquable a => UniqSet a -> a -> Maybe a
uniqSetToList :: UniqSet a -> [a]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Implementation using ``UniqFM''}
-%* *
-%************************************************************************
-
-\begin{code}
+* *
+************************************************************************
+-}
type UniqSet a = UniqFM a
@@ -115,5 +113,3 @@ sizeUniqSet = sizeUFM
isEmptyUniqSet = isNullUFM
lookupUniqSet = lookupUFM
uniqSetToList = eltsUFM
-
-\end{code}
diff --git a/compiler/utils/Util.lhs b/compiler/utils/Util.hs
index 7fe45f5d5d..aa3a19b64c 100644
--- a/compiler/utils/Util.lhs
+++ b/compiler/utils/Util.hs
@@ -1,8 +1,5 @@
-%
-% (c) The University of Glasgow 2006
-%
+-- (c) The University of Glasgow 2006
-\begin{code}
{-# LANGUAGE CPP #-}
-- | Highly random utility functions
@@ -135,13 +132,13 @@ import qualified Data.Set as Set
import Data.Time
infixr 9 `thenCmp`
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Is DEBUG on, are we on Windows, etc?}
-%* *
-%************************************************************************
+* *
+************************************************************************
These booleans are global constants, set by CPP flags. They allow us to
recompile a single module (this one) to change whether or not debug output
@@ -151,8 +148,8 @@ It's important that the flags are literal constants (True/False). Then,
with -O, tests of the flags in other modules will simplify to the correct
branch of the conditional, thereby dropping debug code altogether when
the flags are off.
+-}
-\begin{code}
ghciSupported :: Bool
#ifdef GHCI
ghciSupported = True
@@ -194,23 +191,21 @@ isDarwinHost = True
#else
isDarwinHost = False
#endif
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{A for loop}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Compose a function with itself n times. (nth rather than twice)
nTimes :: Int -> (a -> a) -> (a -> a)
nTimes 0 _ = id
nTimes 1 f = f
nTimes n f = f . nTimes (n-1) f
-\end{code}
-\begin{code}
fstOf3 :: (a,b,c) -> a
sndOf3 :: (a,b,c) -> b
thirdOf3 :: (a,b,c) -> c
@@ -223,23 +218,21 @@ third3 f (a, b, c) = (a, b, f c)
uncurry3 :: (a -> b -> c -> d) -> (a, b, c) -> d
uncurry3 f (a, b, c) = f a b c
-\end{code}
-\begin{code}
firstM :: Monad m => (a -> m c) -> (a, b) -> m (c, b)
firstM f (x, y) = liftM (\x' -> (x', y)) (f x)
first3M :: Monad m => (a -> m d) -> (a, b, c) -> m (d, b, c)
first3M f (x, y, z) = liftM (\x' -> (x', y, z)) (f x)
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-lists]{General list processing}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
filterOut :: (a->Bool) -> [a] -> [a]
-- ^ Like filter, only it reverses the sense of the test
filterOut _ [] = []
@@ -268,13 +261,13 @@ chkAppend :: [a] -> [a] -> [a]
chkAppend xs ys
| null ys = xs
| otherwise = xs ++ ys
-\end{code}
+{-
A paranoid @zip@ (and some @zipWith@ friends) that checks the lists
are of equal length. Alastair Reid thinks this should only happen if
DEBUGging on; hey, why not?
+-}
-\begin{code}
zipEqual :: String -> [a] -> [b] -> [(a,b)]
zipWithEqual :: String -> (a->b->c) -> [a]->[b]->[c]
zipWith3Equal :: String -> (a->b->c->d) -> [a]->[b]->[c]->[d]
@@ -304,9 +297,7 @@ zipWith4Equal msg z (a:as) (b:bs) (c:cs) (d:ds)
zipWith4Equal _ _ [] [] [] [] = []
zipWith4Equal msg _ _ _ _ _ = panic ("zipWith4Equal: unequal lists:"++msg)
#endif
-\end{code}
-\begin{code}
-- | 'zipLazy' is a kind of 'zip' that is lazy in the second list (observe the ~)
zipLazy :: [a] -> [b] -> [(a,b)]
zipLazy [] _ = []
@@ -331,10 +322,7 @@ stretchZipWith p z f (x:xs) ys
| otherwise = case ys of
[] -> []
(y:ys) -> f x y : stretchZipWith p z f xs ys
-\end{code}
-
-\begin{code}
mapFst :: (a->c) -> [(a,b)] -> [(c,b)]
mapSnd :: (b->c) -> [(a,b)] -> [(a,c)]
@@ -372,9 +360,7 @@ mapAccumL2 f s1 s2 xs = (s1', s2', ys)
where ((s1', s2'), ys) = mapAccumL (\(s1, s2) x -> case f s1 s2 x of
(s1', s2', y) -> ((s1', s2'), y))
(s1, s2) xs
-\end{code}
-\begin{code}
nOfThem :: Int -> a -> [a]
nOfThem n thing = replicate n thing
@@ -459,11 +445,9 @@ only [a] = a
only (a:_) = a
#endif
only _ = panic "Util: only"
-\end{code}
-Debugging/specialising versions of \tr{elem} and \tr{notElem}
+-- Debugging/specialising versions of \tr{elem} and \tr{notElem}
-\begin{code}
isIn, isn'tIn :: Eq a => String -> a -> [a] -> Bool
# ifndef DEBUG
@@ -489,15 +473,15 @@ isn'tIn msg x ys
(x `notElem` (y:ys))
| otherwise = x /= y && notElem100 (i +# _ILIT(1)) x ys
# endif /* DEBUG */
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsubsection{Sort utils}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
sortWith :: Ord b => (a->b) -> [a] -> [a]
sortWith get_key xs = sortBy (comparing get_key) xs
@@ -507,17 +491,17 @@ minWith get_key xs = ASSERT( not (null xs) )
nubSort :: Ord a => [a] -> [a]
nubSort = Set.toAscList . Set.fromList
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-transitive-closure]{Transitive closure}
-%* *
-%************************************************************************
+* *
+************************************************************************
This algorithm for transitive closure is straightforward, albeit quadratic.
+-}
-\begin{code}
transitiveClosure :: (a -> [a]) -- Successor function
-> (a -> a -> Bool) -- Equality predicate
-> [a]
@@ -533,17 +517,17 @@ transitiveClosure succ eq xs
_ `is_in` [] = False
x `is_in` (y:ys) | eq x y = True
| otherwise = x `is_in` ys
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-accum]{Accumulating}
-%* *
-%************************************************************************
+* *
+************************************************************************
A combination of foldl with zip. It works with equal length lists.
+-}
-\begin{code}
foldl2 :: (acc -> a -> b -> acc) -> acc -> [a] -> [b] -> acc
foldl2 _ z [] [] = z
foldl2 k z (a:as) (b:bs) = foldl2 k (k z a b) as bs
@@ -555,21 +539,19 @@ all2 :: (a -> b -> Bool) -> [a] -> [b] -> Bool
all2 _ [] [] = True
all2 p (x:xs) (y:ys) = p x y && all2 p xs ys
all2 _ _ _ = False
-\end{code}
-Count the number of times a predicate is true
+-- Count the number of times a predicate is true
-\begin{code}
count :: (a -> Bool) -> [a] -> Int
count _ [] = 0
count p (x:xs) | p x = 1 + count p xs
| otherwise = count p xs
-\end{code}
+{-
@splitAt@, @take@, and @drop@ but with length of another
list giving the break-off point:
+-}
-\begin{code}
takeList :: [b] -> [a] -> [a]
takeList [] _ = []
takeList (_:xs) ls =
@@ -629,16 +611,15 @@ split c s = case rest of
[] -> [chunk]
_:rest -> chunk : split c rest
where (chunk, rest) = break (==c) s
-\end{code}
-
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-comparison]{Comparisons}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
isEqual :: Ordering -> Bool
-- Often used in (isEqual (a `compare` b))
isEqual GT = False
@@ -668,20 +649,18 @@ cmpList _ [] _ = LT
cmpList _ _ [] = GT
cmpList cmp (a:as) (b:bs)
= case cmp a b of { EQ -> cmpList cmp as bs; xxx -> xxx }
-\end{code}
-\begin{code}
removeSpaces :: String -> String
removeSpaces = dropWhileEndLE isSpace . dropWhile isSpace
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection{Edit distance}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | Find the "restricted" Damerau-Levenshtein edit distance between two strings.
-- See: <http://en.wikipedia.org/wiki/Damerau-Levenshtein_distance>.
-- Based on the algorithm presented in "A Bit-Vector Algorithm for Computing
@@ -804,59 +783,49 @@ fuzzyLookup user_entered possibilites
--
fuzzy_threshold = truncate $ fromIntegral (length user_entered + 2) / (4 :: Rational)
mAX_RESULTS = 3
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-pairs]{Pairs}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
unzipWith :: (a -> b -> c) -> [(a, b)] -> [c]
unzipWith f pairs = map ( \ (a, b) -> f a b ) pairs
-\end{code}
-\begin{code}
seqList :: [a] -> b -> b
seqList [] b = b
seqList (x:xs) b = x `seq` seqList xs b
-\end{code}
-Global variables:
+-- Global variables:
-\begin{code}
global :: a -> IORef a
global a = unsafePerformIO (newIORef a)
-\end{code}
-\begin{code}
consIORef :: IORef [a] -> a -> IO ()
consIORef var x = do
atomicModifyIORef var (\xs -> (x:xs,()))
-\end{code}
-\begin{code}
globalM :: IO a -> IORef a
globalM ma = unsafePerformIO (ma >>= newIORef)
-\end{code}
-Module names:
+-- Module names:
-\begin{code}
looksLikeModuleName :: String -> Bool
looksLikeModuleName [] = False
looksLikeModuleName (c:cs) = isUpper c && go cs
where go [] = True
go ('.':cs) = looksLikeModuleName cs
go (c:cs) = (isAlphaNum c || c == '_' || c == '\'') && go cs
-\end{code}
+{-
Akin to @Prelude.words@, but acts like the Bourne shell, treating
quoted strings as Haskell Strings, and also parses Haskell [String]
syntax.
+-}
-\begin{code}
getCmd :: String -> Either String -- Error
(String, String) -- (Cmd, Rest)
getCmd s = case break isSpace $ dropWhile isSpace s of
@@ -898,12 +867,12 @@ toArgs str
(arg, s'') -> case toArgs' s'' of
Left err -> Left err
Right args -> Right (arg : args)
-\end{code}
+{-
-- -----------------------------------------------------------------------------
-- Floats
+-}
-\begin{code}
readRational__ :: ReadS Rational -- NB: doesn't handle leading "-"
readRational__ r = do
(n,d,s) <- readFix r
@@ -1034,33 +1003,31 @@ this `makeRelativeTo` that = directory </> thisFilename
f (x : xs) (y : ys)
| x == y = f xs ys
f xs ys = replicate (length ys) ".." ++ xs
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-Data]{Utils for defining Data instances}
-%* *
-%************************************************************************
+* *
+************************************************************************
These functions help us to define Data instances for abstract types.
+-}
-\begin{code}
abstractConstr :: String -> Constr
abstractConstr n = mkConstr (abstractDataType n) ("{abstract:"++n++"}") [] Prefix
-\end{code}
-\begin{code}
abstractDataType :: String -> DataType
abstractDataType n = mkDataType n [abstractConstr n]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-C]{Utils for printing C code}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
charToC :: Word8 -> String
charToC w =
case chr (fromIntegral w) of
@@ -1072,15 +1039,15 @@ charToC w =
chr (ord '0' + ord c `div` 64),
chr (ord '0' + ord c `div` 8 `mod` 8),
chr (ord '0' + ord c `mod` 8)]
-\end{code}
-%************************************************************************
-%* *
+{-
+************************************************************************
+* *
\subsection[Utils-Hashing]{Utils for hashing}
-%* *
-%************************************************************************
+* *
+************************************************************************
+-}
-\begin{code}
-- | A sample hash function for Strings. We keep multiplying by the
-- golden ratio and adding. The implementation is:
--
@@ -1139,5 +1106,3 @@ mulHi :: Int32 -> Int32 -> Int32
mulHi a b = fromIntegral (r `shiftR` 32)
where r :: Int64
r = fromIntegral a * fromIntegral b
-\end{code}
-
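
Since the hunks above only show the signature and the is_in helper of transitiveClosure, here is a self-contained sketch of the quadratic worklist algorithm the comment describes; the go loop is reconstructed, and the halving successor function is invented for the demonstration.

transitiveClosure :: (a -> [a])       -- Successor function
                  -> (a -> a -> Bool) -- Equality predicate
                  -> [a]
                  -> [a]              -- The transitive closure
transitiveClosure succ eq xs = go [] xs
  where
    go done []                      = done
    go done (x:xs) | x `is_in` done = go done xs
                   | otherwise      = go (x : done) (succ x ++ xs)

    _ `is_in` []     = False
    x `is_in` (y:ys) | eq x y    = True
                     | otherwise = x `is_in` ys

main :: IO ()
main = print (transitiveClosure (\n -> [n `div` 2 | n > 1]) (==) [8 :: Int])
-- prints [1,2,4,8]: everything reachable from 8 by repeated halving
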
diff --git a/docs/users_guide/separate_compilation.xml b/docs/users_guide/separate_compilation.xml
index 43ab182729..b30eff86ae 100644
--- a/docs/users_guide/separate_compilation.xml
+++ b/docs/users_guide/separate_compilation.xml
@@ -966,10 +966,8 @@ ghc -c A.hs
<para>Just like <literal>hs-boot</literal> files, when an
<literal>hsig</literal> file is compiled it is checked for type
- consistency against the backing implementation; furthermore, it also
- produces a pseudo-object file <literal>A.o</literal> which you should
- not link with. Signature files are also written in a subset
- of Haskell similar to essentially identical to that of
+ consistency against the backing implementation. Signature files are also
+ written in a subset of Haskell essentially identical to that of
<literal>hs-boot</literal> files.</para>
<para>There is one important gotcha with the current implementation:
diff --git a/libraries/Cabal b/libraries/Cabal
-Subproject 6c395bb8f22961ce5267df64e6d9351c310fcbb
+Subproject ea062bf522e015f6e643bcc833487098edba839
diff --git a/libraries/base/Data/Fixed.hs b/libraries/base/Data/Fixed.hs
index 068eec5f12..f12a0e496d 100644
--- a/libraries/base/Data/Fixed.hs
+++ b/libraries/base/Data/Fixed.hs
@@ -143,7 +143,9 @@ showFixed chopTrailingZeros fa@(MkFixed a) = (show i) ++ (withDot (showIntegerZe
-- enough digits to be unambiguous
digits = ceiling (logBase 10 (fromInteger res) :: Double)
maxnum = 10 ^ digits
- fracNum = div (d * maxnum) res
+ -- read floors, so show must ceil for `read . show = id` to hold. See #9240
+ fracNum = divCeil (d * maxnum) res
+ divCeil x y = (x + y - 1) `div` y
instance (HasResolution a) => Show (Fixed a) where
show = showFixed False
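
To see why the showFixed change above switches from div to divCeil, take the Fixed B7 case (resolution 128) exercised by the new test: reading "0.008" floors 0.008 * 128 = 1.024 to the internal numerator 1, digits is 3, so maxnum is 1000. The old code showed div (1 * 1000) 128 = 7, i.e. "0.007", which reads back as floor (0.007 * 128) = 0; rounding the shown fraction up restores the round trip. A small arithmetic check, with the values taken from the hunks above:

-- divCeil as introduced in the patch above.
divCeil :: Integer -> Integer -> Integer
divCeil x y = (x + y - 1) `div` y

main :: IO ()
main = do
  print (div (1 * 1000) 128)      -- 7: old behaviour, "0.007", which re-reads as 0
  print (divCeil (1 * 1000) 128)  -- 8: new behaviour, "0.008", which re-reads as 1
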
diff --git a/libraries/base/GHC/Natural.hs b/libraries/base/GHC/Natural.hs
index 3519bcf40a..0211061a32 100644
--- a/libraries/base/GHC/Natural.hs
+++ b/libraries/base/GHC/Natural.hs
@@ -296,9 +296,9 @@ instance Bits Natural where
NatJ# n .&. NatJ# m = bigNatToNatural (andBigNat n m)
NatS# n .|. NatS# m = wordToNatural (W# n .|. W# m)
- NatS# n .|. NatJ# m = NatJ# (andBigNat (wordToBigNat n) m)
- NatJ# n .|. NatS# m = NatJ# (andBigNat n (wordToBigNat m))
- NatJ# n .|. NatJ# m = NatJ# (andBigNat n m)
+ NatS# n .|. NatJ# m = NatJ# (orBigNat (wordToBigNat n) m)
+ NatJ# n .|. NatS# m = NatJ# (orBigNat n (wordToBigNat m))
+ NatJ# n .|. NatJ# m = NatJ# (orBigNat n m)
NatS# n `xor` NatS# m = wordToNatural (W# n `xor` W# m)
NatS# n `xor` NatJ# m = NatJ# (xorBigNat (wordToBigNat n) m)
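
The three replaced lines above were calling andBigNat, so any (.|.) in which at least one operand was a big (NatJ#) value silently computed a bitwise AND; only the word/word case was correct. A quick illustration of the intended behaviour, assuming a 64-bit platform and a base that exports Numeric.Natural:

import Data.Bits ((.|.))
import Numeric.Natural (Natural)

main :: IO ()
main = do
  let big = 2 ^ (64 :: Int) :: Natural  -- large enough to be stored as NatJ# on 64-bit
  print (big .|. 1)  -- 18446744073709551617, i.e. big + 1; the replaced code yielded the AND, 0
  print (1 .|. big)  -- same value; the symmetric NatS# .|. NatJ# case had the same bug
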
diff --git a/libraries/base/changelog.md b/libraries/base/changelog.md
index 07c91a34aa..ef3e9ae95c 100644
--- a/libraries/base/changelog.md
+++ b/libraries/base/changelog.md
@@ -128,6 +128,8 @@
together with a new exception `AllocationLimitExceeded`.
+ * Make `read . show = id` for `Data.Fixed` (#9240)
+
## 4.7.0.2 *Dec 2014*
* Bundled with GHC 7.8.4
diff --git a/libraries/base/tests/data-fixed-show-read.hs b/libraries/base/tests/data-fixed-show-read.hs
index 349f639f2c..7e947f466e 100644
--- a/libraries/base/tests/data-fixed-show-read.hs
+++ b/libraries/base/tests/data-fixed-show-read.hs
@@ -3,6 +3,11 @@ module Main (main) where
import Data.Fixed
+data B7
+
+instance HasResolution B7 where
+ resolution _ = 128
+
main :: IO ()
main = do doit 38.001
doit 38.009
@@ -14,6 +19,8 @@ main = do doit 38.001
doit (-38.01)
doit (-38.09)
print (read "-38" :: Centi)
+ print (read "0.008" :: Fixed B7)
+ print (read "-0.008" :: Fixed B7)
doit :: Centi -> IO ()
doit c = do let s = show c
diff --git a/libraries/base/tests/data-fixed-show-read.stdout b/libraries/base/tests/data-fixed-show-read.stdout
index 0e5d7caef5..4abb2d9676 100644
--- a/libraries/base/tests/data-fixed-show-read.stdout
+++ b/libraries/base/tests/data-fixed-show-read.stdout
@@ -16,3 +16,5 @@
-38.09
-38.09
-38.00
+0.008
+-0.008
diff --git a/libraries/ghc-prim/GHC/Types.hs b/libraries/ghc-prim/GHC/Types.hs
index e9f142805f..0f1d9610ef 100644
--- a/libraries/ghc-prim/GHC/Types.hs
+++ b/libraries/ghc-prim/GHC/Types.hs
@@ -98,11 +98,11 @@ constraints, but we cannot define them as such in Haskell. But we also cannot
just define them only in GHC.Prim (like (->)), because we need a real module
for them, e.g. to compile the constructor's info table.
-Furthermore the type of MkCoercible cannot be written in Haskell (no syntax for
-~#R).
+Furthermore the type of MkCoercible cannot be written in Haskell
+(no syntax for ~#R).
-So we define them as regular data types in GHC.Types, and do magic in GHC to
-change the kind and type, in tysWiredIn.
+So we define them as regular data types in GHC.Types, and do magic in TysWiredIn,
+inside GHC, to change the kind and type.
-}
@@ -161,6 +161,10 @@ data (~) a b = Eq# ((~#) a b)
--
-- /Since: 4.7.0.0/
data Coercible a b = MkCoercible ((~#) a b)
+-- It's really ~R# (representational equality), not ~#,
+-- but * we don't yet have syntax for ~R#,
+-- * the compiled code is the same either way
+-- * TysWiredIn has the truthful types
-- Also see Note [Kind-changing of (~) and Coercible]
-- | Alias for 'tagToEnum#'. Returns True if its parameter is 1# and False
diff --git a/libraries/parallel b/libraries/parallel
-Subproject 50a2b2a622898786d623a9f933183525305058d
+Subproject c4863d925c446ba5416aeed6a11012f2e978686
diff --git a/packages b/packages
index 50ad970cdc..33137d6c27 100644
--- a/packages
+++ b/packages
@@ -71,7 +71,7 @@ libraries/unix - - ssh://g
libraries/Win32 - - https://github.com/haskell/win32.git
libraries/xhtml - - https://github.com/haskell/xhtml.git
nofib nofib - -
-libraries/parallel extra - -
+libraries/parallel extra - ssh://git@github.com/haskell/parallel.git
libraries/stm extra - -
libraries/random dph - https://github.com/haskell/random.git
libraries/primitive dph - https://github.com/haskell/primitive.git
diff --git a/testsuite/tests/deriving/should_compile/T4896.hs b/testsuite/tests/deriving/should_compile/T4896.hs
new file mode 100644
index 0000000000..18fcc7c72b
--- /dev/null
+++ b/testsuite/tests/deriving/should_compile/T4896.hs
@@ -0,0 +1,19 @@
+{-# LANGUAGE FlexibleInstances, FlexibleContexts, UndecidableInstances, TypeFamilies, DeriveDataTypeable, StandaloneDeriving #-}
+
+module T4896 where
+
+import Data.Data
+import Data.Typeable
+
+--instance Typeable1 Bar where
+-- typeOf1 _ = mkTyConApp (mkTyCon "Main.Bar") []
+deriving instance Typeable Bar
+
+class Foo a where
+ data Bar a
+
+data D a b = D Int a deriving (Typeable, Data)
+
+instance Foo (D a b) where
+ data Bar (D a b) = B { l :: a } deriving (Eq, Ord, Read, Show, Data)
+
diff --git a/testsuite/tests/deriving/should_compile/T7947.hs b/testsuite/tests/deriving/should_compile/T7947.hs
new file mode 100644
index 0000000000..d4df4353e5
--- /dev/null
+++ b/testsuite/tests/deriving/should_compile/T7947.hs
@@ -0,0 +1,16 @@
+{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE StandaloneDeriving #-}
+
+module T7947 where
+
+import Data.Data
+import Data.Typeable
+
+import T7947a
+import qualified T7947b as B
+
+deriving instance Typeable A
+deriving instance Typeable B.B
+
+deriving instance Data A
+deriving instance Data B.B
diff --git a/testsuite/tests/deriving/should_compile/T7947a.hs b/testsuite/tests/deriving/should_compile/T7947a.hs
new file mode 100644
index 0000000000..eb5c7472a0
--- /dev/null
+++ b/testsuite/tests/deriving/should_compile/T7947a.hs
@@ -0,0 +1,3 @@
+module T7947a where
+
+data A = C1 | C2 | C
diff --git a/testsuite/tests/deriving/should_compile/T7947b.hs b/testsuite/tests/deriving/should_compile/T7947b.hs
new file mode 100644
index 0000000000..f17f1cd674
--- /dev/null
+++ b/testsuite/tests/deriving/should_compile/T7947b.hs
@@ -0,0 +1,3 @@
+module T7947b where
+
+data B = D1 | D2 | C
diff --git a/testsuite/tests/deriving/should_compile/all.T b/testsuite/tests/deriving/should_compile/all.T
index 3bf871db61..8d9023646c 100644
--- a/testsuite/tests/deriving/should_compile/all.T
+++ b/testsuite/tests/deriving/should_compile/all.T
@@ -51,3 +51,6 @@ test('T8963', normal, compile, [''])
test('T7269', normal, compile, [''])
test('T9069', normal, compile, [''])
test('T9359', normal, compile, [''])
+test('T4896', normal, compile, [''])
+test('T7947', extra_clean(['T7947a.o', 'T7947a.hi', 'T7947b.o', 'T7947b.hi']), multimod_compile, ['T7947', '-v0'])
+
diff --git a/testsuite/tests/ghc-api/annotations/Makefile b/testsuite/tests/ghc-api/annotations/Makefile
index d5c7bd4973..821aaa06ac 100644
--- a/testsuite/tests/ghc-api/annotations/Makefile
+++ b/testsuite/tests/ghc-api/annotations/Makefile
@@ -5,15 +5,18 @@ include $(TOP)/mk/test.mk
clean:
rm -f *.o *.hi
-annotations: clean
+annotations:
+ rm -f annotations.o annotations.hi
'$(TEST_HC)' $(TEST_HC_OPTS) --make -v0 -package ghc annotations
./annotations "`'$(TEST_HC)' $(TEST_HC_OPTS) --print-libdir | tr -d '\r'`"
-parseTree: clean
+parseTree:
+ rm -f parseTree.o parseTree.hi
'$(TEST_HC)' $(TEST_HC_OPTS) --make -v0 -package ghc parseTree
./parseTree "`'$(TEST_HC)' $(TEST_HC_OPTS) --print-libdir | tr -d '\r'`"
-comments: clean
+comments:
+ rm -f comments.o comments.hi
'$(TEST_HC)' $(TEST_HC_OPTS) --make -v0 -package ghc comments
./comments "`'$(TEST_HC)' $(TEST_HC_OPTS) --print-libdir | tr -d '\r'`"
diff --git a/testsuite/tests/perf/compiler/all.T b/testsuite/tests/perf/compiler/all.T
index 92d1326e93..4add8e4bf0 100644
--- a/testsuite/tests/perf/compiler/all.T
+++ b/testsuite/tests/perf/compiler/all.T
@@ -190,10 +190,11 @@ test('T4801',
[(platform('x86_64-apple-darwin'), 464872776, 5),
# expected value: 510938976 (amd64/OS X):
- (wordsize(32), 185242032, 10),
+ (wordsize(32), 203962148, 10),
# prev: 185669232 (x86/OSX)
# 2014-01-22: 211198056 (x86/Linux)
# 2014-09-03: 185242032 (Windows laptop)
+ # 2014-12-01: 203962148 (Windows laptop)
(wordsize(64), 382056344, 10)]),
# prev: 360243576 (amd64/Linux)
# 19/10/2012: 447190832 (amd64/Linux) (-fPIC turned on)
@@ -251,11 +252,13 @@ test('T3064',
# (amd64/Linux) 2014-10-13: 38: Stricter seqDmdType
compiler_stats_num_field('bytes allocated',
- [(wordsize(32), 162457940, 10),
+ [(wordsize(32), 188697088, 10),
# 2011-06-28: 56380288 (x86/Linux)
# 2012-10-30: 111189536 (x86/Windows)
# 2013-11-13: 146626504 (x86/Windows, 64bit machine)
# 2014-01-22: 162457940 (x86/Linux)
+ # 2014-12-01: 162457940 (Windows)
+
(wordsize(64), 385145080, 5)]),
# (amd64/Linux) (28/06/2011): 73259544
# (amd64/Linux) (07/02/2013): 224798696
@@ -332,9 +335,10 @@ test('T5030',
test('T5631',
[compiler_stats_num_field('bytes allocated',
- [(wordsize(32), 346389856, 10),
+ [(wordsize(32), 390199244, 10),
# expected value: 392904228 (x86/Linux)
# 2014-04-04: 346389856 (x86 Windows, 64 bit machine)
+ # 2014-12-01: 390199244 (Windows laptop)
(wordsize(64), 776121120, 5)]),
# expected value: 774595008 (amd64/Linux):
# expected value: 735486328 (amd64/Linux) 2012/12/12:
@@ -454,10 +458,12 @@ test('T5642',
test('T5837',
[ only_ways(['normal']),
compiler_stats_num_field('bytes allocated',
- [(wordsize(32), 37096484, 10),
+ [(wordsize(32), 135914136, 10),
# 40000000 (x86/Linux)
- # 2013-11-13: 45520936 (x86/Windows, 64bit machine)
- # 2041-09-03: 37096484 (Windows laptop, w/w for INLINABLE things
+ # 2013-11-13: 45520936 (x86/Windows, 64bit machine)
+ # 2014-09-03: 37096484 (Windows laptop, w/w for INLINABLE things
+ # 2014-12-01: 135914136 (Windows laptop, regression see below)
+
(wordsize(64), 271028976, 10)])
# sample: 3926235424 (amd64/Linux, 15/2/2012)
# 2012-10-02 81879216
@@ -475,11 +481,13 @@ test('T5837',
test('T6048',
[ only_ways(['optasm']),
compiler_stats_num_field('bytes allocated',
- [(wordsize(32), 56315812, 10),
+ [(wordsize(32), 49987836, 10),
# prev: 38000000 (x86/Linux)
# 2012-10-08: 48887164 (x86/Linux)
# 2014-04-04: 62618072 (x86 Windows, 64 bit machine)
# 2014-09-03: 56315812 (x86 Windows, w/w for INLINABLE)
+ # 2014-12-01: 49987836 (x86 Windows)
+
(wordsize(64), 88186056, 12)])
# 18/09/2012 97247032 amd64/Linux
# 16/01/2014 108578664 amd64/Linux (unknown, likely foldl-via-foldr)
diff --git a/testsuite/tests/perf/haddock/all.T b/testsuite/tests/perf/haddock/all.T
index cb0a235f80..58900ff2d8 100644
--- a/testsuite/tests/perf/haddock/all.T
+++ b/testsuite/tests/perf/haddock/all.T
@@ -21,12 +21,12 @@ test('haddock.base',
# 2014-09-09: 8354439016 (x86_64/Linux - Applicative/Monad changes, according to Austin)
# 2014-09-10: 7901230808 (x86_64/Linux - Applicative/Monad changes, according to Joachim)
# 2014-10-07: 8322584616 (x86_64/Linux)
- ,(platform('i386-unknown-mingw32'), 3746792812, 5)
+ ,(platform('i386-unknown-mingw32'), 4202377432, 5)
# 2013-02-10: 3358693084 (x86/Windows)
# 2013-11-13: 3097751052 (x86/Windows, 64bit machine)
# 2014-04-04: 3548581572 (x86/Windows, 64bit machine)
- # 2014-08-05: XXX TODO UPDATE ME XXX
- # 2014-09-03: Windows laptop, no known reason
+ # 2014-12-01: 4202377432 (x86/Windows, 64bit machine)
+
,(wordsize(32), 3799130400, 1)])
# 2012-08-14: 3046487920 (x86/OSX)
# 2012-10-30: 2955470952 (x86/Windows)
@@ -60,12 +60,12 @@ test('haddock.Cabal',
# 2014-09-24: 5840893376 (x86_64/Linux - Cabal update)
# 2014-10-04: 6019839624 (x86_64/Linux - Burning Bridges, Cabal update)
- ,(platform('i386-unknown-mingw32'), 2052220292, 5)
+ ,(platform('i386-unknown-mingw32'), 3088635556, 5)
# 2012-10-30: 1733638168 (x86/Windows)
# 2013-02-10: 1906532680 (x86/Windows)
# 2014-01-28: 1966911336 (x86/Windows)
# 2014-04-24: 2052220292 (x86/Windows)
- # 2014-08-05: XXX TODO UPDATE ME XXX
+ # 2014-12-01: 3088635556 (x86/Windows)
,(wordsize(32), 2127198484, 1)])
# 2012-08-14: 1648610180 (x86/OSX)
@@ -88,10 +88,11 @@ test('haddock.compiler',
# 2012-11-27: 28708374824 (amd64/Linux)
# 2014-09-10: 30353349160 (amd64/Linux) post-AMP cleanup
# 2014-11-22: 33562468736 (amd64/Linux)
- ,(platform('i386-unknown-mingw32'), 14328363592, 10)
+ ,(platform('i386-unknown-mingw32'), 104140852, 10)
# 2012-10-30: 13773051312 (x86/Windows)
# 2013-02-10: 14925262356 (x86/Windows)
# 2013-11-13: 14328363592 (x86/Windows, 64bit machine)
+ # 2014-12-01: 104140852 (x86/Windows, sudden shrinkage!)
,(wordsize(32), 15110426000, 1)])
# 2012-08-14: 13471797488 (x86/OSX)
# 2014-01-22: 14581475024 (x86/Linux - new haddock)
diff --git a/testsuite/tests/perf/should_run/all.T b/testsuite/tests/perf/should_run/all.T
index d8af52bbef..3731218347 100644
--- a/testsuite/tests/perf/should_run/all.T
+++ b/testsuite/tests/perf/should_run/all.T
@@ -61,10 +61,13 @@ test('T876',
[(wordsize(64), 63216 , 5),
# 2013-02-14: 1263712 (x86_64/Linux)
# 2014-02-10: 63216 (x86_64/Linux), call arity analysis
- (wordsize(32), 53024, 5) ]),
+
+ (wordsize(32), 56796, 5) ]),
# some date: 663712 (Windows, 64-bit machine)
# 2014-04-04: 56820 (Windows, 64-bit machine)
# 2014-06-29: 53024 (x86_64/Linux)
+ # 2014-12-01: 56796 (Windows)
+
only_ways(['normal']),
extra_run_opts('10000')
],
@@ -167,8 +170,10 @@ test('T5205',
test('T5549',
[stats_num_field('bytes allocated',
- [(wordsize(32), 3362958676, 5),
+ [(wordsize(32), 4096606332, 5),
# expected value: 3362958676 (Windows)
+ # 2014-12-01: 4096606332 (Windows) integer-gmp2
+
(wordsize(64), 8193140752, 5)]),
# expected value: 6725846120 (amd64/Linux)
# 8193140752 (amd64/Linux) integer-gmp2
diff --git a/testsuite/tests/perf/space_leaks/all.T b/testsuite/tests/perf/space_leaks/all.T
index af7eefccc5..722c316e70 100644
--- a/testsuite/tests/perf/space_leaks/all.T
+++ b/testsuite/tests/perf/space_leaks/all.T
@@ -15,12 +15,19 @@ test('space_leak_001',
(wordsize(32), 405650, 10)]),
# 2013-02-10 372072 (x86/OSX)
# 2013-02-10 439228 (x86/OSX)
- stats_num_field('bytes allocated', (11315747416, 1)),
+
+ stats_num_field('bytes allocated',
+ [ (wordsize(64), 11315747416, 5),
# expected value: 9079316016 (amd64/Linux)
# 9331570416 (x86/Linux)
# 9329073952 (x86/OS X)
# 9327959840 (x86/Windows)
# 11315747416 (amd64/Lnx, integer-gmp2)
+
+ (wordsize(32), 13550759068, 5),
+ # 2014-12-01 13550759068 (Windows)
+
+ ]),
omit_ways(['profasm','profthreaded','threaded1','threaded2'])
],
compile_and_run,
diff --git a/testsuite/tests/typecheck/should_fail/T4921.hs b/testsuite/tests/typecheck/should_fail/T4921.hs
new file mode 100644
index 0000000000..b024967b2e
--- /dev/null
+++ b/testsuite/tests/typecheck/should_fail/T4921.hs
@@ -0,0 +1,12 @@
+{-# LANGUAGE MultiParamTypeClasses #-}
+module T4921 where
+
+class C a b where
+ f :: (a,b)
+
+instance C Int Char where
+ f = undefined
+
+x = fst f
+
+y = fst f :: Int
diff --git a/testsuite/tests/typecheck/should_fail/T4921.stderr b/testsuite/tests/typecheck/should_fail/T4921.stderr
new file mode 100644
index 0000000000..c304b05c4b
--- /dev/null
+++ b/testsuite/tests/typecheck/should_fail/T4921.stderr
@@ -0,0 +1,19 @@
+
+T4921.hs:10:9:
+ No instance for (C a0 b1) arising from a use of ‘f’
+ The type variables ‘a0’, ‘b1’ are ambiguous
+ Relevant bindings include x :: a0 (bound at T4921.hs:10:1)
+ Note: there is a potential instance available:
+ instance C Int Char -- Defined at T4921.hs:7:10
+ In the first argument of ‘fst’, namely ‘f’
+ In the expression: fst f
+ In an equation for ‘x’: x = fst f
+
+T4921.hs:12:9:
+ No instance for (C Int b0) arising from a use of ‘f’
+ The type variable ‘b0’ is ambiguous
+ Note: there is a potential instance available:
+ instance C Int Char -- Defined at T4921.hs:7:10
+ In the first argument of ‘fst’, namely ‘f’
+ In the expression: fst f :: Int
+ In an equation for ‘y’: y = fst f :: Int
diff --git a/testsuite/tests/typecheck/should_fail/all.T b/testsuite/tests/typecheck/should_fail/all.T
index 7d1e55867e..d3c8941c65 100644
--- a/testsuite/tests/typecheck/should_fail/all.T
+++ b/testsuite/tests/typecheck/should_fail/all.T
@@ -345,3 +345,4 @@ test('T9201', normal, compile_fail, [''])
test('T9109', normal, compile_fail, [''])
test('T9497d', normal, compile_fail, ['-fdefer-type-errors -fno-defer-typed-holes'])
test('T8044', normal, compile_fail, [''])
+test('T4921', normal, compile_fail, [''])