[Git][ghc/ghc][wip/spj-wibbles] 4 commits: Improve NegativeLiterals (#18022, GHC Proposal #344)
Simon Peyton Jones
gitlab at gitlab.haskell.org
Mon Jul 27 23:09:24 UTC 2020
Simon Peyton Jones pushed to branch wip/spj-wibbles at Glasgow Haskell Compiler / GHC
Commits:
aee45d9e by Vladislav Zavialov at 2020-07-27T07:06:56-04:00
Improve NegativeLiterals (#18022, GHC Proposal #344)
Before this patch, NegativeLiterals used to parse x-1 as x (-1).
This may not be what the user expects, and now it is fixed:
x-1 is parsed as (-) x 1.
We achieve this by the following requirement:
* When lexing a negative literal,
it must not be preceded by a 'closing token'.
This also applies to unboxed literals, e.g. -1#.
See GHC Proposal #229 for the definition of a closing token.
A nice consequence of this change is that -XNegativeLiterals becomes a
subset of -XLexicalNegation. In other words, enabling both of those
extensions has the same effect as enabling -XLexicalNegation alone.
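For illustration, a minimal sketch of the new behaviour, drawn from the
NegativeLiterals.hs test added below (the module and function names here are
just examples, not part of the patch):

    {-# LANGUAGE NegativeLiterals #-}
    module Sketch where   -- hypothetical module name, for illustration only

    p :: Bool
    p = even -2     -- prefix minus: parsed as even (-2)

    ti :: Integer -> Integer
    ti x = x-2      -- tight infix minus: parsed as (-) x 2
                    -- (before this patch: x (-2), an application)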
- - - - -
667ab69e by leiftw at 2020-07-27T07:07:32-04:00
Fix a typo referring to the non-existent `-ohidir` flag; it should be `-hidir`.
- - - - -
6ff89c17 by Vladislav Zavialov at 2020-07-27T07:08:07-04:00
Refactor the parser a little
* Create a dedicated production for type operators
* Create a dedicated type for the UNPACK pragma
* Remove an outdated part of Note [Parsing data constructors is hard]
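For reference, the dedicated pragma type (introduced in GHC.Parser.PostProcess,
see the diff below) is roughly:

    -- Result of parsing {-# UNPACK #-} or {-# NOUNPACK #-};
    -- it replaces the previous ([AddAnn], SourceText, SrcUnpackedness) triple.
    data UnpackednessPragma =
      UnpackednessPragma [AddAnn] SourceText SrcUnpackedness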
- - - - -
b7a902db by Simon Peyton Jones at 2020-07-28T00:09:02+01:00
Remove an incorrect WARN in extendLocalRdrEnv
I noticed this warning going off, and discovered that it's
really fine. This small patch removes the warning and documents
what is going on.
- - - - -
12 changed files:
- compiler/GHC/Parser.y
- compiler/GHC/Parser/Lexer.x
- compiler/GHC/Parser/PostProcess.hs
- compiler/GHC/Rename/Pat.hs
- compiler/GHC/Types/Name/Reader.hs
- docs/users_guide/8.12.1-notes.rst
- docs/users_guide/exts/negative_literals.rst
- docs/users_guide/separate_compilation.rst
- − testsuite/tests/parser/should_compile/LexNegVsNegLit.hs
- + testsuite/tests/parser/should_compile/NegativeLiterals.hs
- + testsuite/tests/parser/should_compile/NegativeLiteralsNoExt.hs
- testsuite/tests/parser/should_compile/all.T
Changes:
=====================================
compiler/GHC/Parser.y
=====================================
@@ -1884,9 +1884,9 @@ sigtypes1 :: { (OrdList (LHsSigType GhcPs)) }
-----------------------------------------------------------------------------
-- Types
-unpackedness :: { Located ([AddAnn], SourceText, SrcUnpackedness) }
- : '{-# UNPACK' '#-}' { sLL $1 $> ([mo $1, mc $2], getUNPACK_PRAGs $1, SrcUnpack) }
- | '{-# NOUNPACK' '#-}' { sLL $1 $> ([mo $1, mc $2], getNOUNPACK_PRAGs $1, SrcNoUnpack) }
+unpackedness :: { Located UnpackednessPragma }
+ : '{-# UNPACK' '#-}' { sLL $1 $> (UnpackednessPragma [mo $1, mc $2] (getUNPACK_PRAGs $1) SrcUnpack) }
+ | '{-# NOUNPACK' '#-}' { sLL $1 $> (UnpackednessPragma [mo $1, mc $2] (getNOUNPACK_PRAGs $1) SrcNoUnpack) }
forall_telescope :: { Located ([AddAnn], HsForAllTelescope GhcPs) }
: 'forall' tv_bndrs '.' {% do { hintExplicitForall $1
@@ -1980,13 +1980,16 @@ tyapp :: { Located TyEl }
-- See Note [Whitespace-sensitive operator parsing] in GHC.Parser.Lexer
| PREFIX_AT atype { sLL $1 $> $ (TyElKindApp (comb2 $1 $2) $2) }
- | qtyconop { sL1 $1 $ TyElOpr (unLoc $1) }
- | tyvarop { sL1 $1 $ TyElOpr (unLoc $1) }
- | SIMPLEQUOTE qconop {% ams (sLL $1 $> $ TyElOpr (unLoc $2))
+ | tyop { mapLoc TyElOpr $1 }
+ | unpackedness { sL1 $1 $ TyElUnpackedness (unLoc $1) }
+
+tyop :: { Located RdrName }
+ : qtyconop { $1 }
+ | tyvarop { $1 }
+ | SIMPLEQUOTE qconop {% ams (sLL $1 $> (unLoc $2))
[mj AnnSimpleQuote $1,mj AnnVal $2] }
- | SIMPLEQUOTE varop {% ams (sLL $1 $> $ TyElOpr (unLoc $2))
+ | SIMPLEQUOTE varop {% ams (sLL $1 $> (unLoc $2))
[mj AnnSimpleQuote $1,mj AnnVal $2] }
- | unpackedness { sL1 $1 $ TyElUnpackedness (unLoc $1) }
atype :: { LHsType GhcPs }
: ntgtycon { sL1 $1 (HsTyVar noExtField NotPromoted $1) } -- Not including unit tuples
=====================================
compiler/GHC/Parser/Lexer.x
=====================================
@@ -199,7 +199,6 @@ $docsym = [\| \^ \* \$]
-- normal signed numerical literals can only be explicitly negative,
-- not explicitly positive (contrast @exponent)
@negative = \-
-@signed = @negative ?
-- -----------------------------------------------------------------------------
@@ -531,12 +530,12 @@ $tab { warnTab }
ifExtension BinaryLiteralsBit } { tok_primint positive 2 3 binary }
0[oO] @numspc @octal \# / { ifExtension MagicHashBit } { tok_primint positive 2 3 octal }
0[xX] @numspc @hexadecimal \# / { ifExtension MagicHashBit } { tok_primint positive 2 3 hexadecimal }
- @negative @decimal \# / { ifExtension MagicHashBit } { tok_primint negative 1 2 decimal }
- @negative 0[bB] @numspc @binary \# / { ifExtension MagicHashBit `alexAndPred`
+ @negative @decimal \# / { negHashLitPred } { tok_primint negative 1 2 decimal }
+ @negative 0[bB] @numspc @binary \# / { negHashLitPred `alexAndPred`
ifExtension BinaryLiteralsBit } { tok_primint negative 3 4 binary }
- @negative 0[oO] @numspc @octal \# / { ifExtension MagicHashBit } { tok_primint negative 3 4 octal }
+ @negative 0[oO] @numspc @octal \# / { negHashLitPred } { tok_primint negative 3 4 octal }
@negative 0[xX] @numspc @hexadecimal \#
- / { ifExtension MagicHashBit } { tok_primint negative 3 4 hexadecimal }
+ / { negHashLitPred } { tok_primint negative 3 4 hexadecimal }
@decimal \# \# / { ifExtension MagicHashBit } { tok_primword 0 2 decimal }
0[bB] @numspc @binary \# \# / { ifExtension MagicHashBit `alexAndPred`
@@ -546,8 +545,11 @@ $tab { warnTab }
-- Unboxed floats and doubles (:: Float#, :: Double#)
-- prim_{float,double} work with signed literals
- @signed @floating_point \# / { ifExtension MagicHashBit } { tok_frac 1 tok_primfloat }
- @signed @floating_point \# \# / { ifExtension MagicHashBit } { tok_frac 2 tok_primdouble }
+ @floating_point \# / { ifExtension MagicHashBit } { tok_frac 1 tok_primfloat }
+ @floating_point \# \# / { ifExtension MagicHashBit } { tok_frac 2 tok_primdouble }
+
+ @negative @floating_point \# / { negHashLitPred } { tok_frac 1 tok_primfloat }
+ @negative @floating_point \# \# / { negHashLitPred } { tok_frac 2 tok_primdouble }
}
-- Strings and chars are lexed by hand-written code. The reason is
@@ -1192,8 +1194,8 @@ atEOL _ _ _ (AI _ buf) = atEnd buf || currentChar buf == '\n'
-- Check if we should parse a negative literal (e.g. -123) as a single token.
negLitPred :: AlexAccPred ExtsBitmap
negLitPred =
- negative_literals `alexOrPred`
- (lexical_negation `alexAndPred` prefix_minus)
+ prefix_minus `alexAndPred`
+ (negative_literals `alexOrPred` lexical_negation)
where
negative_literals = ifExtension NegativeLiteralsBit
@@ -1202,14 +1204,33 @@ negLitPred =
alexNotPred (ifExtension NoLexicalNegationBit)
prefix_minus =
- -- The condition for a prefix occurrence of an operator is:
- --
- -- not precededByClosingToken && followedByOpeningToken
- --
- -- but we don't check followedByOpeningToken here as it holds
- -- simply because we immediately lex a literal after the minus.
+ -- Note [prefix_minus in negLitPred and negHashLitPred]
+ alexNotPred precededByClosingToken
+
+-- Check if we should parse an unboxed negative literal (e.g. -123#) as a single token.
+negHashLitPred :: AlexAccPred ExtsBitmap
+negHashLitPred = prefix_minus `alexAndPred` magic_hash
+ where
+ magic_hash = ifExtension MagicHashBit
+ prefix_minus =
+ -- Note [prefix_minus in negLitPred and negHashLitPred]
alexNotPred precededByClosingToken
+{- Note [prefix_minus in negLitPred and negHashLitPred]
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+We want to parse -1 as a single token, but x-1 as three tokens.
+So in negLitPred (and negHashLitPred) we require that we have a prefix
+occurrence of the minus sign. See Note [Whitespace-sensitive operator parsing]
+for a detailed definition of a prefix occurrence.
+
+The condition for a prefix occurrence of an operator is:
+
+ not precededByClosingToken && followedByOpeningToken
+
+but we don't check followedByOpeningToken when parsing a negative literal.
+It holds simply because we immediately lex a literal after the minus.
+-}
+
ifExtension :: ExtBits -> AlexAccPred ExtsBitmap
ifExtension extBits bits _ _ _ = extBits `xtest` bits
=====================================
compiler/GHC/Parser/PostProcess.hs
=====================================
@@ -70,6 +70,7 @@ module GHC.Parser.PostProcess (
addFatalError, hintBangPat,
TyEl(..), mergeOps, mergeDataCon,
mkBangTy,
+ UnpackednessPragma(..),
-- Help with processing exports
ImpExpSubSpec(..),
@@ -559,25 +560,6 @@ As the result, in order to determine whether (C t1 t2) declares a data
constructor, a type, or a context, we would need unlimited lookahead which
'happy' is not so happy with.
-To further complicate matters, the interpretation of (!) and (~) is different
-in constructors and types:
-
- (b1) type T = C ! D
- (b2) data T = C ! D
- (b3) data T = C ! D => E
-
-In (b1) and (b3), (!) is a type operator with two arguments: 'C' and 'D'. At
-the same time, in (b2) it is a strictness annotation: 'C' is a data constructor
-with a single strict argument 'D'. For the programmer, these cases are usually
-easy to tell apart due to whitespace conventions:
-
- (b2) data T = C !D -- no space after the bang hints that
- -- it is a strictness annotation
-
-For the parser, on the other hand, this whitespace does not matter. We cannot
-tell apart (b2) from (b3) until we encounter (=>), so it requires unlimited
-lookahead.
-
The solution that accounts for all of these issues is to initially parse data
declarations and types as a reversed list of TyEl:
@@ -1324,7 +1306,7 @@ isFunLhs e = go e [] []
data TyEl = TyElOpr RdrName | TyElOpd (HsType GhcPs)
| TyElKindApp SrcSpan (LHsType GhcPs)
-- See Note [TyElKindApp SrcSpan interpretation]
- | TyElUnpackedness ([AddAnn], SourceText, SrcUnpackedness)
+ | TyElUnpackedness UnpackednessPragma
{- Note [TyElKindApp SrcSpan interpretation]
@@ -1345,20 +1327,15 @@ instance Outputable TyEl where
ppr (TyElOpr name) = ppr name
ppr (TyElOpd ty) = ppr ty
ppr (TyElKindApp _ ki) = text "@" <> ppr ki
- ppr (TyElUnpackedness (_, _, unpk)) = ppr unpk
+ ppr (TyElUnpackedness (UnpackednessPragma _ _ unpk)) = ppr unpk
-- | Extract a strictness/unpackedness annotation from the front of a reversed
-- 'TyEl' list.
pUnpackedness
:: [Located TyEl] -- reversed TyEl
- -> Maybe ( SrcSpan
- , [AddAnn]
- , SourceText
- , SrcUnpackedness
- , [Located TyEl] {- remaining TyEl -})
-pUnpackedness (L l x1 : xs)
- | TyElUnpackedness (anns, prag, unpk) <- x1
- = Just (l, anns, prag, unpk, xs)
+ -> Maybe (SrcSpan, UnpackednessPragma,
+ [Located TyEl] {- remaining TyEl -})
+pUnpackedness (L l x1 : xs) | TyElUnpackedness up <- x1 = Just (l, up, xs)
pUnpackedness _ = Nothing
pBangTy
@@ -1371,7 +1348,7 @@ pBangTy
pBangTy lt@(L l1 _) xs =
case pUnpackedness xs of
Nothing -> (False, lt, pure (), xs)
- Just (l2, anns, prag, unpk, xs') ->
+ Just (l2, UnpackednessPragma anns prag unpk, xs') ->
let bl = combineSrcSpans l1 l2
bt = addUnpackedness (prag, unpk) lt
in (True, L bl bt, addAnnsAt bl anns, xs')
@@ -1380,6 +1357,10 @@ mkBangTy :: SrcStrictness -> LHsType GhcPs -> HsType GhcPs
mkBangTy strictness =
HsBangTy noExtField (HsSrcBang NoSourceText NoSrcUnpack strictness)
+-- Result of parsing {-# UNPACK #-} or {-# NOUNPACK #-}
+data UnpackednessPragma =
+ UnpackednessPragma [AddAnn] SourceText SrcUnpackedness
+
addUnpackedness :: (SourceText, SrcUnpackedness) -> LHsType GhcPs -> HsType GhcPs
addUnpackedness (prag, unpk) (L _ (HsBangTy x bang t))
| HsSrcBang NoSourceText NoSrcUnpack strictness <- bang
@@ -1411,7 +1392,7 @@ mergeOps all_xs = go (0 :: Int) [] id all_xs
-- clause [unpk]:
-- handle (NO)UNPACK pragmas
- go k acc ops_acc ((L l (TyElUnpackedness (anns, unpkSrc, unpk))):xs) =
+ go k acc ops_acc ((L l (TyElUnpackedness (UnpackednessPragma anns unpkSrc unpk))):xs) =
if not (null acc) && null xs
then do { acc' <- eitherToP $ mergeOpsAcc acc
; let a = ops_acc acc'
=====================================
compiler/GHC/Rename/Pat.hs
=====================================
@@ -236,19 +236,30 @@ newPatName (LetMk is_top fix_env) rdr_name
do { name <- case is_top of
NotTopLevel -> newLocalBndrRn rdr_name
TopLevel -> newTopSrcBinder rdr_name
- ; bindLocalNames [name] $ -- Do *not* use bindLocalNameFV here
- -- See Note [View pattern usage]
+ ; bindLocalNames [name] $
+ -- Do *not* use bindLocalNameFV here;
+ -- see Note [View pattern usage]
+ -- For the TopLevel case
+ -- see Note [bindLocalNames for an External name]
addLocalFixities fix_env [name] $
thing_inside name })
- -- Note: the bindLocalNames is somewhat suspicious
- -- because it binds a top-level name as a local name.
- -- however, this binding seems to work, and it only exists for
- -- the duration of the patterns and the continuation;
- -- then the top-level name is added to the global env
- -- before going on to the RHSes (see GHC.Rename.Module).
+{- Note [bindLocalNames for an External name]
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+In the TopLevel case, the use of bindLocalNames here is somewhat
+suspicious because it binds a top-level External name in the
+LocalRdrEnv. c.f. Note [LocalRdrEnv] in GHC.Types.Name.Reader.
+
+However, this only happens when renaming the LHS (only) of a top-level
+pattern binding. Even though this is only the LHS, we need to bring the
+binder into scope in the pattern itself in case the binder is used in
+subsequent view patterns. A bit bizarre, something like
+ (x, Just y <- f x) = e
+
+Anyway, bindLocalNames does work, and the binding only exists for the
+duration of the pattern; then the top-level name is added to the
+global env before going on to the RHSes (see GHC.Rename.Module).
-{-
Note [View pattern usage]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
=====================================
compiler/GHC/Types/Name/Reader.hs
=====================================
@@ -338,13 +338,24 @@ instance Ord RdrName where
************************************************************************
-}
+{- Note [LocalRdrEnv]
+~~~~~~~~~~~~~~~~~~~~~
+The LocalRdrEnv is used to store local bindings (let, where, lambda, case).
+
+* It is keyed by OccName, because we never use it for qualified names.
+
+* It maps the OccName to a Name. That Name is almost always an
+ Internal Name, but (hackily) it can be External too for top-level
+ pattern bindings. See Note [bindLocalNames for an External name]
+ in GHC.Rename.Pat
+
+* We keep the current mapping (lre_env), *and* the set of all Names in
+ scope (lre_in_scope). Reason: see Note [Splicing Exact names] in
+ GHC.Rename.Env.
+-}
+
-- | Local Reader Environment
---
--- This environment is used to store local bindings
--- (@let@, @where@, lambda, @case@).
--- It is keyed by OccName, because we never use it for qualified names
--- We keep the current mapping, *and* the set of all Names in scope
--- Reason: see Note [Splicing Exact names] in "GHC.Rename.Env"
+-- See Note [LocalRdrEnv]
data LocalRdrEnv = LRE { lre_env :: OccEnv Name
, lre_in_scope :: NameSet }
@@ -364,16 +375,15 @@ emptyLocalRdrEnv = LRE { lre_env = emptyOccEnv
, lre_in_scope = emptyNameSet }
extendLocalRdrEnv :: LocalRdrEnv -> Name -> LocalRdrEnv
--- The Name should be a non-top-level thing
+-- See Note [LocalRdrEnv]
extendLocalRdrEnv lre@(LRE { lre_env = env, lre_in_scope = ns }) name
- = WARN( isExternalName name, ppr name )
- lre { lre_env = extendOccEnv env (nameOccName name) name
+ = lre { lre_env = extendOccEnv env (nameOccName name) name
, lre_in_scope = extendNameSet ns name }
extendLocalRdrEnvList :: LocalRdrEnv -> [Name] -> LocalRdrEnv
+-- See Note [LocalRdrEnv]
extendLocalRdrEnvList lre@(LRE { lre_env = env, lre_in_scope = ns }) names
- = WARN( any isExternalName names, ppr names )
- lre { lre_env = extendOccEnvList env [(nameOccName n, n) | n <- names]
+ = lre { lre_env = extendOccEnvList env [(nameOccName n, n) | n <- names]
, lre_in_scope = extendNameSetList ns names }
lookupLocalRdrEnv :: LocalRdrEnv -> RdrName -> Maybe Name
=====================================
docs/users_guide/8.12.1-notes.rst
=====================================
@@ -224,6 +224,13 @@ Language
f = (- x) -- operator section
c = (-x) -- negation
+* The behavior of :extension:`NegativeLiterals` changed, and now we require
+ that a negative literal must not be preceded by a closing token (see
+ `GHC Proposal #229 <https://github.com/ghc-proposals/ghc-proposals/blob/master/proposals/0229-whitespace-bang-patterns.rst>`__
+ for the definition of a closing token). In other words, we parse ``f -123``
+ as ``f (-123)``, but ``x-123`` as ``(-) x 123``. Before this amendment,
+ :extension:`NegativeLiterals` caused ``x-123`` to be parsed as ``x(-123)``.
+
Compiler
~~~~~~~~
=====================================
docs/users_guide/exts/negative_literals.rst
=====================================
@@ -24,9 +24,11 @@ will elicit an unexpected integer-literal-overflow message.
Whitespace can be inserted, as in ``- 123``, to force interpretation
as two tokens.
-One pitfall is that with :extension:`NegativeLiterals`, ``x-1`` will
-be parsed as ``x`` applied to the argument ``-1``, which is usually
-not what you want. ``x - 1`` or even ``x- 1`` can be used instead
-for subtraction. To avoid this, consider using :extension:`LexicalNegation`
-instead.
-
+In 8.12, the behavior of this extension changed, and now we require that a negative literal must not be preceded by a closing token (see
+`GHC Proposal #229 <https://github.com/ghc-proposals/ghc-proposals/blob/master/proposals/0229-whitespace-bang-patterns.rst>`__
+for the definition of a closing token). In other words, we parse ``f -123`` as ``f (-123)``, but ``x-123`` as ``(-) x
+123``. Before this amendment, :extension:`NegativeLiterals` caused ``x-123`` to be parsed as ``x(-123)``.
+
+:extension:`NegativeLiterals` is a subset of :extension:`LexicalNegation`. That
+is, enabling both of those extensions has the same effect as enabling
+:extension:`LexicalNegation` alone.
=====================================
docs/users_guide/separate_compilation.rst
=====================================
@@ -758,7 +758,7 @@ There are several points to note here:
the same machine-generated binary format as any other
GHC-generated interface file (e.g. ``B.hi``). You can display its
contents with ``ghc --show-iface``. If you specify a directory for
- interface files, the ``-ohidir`` flag, then that affects ``hi-boot`` files
+ interface files, the ``-hidir`` flag, then that affects ``hi-boot`` files
too.
- If hs-boot files are considered distinct from their parent source
=====================================
testsuite/tests/parser/should_compile/LexNegVsNegLit.hs deleted
=====================================
@@ -1,17 +0,0 @@
-{-# LANGUAGE NegativeLiterals, LexicalNegation #-}
-
-module LexNegVsNegLit where
-
--- NegativeLiterals specifies that we parse x-1 as x (-1), even though it's
--- considered a shortcoming.
---
--- LexicalNegation does not change that.
---
-b :: Bool
-b = even-1 -- parsed as: even (-1)
- -- so it is well-typed.
- --
- -- with LexicalNegation alone, we'd get (-) even 1,
- -- but NegativeLiterals takes precedence here.
-
--- See also: GHC Proposal #344
=====================================
testsuite/tests/parser/should_compile/NegativeLiterals.hs
=====================================
@@ -0,0 +1,57 @@
+{-# LANGUAGE NegativeLiterals, MagicHash, BinaryLiterals #-}
+
+module NegativeLiterals where
+
+import GHC.Exts
+
+------------------------------------
+-- Prefix occurrence of the minus --
+------------------------------------
+
+p1 :: Bool
+p1 = even -2 -- parsed as: even (-2)
+
+p2 :: Int
+p2 = I# -1# -- parsed as: I# (-1#)
+
+p3 :: Int
+p3 = floor -2.4 -- parsed as: floor (-2.4)
+
+p4 :: Float
+p4 = F# -0.01# -- parsed as: F# (-0.01#)
+
+p5 :: Double
+p5 = D# -0.01## -- parsed as: D# (-0.01##)
+
+p6 :: Bool
+p6 = even -0b10 -- parsed as: even (-2)
+ || even -0o10 -- parsed as: even (-8)
+ || even -0x10 -- parsed as: even (-16)
+
+-----------------------------------------
+-- Tight infix occurrence of the minus --
+-----------------------------------------
+
+ti1 :: Integer -> Integer
+ti1 x = x-2 -- parsed as: (-) x 2
+
+ti2 :: Int# -> Int#
+ti2 x = x-1# -- parsed as: (-) x 1#
+ where (-) = (-#)
+
+ti3 :: Double -> Double
+ti3 x = x-2.4 -- parsed as: (-) x 2.4
+
+ti4 :: Float# -> Float#
+ti4 x = x-0.1# -- parsed as: (-) x 0.1#
+ where (-) = minusFloat#
+
+ti5 :: Double# -> Double#
+ti5 x = x-0.1## -- parsed as: (-) x 0.1##
+ where (-) = (-##)
+
+ti6 :: Integer -> [Integer]
+ti6 x =
+ [ x-0b10, -- parsed as: (-) x 2
+ x-0o10, -- parsed as: (-) x 8
+ x-0x10 ] -- parsed as: (-) x 16
=====================================
testsuite/tests/parser/should_compile/NegativeLiteralsNoExt.hs
=====================================
@@ -0,0 +1,39 @@
+{-# LANGUAGE NoNegativeLiterals, MagicHash, BinaryLiterals #-}
+
+-- Even when NegativeLiterals is disabled,
+-- we parse unboxed literals appropriately.
+module NegativeLiteralsNoExt where
+
+import GHC.Exts
+
+------------------------------------
+-- Prefix occurrence of the minus --
+------------------------------------
+
+p2 :: Int
+p2 = I# -1# -- parsed as: I# (-1#)
+
+p4 :: Float
+p4 = F# -0.01# -- parsed as: F# (-0.01#)
+
+p5 :: Double
+p5 = D# -0.01## -- parsed as: D# (-0.01##)
+
+-----------------------------------------
+-- Tight infix occurrence of the minus --
+-----------------------------------------
+
+ti2 :: Int# -> Int#
+ti2 x = x-1# -- parsed as: (-) x 1#
+ where (-) = (-#)
+
+ti3 :: Double -> Double
+ti3 x = x-2.4 -- parsed as: (-) x 2.4
+
+ti4 :: Float# -> Float#
+ti4 x = x-0.1# -- parsed as: (-) x 0.1#
+ where (-) = minusFloat#
+
+ti5 :: Double# -> Double#
+ti5 x = x-0.1## -- parsed as: (-) x 0.1##
+ where (-) = (-##)
=====================================
testsuite/tests/parser/should_compile/all.T
=====================================
@@ -153,7 +153,8 @@ test('proposal-229b', normal, compile, [''])
test('proposal-229d', normal, compile, [''])
test('proposal-229e', normal, compile, [''])
test('LexicalNegation', normal, compile, [''])
-test('LexNegVsNegLit', normal, compile, [''])
+test('NegativeLiterals', normal, compile, [''])
+test('NegativeLiteralsNoExt', normal, compile, [''])
# We omit 'profasm' because it fails with:
# Cannot load -prof objects when GHC is built with -dynamic
View it on GitLab: https://gitlab.haskell.org/ghc/ghc/-/compare/b87eb02be35440dfc941e5e9a12dbcc01849e6d3...b7a902db47e91377b501400c66ab779003ad1182