[Git][ghc/ghc][wip/az/ghc-cpp] More plumbing. Ready for testing tomorrow.

Alan Zimmerman (@alanz) gitlab at gitlab.haskell.org
Tue Feb 4 23:46:30 UTC 2025



Alan Zimmerman pushed to branch wip/az/ghc-cpp at Glasgow Haskell Compiler / GHC


Commits:
76d8e943 by Alan Zimmerman at 2025-02-04T23:46:06+00:00
More plumbing. Ready for testing tomorrow.

- - - - -


7 changed files:

- compiler/GHC/Parser/PreProcess.hs
- compiler/GHC/Parser/PreProcess/Macro.hs
- compiler/GHC/Parser/PreProcess/ParsePP.hs
- compiler/GHC/Parser/PreProcess/Types.hs
- utils/check-cpp/ParsePP.hs
- − utils/check-cpp/Parser.hs
- utils/check-cpp/PreProcess.hs


Changes:

=====================================
compiler/GHC/Parser/PreProcess.hs
=====================================
@@ -14,7 +14,6 @@ module GHC.Parser.PreProcess (
     PpState,
 ) where
 
-import Data.Char
 import Data.Map qualified as Map
 import Debug.Trace (trace)
 import GHC.Data.FastString
@@ -41,14 +40,12 @@ initParserState = Lexer.initParserState initPpState
 
 -- ---------------------------------------------------------------------
 
-data CppState
-    = CppIgnoring
-    | CppNormal
-    deriving (Show)
-
--- ---------------------------------------------------------------------
-
+-- | Continuation-based lexer, providing input to GHC.Parser
+lexer :: Bool -> (Located Token -> PP a) -> PP a
 lexer = ppLexer
+
+-- | Debug version of @lexer@
+lexerDbg :: Bool -> (Located Token -> PP a) -> PP a
 lexerDbg = ppLexerDbg
 
 ppLexer, ppLexerDbg :: Bool -> (Located Token -> PP a) -> PP a
@@ -70,7 +67,7 @@ ppLexer queueComments cont =
                     ppLexer queueComments cont
              in
                 case tk of
-                -- case (trace ("M.ppLexer:tk=" ++ show (unLoc tk)) tk) of
+                    -- case (trace ("M.ppLexer:tk=" ++ show (unLoc tk)) tk) of
                     L _ ITeof -> do
                         mInp <- popIncludeLoc
                         case mInp of
@@ -93,8 +90,6 @@ ppLexer queueComments cont =
 
 -- ---------------------------------------------------------------------
 
-type PP = P PpState
-
 preprocessElse :: PP ()
 preprocessElse = do
     accepting <- getAccepting
@@ -117,23 +112,21 @@ processCppToks fs = do
 
 processCpp :: [FastString] -> PP ()
 processCpp fs = do
-    -- traceM $ "processCpp: fs=" ++ show fs
-    -- let s = cppInitial fs
-    let s = cppInitial fs
+    let s = concatMap unpackFS fs
     case parseDirective s of
         Left err -> error $ show (err, s)
         Right (CppInclude filename) -> do
             ppInclude filename
         Right (CppDefine name def) -> do
-            ppDefine name def
+            ppDefine (MacroName name Nothing) def
         Right (CppIf cond) -> do
-            _ <- ppIf cond
+            ppIf cond
             return ()
         Right (CppIfdef name) -> do
-            defined <- ppIsDefined name
+            defined <- ppIsDefined (MacroName name Nothing)
             setAccepting defined
         Right (CppIfndef name) -> do
-            defined <- ppIsDefined name
+            defined <- ppIsDefined (MacroName name Nothing)
             setAccepting (not defined)
         Right CppElse -> do
             accepting <- getAccepting
@@ -144,64 +137,8 @@ processCpp fs = do
             setAccepting True
             return ()
 
-    -- return (trace ("processCpp:s=" ++ show s) ())
     return ()
 
--- ---------------------------------------------------------------------
--- Preprocessor state functions
-
-getCppState :: PP CppState
-getCppState = do
-    accepting <- getAccepting
-    if accepting
-        then return CppNormal
-        else return CppIgnoring
-
--- pp_context stack start -----------------
-
-pushContext :: Token -> PP ()
-pushContext new =
-    P $ \s -> POk s{pp = (pp s){pp_context = new : pp_context (pp s)}} ()
-
-popContext :: PP ()
-popContext =
-    P $ \s ->
-        let
-            new_context = case pp_context (pp s) of
-                [] -> []
-                (_ : t) -> t
-         in
-            POk s{pp = (pp s){pp_context = new_context}} ()
-
-peekContext :: PP Token
-peekContext =
-    P $ \s ->
-        let
-            r = case pp_context (pp s) of
-                [] -> ITeof -- Anthing really, for now, except a CPP one
-                (h : _) -> h
-         in
-            POk s r
-
-setAccepting :: Bool -> PP ()
-setAccepting on =
-    P $ \s -> POk s{pp = (pp s){pp_accepting = on}} ()
-
-getAccepting :: PP Bool
-getAccepting = P $ \s -> POk s (pp_accepting (pp s))
-
--- -------------------------------------
-
-pushContinuation :: Located Token -> PP ()
-pushContinuation new =
-    P $ \s -> POk s{pp = (pp s){pp_continuation = new : pp_continuation (pp s)}} ()
-
-popContinuation :: PP [Located Token]
-popContinuation =
-    P $ \s -> POk s{pp = (pp s){pp_continuation = []}} (pp_continuation (pp s))
-
--- pp_context stack end -------------------
-
 -- pp_include start -----------------------
 
 getInclude :: String -> PP (Maybe StringBuffer)
@@ -235,51 +172,19 @@ ppInclude filename = do
             pushIncludeLoc origInput
             let loc = PsLoc (mkRealSrcLoc (mkFastString filename) 1 1) (BufPos 0)
             Lexer.setInput (Lexer.AI loc src)
-    -- return $ trace ("ppInclude:mSrc=[" ++ show mSrc ++ "]") ()
-
--- return $ trace ("ppInclude:filename=[" ++ filename ++ "]") ()
-
-ppDefine :: String -> String -> PP ()
-ppDefine name val = P $ \s ->
-    -- POk s{pp = (pp s){pp_defines = Map.insert (trace ("ppDefine:def=[" ++ name ++ "]") (MacroName name Nothing)) val (pp_defines (pp s))}} ()
-    POk s{pp = (pp s){pp_defines = Map.insert (MacroName name Nothing) val (pp_defines (pp s))}} ()
 
-ppIsDefined :: String -> PP Bool
-ppIsDefined def = P $ \s ->
-    POk s (Map.member (MacroName def Nothing) (pp_defines (pp s)))
-    -- POk s (Map.member (trace ("ppIsDefined:def=[" ++ def ++ "]") (MacroName def Nothing)) (pp_defines (pp s)))
-
-ppIf :: String -> PP Bool
+ppIf :: String -> PP ()
 ppIf str = P $ \s ->
-    -- -- POk s (Map.member def (pp_defines (pp s)))
-    -- POk s (Map.member (trace ("ppIsDefined:def=[" ++ def ++ "]") def) (pp_defines (pp s)))
     let
         s' = cppIf (pp s) str
      in
-        POk s{pp = s'} (pp_accepting s')
-
--- | Take a @FastString@ of the form "#define FOO\n" and strip off all but "FOO"
-cleanTokenString :: FastString -> String
-cleanTokenString fs = r
-  where
-    ss = dropWhile (\c -> not $ isSpace c) (unpackFS fs)
-    r = init ss
-
--- parseDefine :: FastString -> Maybe (String, [String])
--- parseDefine fs = r
---   where
---     -- r = Just (cleanTokenString s, "")
---     r = case parseCppParser cppDefinition (unpackFS fs) of
---         Left _ -> Nothing
---         Right v -> Just v
+        POk s{pp = s'} ()
 
 -- =====================================================================
 
 {- | Do cpp initial processing, as per https://gcc.gnu.org/onlinedocs/cpp/Initial-processing.html
 See Note [GhcCPP Initial Processing]
 -}
-cppInitial :: [FastString] -> String
-cppInitial fs = concatMap unpackFS fs
 
 {-
 Note [GhcCPP Initial Processing]
@@ -296,4 +201,61 @@ directive.
    and is handled in the Lexer.
 4. All comments are replaced with a single space
 
+Note [GhcCPP Processing Overview]
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+GHC.Parser calls `GHC.Parser.PreProcess.lexer` to provide it with the
+next token to parse, until it gets the EOF token.
+
+Without GHC_CPP, this simply calls `GHC.Parser.Lexer.lexer` to get the
+next token. With it, `GHC.Parser.PreProcess.lexer` runs its own loop
+between the two.
+
+- It calls `GHC.Parser.Lexer.lexer`
+
+- If the GhcCpp option is not set, it returns a normal token, which is
+  passed to the parser.
+
+- If the GhcCpp option is set, it may in addition return an `ITcpp`
+  token.
+
+  This is either a token containing a whole line starting with a
+  preprocessor directive, or a continuation of the prior line if that
+  line was a directive ending with a backslash.
+
+- The lexing loop in this file accumulates these continuation tokens
+  until it has a full preprocessor line.
+
+- It does basic token-based analysis of this line, to determine the
+  specific PP directive it refers to.
+
+- The preprocessor can be in one of two states: `CppNormal` or
+  `CppIgnoring`.
+
+  When it is in `CppNormal` it passes non-PP tokens to the parser as
+  normal.
+
+  When it is in `CppIgnoring` it does not pass the non-PP tokens to
+  the parser, but instead inserts them into the parser's queued
+  comments store, as if each were a comment.
+
+- When it has a full preprocessor directive, it is processed as follows:
+  `#define` : records a macro definition in the PP state
+  `#include` : not currently processed
+
+  `#ifdef` / `#ifndef` : If the following token is the name of a macro, switch to
+  `CppNormal` or `CppIgnoring` as appropriate
+
+  `#if` : perform macro expansion on the text, until it reaches a
+  fixpoint. Then parse it with `GHC.Parser.PreProcess.Parser/Lexer` as
+  an expression, and evaluate it. Set the state according to the outcome.
+
+- The `#if` / `#ifdef` / `#ifndef` directives also open a new macro
+  scope. Any macros defined will be stored in this scope.
+
+- `#else` : flip the state between `CppIgnoring` and `CppNormal`, and
+  pop the scope. Start a new scope.
+
+- `#endif` : pop the scope, set the state according to the surrounding
+  scope.
+
 -}
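
As a rough, self-contained model of the accept/ignore behaviour described
above (illustrative only, not part of this patch: it works on raw source
lines rather than ITcpp tokens, and pretends every condition is false):

import Data.List (isPrefixOf)

-- Stack of accepting flags, innermost scope first; a line reaches the
-- parser only while every enclosing conditional scope is accepting.
acceptedLines :: [String] -> [String]
acceptedLines = go [True]
  where
    go _ [] = []
    go stack (l : ls)
        | "#if"    `isPrefixOf` l = go (False : stack) ls -- also matches #ifdef/#ifndef
        | "#else"  `isPrefixOf` l = case stack of
            (acc : rest) -> go (not acc : rest) ls
            []           -> go stack ls
        | "#endif" `isPrefixOf` l = go (drop 1 stack) ls
        | "#"      `isPrefixOf` l = go stack ls           -- other directives, handled elsewhere
        | and stack               = l : go stack ls       -- CppNormal: pass to the parser
        | otherwise               = go stack ls           -- CppIgnoring: queued as a comment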


=====================================
compiler/GHC/Parser/PreProcess/Macro.hs
=====================================
@@ -22,7 +22,6 @@ details
 
 -- TODO: Parse tokens with original locations in them.
 
-import Data.Map qualified as Map
 import Data.Maybe
 
 import GHC.Parser.PreProcess.Eval
@@ -46,37 +45,31 @@ process s str = (s0, o)
         CppIfdef name -> ifdef s name
         CppIf ifstr -> cppIf s ifstr
         CppIfndef name -> ifndef s name
-        CppElse -> undefined
-        CppEndif -> undefined
+        CppElse -> cppElse s
+        CppEndif -> popScope' s
 
 -- ---------------------------------------------------------------------
 
 define :: PpState -> String -> MacroDef -> PpState
-define s name toks = s{pp_defines = Map.insert (MacroName name Nothing) toks (pp_defines s)}
+define s name toks = addDefine' s (MacroName name Nothing) toks
 
 ifdef :: PpState -> String -> PpState
-ifdef s name =
-    case Map.lookup (MacroName name Nothing) (pp_defines s) of
-        Just _ -> s{pp_accepting = True}
-        _ -> s{pp_accepting = False}
+ifdef s name = pushAccepting' s (ppIsDefined' s (MacroName name Nothing))
 
 ifndef :: PpState -> String -> PpState
-ifndef s name =
-    case Map.lookup (MacroName name Nothing) (pp_defines s) of
-        Just _ -> s{pp_accepting = False}
-        _ -> s{pp_accepting = True}
+ifndef s name = pushAccepting' s (not $ ppIsDefined' s (MacroName name Nothing))
 
+-- We evaluate to an Int, which we convert to a Bool
 cppIf :: PpState -> String -> PpState
-cppIf s str = r
+cppIf s str = pushAccepting' s (toBool v)
   where
     expanded = expand s str
-    -- toks0 = cppLex expanded
-    -- r = error (show toks0)
     v = case Parser.parseExpr expanded of
         Left err -> error $ show err
         Right tree -> eval tree
-    --    We evaluate to an Int, which we convert to a bool
-    r = s{pp_accepting = toBool v}
+
+cppElse :: PpState -> PpState
+cppElse s = setAccepting' s (not $ getAccepting' s)
 
 -- ---------------------------------------------------------------------
 
@@ -96,7 +89,7 @@ expandOne s tok = r
     r =
         fromMaybe
             (t_str tok)
-            (Map.lookup (MacroName (t_str tok) Nothing) (pp_defines s))
+            (ppDefinition' s (MacroName (t_str tok) Nothing))
 
 -- ---------------------------------------------------------------------
 

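The `expand` used by `cppIf` above works towards a fixpoint, as described
in Note [GhcCPP Processing Overview]. A minimal standalone illustration of
that idea, substituting word-for-word rather than over CPP tokens (the
names below are illustrative only, not part of this patch):

import Data.Map (Map)
import qualified Data.Map as Map

-- Repeatedly substitute defined names until nothing changes. The real
-- expansion works on CPP tokens and must guard against self-referential
-- macros; this toy version does neither.
expandToFixpoint :: Map String String -> String -> String
expandToFixpoint defs s
    | s' == s   = s
    | otherwise = expandToFixpoint defs s'
  where
    s' = unwords (map subst (words s))
    subst w = Map.findWithDefault w w defs

With FOO defined as "BAR" and BAR defined as "1", it rewrites
"FOO >= 2" to "BAR >= 2" and then to "1 >= 2" before stopping.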

=====================================
compiler/GHC/Parser/PreProcess/ParsePP.hs
=====================================
@@ -1,11 +1,10 @@
 module GHC.Parser.PreProcess.ParsePP (
-    cppLex,
     parseDirective,
+    cppLex,
 ) where
 
-import Data.List
+import Data.List (intercalate)
 import GHC.Parser.Errors.Ppr ()
-
 import GHC.Parser.PreProcess.Lexer
 import GHC.Parser.PreProcess.ParserM (Token (..), init_state)
 import GHC.Parser.PreProcess.Types
@@ -15,7 +14,7 @@ import GHC.Prelude
 -- First parse to CPP tokens, using a C++-like language spec
 -- https://gcc.gnu.org/onlinedocs/cpp/Tokenization.html
 
--- Parse a CPP directive, using tokens from the CPP lexer
+-- | Parse a CPP directive, using tokens from the CPP lexer
 parseDirective :: String -> Either String CppDirective
 parseDirective s =
     case cppLex s of
@@ -29,23 +28,30 @@ parseDirective s =
                 ("#" : "ifdef" : ts) -> Right $ cppIfdef ts
                 ("#" : "else" : ts) -> Right $ cppElse ts
                 ("#" : "endif" : ts) -> Right $ cppEndif ts
-                other -> Left ("unexpected directive: " ++ (intercalate " " other))
+                other -> Left ("unexpected directive: " ++ (combineToks other))
+
+{- | Rebuild directive text from its tokens, separated by single spaces
+so adjacent tokens are not merged, as CPP requires. This also replaces
+any comment with a space, since a comment cannot occur within a token.
+-}
+combineToks :: [String] -> String
+combineToks ss = intercalate " " ss
 
 cppDefine :: [String] -> Either String CppDirective
 cppDefine [] = Left "error:empty #define directive"
-cppDefine (n : ts) = Right $ CppDefine n (intercalate " " ts)
+cppDefine (n : ts) = Right $ CppDefine n (combineToks ts)
 
 cppInclude :: [String] -> CppDirective
-cppInclude ts = CppInclude (intercalate " " ts)
+cppInclude ts = CppInclude (combineToks ts)
 
 cppIf :: [String] -> CppDirective
-cppIf ts = CppIf (intercalate " " ts)
+cppIf ts = CppIf (combineToks ts)
 
 cppIfdef :: [String] -> CppDirective
-cppIfdef ts = CppIfdef (intercalate " " ts)
+cppIfdef ts = CppIfdef (combineToks ts)
 
 cppIfndef :: [String] -> CppDirective
-cppIfndef ts = CppIfndef (intercalate " " ts)
+cppIfndef ts = CppIfndef (combineToks ts)
 
 cppElse :: [String] -> CppDirective
 cppElse _ts = CppElse
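
For instance (illustrative only, assuming `combineToks` as defined
above), joining the token strings of a directive body keeps them from
fusing when the stored text is later re-lexed:

    combineToks ["FOO", "(", "x", ")", ">=", "2"]  ==  "FOO ( x ) >= 2"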


=====================================
compiler/GHC/Parser/PreProcess/Types.hs
=====================================
@@ -1,34 +1,45 @@
 module GHC.Parser.PreProcess.Types where
 
+import Data.List.NonEmpty (NonEmpty (..), (<|))
+import Data.List.NonEmpty qualified as NonEmpty
+import Data.Map (Map)
+import Data.Map qualified as Map
 import GHC.Data.StringBuffer
-import GHC.Parser.Lexer (Token (..))
-import qualified GHC.Parser.Lexer as Lexer
+import GHC.Parser.Lexer (P (..), PState (..), ParseResult (..), Token (..))
+import GHC.Parser.Lexer qualified as Lexer
+import GHC.Prelude
 import GHC.Types.SrcLoc
 
-import Data.Map (Map)
-import qualified Data.Map as Map
-import GHC.Prelude
+-- ---------------------------------------------------------------------
+
+type PP = P PpState
+
+data CppState
+    = CppIgnoring
+    | CppNormal
+    deriving (Show)
 
 -- ---------------------------------------------------------------------
 
 initPpState :: PpState
 initPpState =
     PpState
-        { pp_defines = Map.empty
-        , pp_includes = Map.empty
+        { pp_includes = Map.empty
         , pp_include_stack = []
         , pp_continuation = []
-        , pp_context = []
-        , pp_accepting = True
+        , pp_scope = (PpScope Map.empty True) :| []
         }
 
 data PpState = PpState
-    { pp_defines :: !(Map MacroName MacroDef)
-    , pp_includes :: !(Map String StringBuffer)
+    { pp_includes :: !(Map String StringBuffer)
     , pp_include_stack :: ![Lexer.AlexInput]
     , pp_continuation :: ![Located Token]
-    , pp_context :: ![Token]
-    -- ^ What preprocessor directive we are currently processing
+    , pp_scope :: !(NonEmpty PpScope)
+    }
+    deriving (Show)
+
+data PpScope = PpScope
+    { pp_defines :: !(Map MacroName MacroDef)
     , pp_accepting :: !Bool
     }
     deriving (Show)
@@ -83,3 +94,144 @@ data CompOp
     deriving (Show, Eq)
 
 -- ---------------------------------------------------------------------
+-- Preprocessor state functions
+
+getCppState :: PP CppState
+getCppState = do
+    accepting <- getAccepting
+    if accepting
+        then return CppNormal
+        else return CppIgnoring
+
+-- pp_scope stack start -----------------
+
+pushScope :: PpScope -> PP ()
+pushScope new =
+    P $ \s -> POk s{pp = (pp s){pp_scope = new <| (pp_scope (pp s))}} ()
+
+pushScope' :: PpState -> PpScope -> PpState
+pushScope' s new = s{pp_scope = new <| (pp_scope s)}
+
+popScope :: PP ()
+popScope =
+    P $ \s ->
+        let
+            new_scope = case pp_scope (pp s) of
+                c :| [] -> c :| []
+                _ :| (h : t) -> h :| t
+         in
+            POk s{pp = (pp s){pp_scope = new_scope}} ()
+
+popScope' :: PpState -> PpState
+popScope' s =
+    let
+        new_scope = case pp_scope s of
+            c :| [] -> c :| []
+            _ :| (h : t) -> h :| t
+     in
+        s{pp_scope = new_scope}
+
+getScope :: PP PpScope
+getScope =
+    P $ \s -> POk s (getScope' (pp s))
+
+getScope' :: PpState -> PpScope
+getScope' s = NonEmpty.head $ pp_scope s
+
+setScope :: PpScope -> PP ()
+setScope scope =
+    P $ \s ->
+        let
+            new_scope = case pp_scope (pp s) of
+                _ :| rest -> scope :| rest
+         in
+            POk s{pp = (pp s){pp_scope = new_scope}} ()
+
+setScope' :: PpState -> PpScope -> PpState
+setScope' s scope =
+    let
+        new_scope = case pp_scope s of
+            _ :| rest -> scope :| rest
+     in
+        s{pp_scope = new_scope}
+
+setAccepting :: Bool -> PP ()
+setAccepting on = do
+    scope <- getScope
+    setScope (scope{pp_accepting = on})
+
+pushAccepting' :: PpState -> Bool -> PpState
+pushAccepting' s on = pushScope' s (PpScope Map.empty on)
+
+setAccepting' :: PpState -> Bool -> PpState
+setAccepting' s on = setScope' s (scope{pp_accepting = on})
+  where
+    scope = getScope' s
+
+getAccepting :: PP Bool
+getAccepting = P $ \s -> POk s (pp_accepting (NonEmpty.head $ pp_scope (pp s)))
+
+getAccepting' :: PpState -> Bool
+getAccepting' s = pp_accepting (NonEmpty.head $ pp_scope s)
+
+addDefine :: MacroName -> MacroDef -> PP ()
+addDefine name def = do
+    scope <- getScope
+    setScope (scope{pp_defines = Map.insert name def (pp_defines scope)})
+
+addDefine' :: PpState -> MacroName -> MacroDef -> PpState
+addDefine' s name def = r
+  where
+    scope = getScope' s
+    r = setScope' s (scope{pp_defines = Map.insert name def (pp_defines scope)})
+
+ppDefine :: MacroName -> MacroDef -> PP ()
+ppDefine name val = addDefine name val
+
+ppIsDefined :: MacroName -> PP Bool
+ppIsDefined name = do
+    -- Look up the chain of scopes until we find one that defines the name, or reach the end
+    let
+        lookup [] = False
+        lookup (h : t) =
+            if Map.member name (pp_defines h)
+                then True
+                else lookup t
+    pp <- getPpState
+    let scopes = NonEmpty.toList (pp_scope pp)
+    return $ lookup scopes
+
+ppIsDefined' :: PpState -> MacroName -> Bool
+ppIsDefined' s name = lookup scopes
+  where
+    -- Look up the chain of scopes until we find one that defines the name, or reach the end
+    lookup [] = False
+    lookup (h : t) =
+        if Map.member name (pp_defines h)
+            then True
+            else lookup t
+    scopes = NonEmpty.toList (pp_scope s)
+
+ppDefinition' :: PpState -> MacroName -> Maybe MacroDef
+ppDefinition' s name = lookup scopes
+  where
+    -- Look up the chain of scopes until we find one that defines the name, or reach the end
+    lookup [] = Nothing
+    lookup (h : t) =
+        if Map.member name (pp_defines h)
+            then Map.lookup name (pp_defines h)
+            else lookup t
+    scopes = NonEmpty.toList (pp_scope s)
+
+getPpState :: PP PpState
+getPpState = P $ \s -> POk s (pp s)
+
+-- -------------------------------------
+
+pushContinuation :: Located Token -> PP ()
+pushContinuation new =
+    P $ \s -> POk s{pp = (pp s){pp_continuation = new : pp_continuation (pp s)}} ()
+
+popContinuation :: PP [Located Token]
+popContinuation =
+    P $ \s -> POk s{pp = (pp s){pp_continuation = []}} (pp_continuation (pp s))
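
A small walkthrough of how the pure scope helpers above compose
(illustrative only, assuming they are imported from this module; the
comments name the directive that would drive each step):

scopeExample :: Bool
scopeExample =
    let s0 = initPpState                               -- one scope, accepting
        s1 = pushAccepting' s0 False                   -- #ifdef of an undefined macro
        s2 = setAccepting' s1 (not (getAccepting' s1)) -- #else: flip the innermost scope
        s3 = popScope' s2                              -- #endif: drop back to the outer scope
     in getAccepting' s3                               -- True: the outer scope still accepts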


=====================================
utils/check-cpp/ParsePP.hs
=====================================
@@ -23,30 +23,40 @@ import Lexer
 -- Parse a CPP directive, using tokens from the CPP lexer
 parseDirective :: String -> Either String CppDirective
 parseDirective s =
-  case cppLex s of
-    Left e -> Left e
-    Right toks ->
-      case map t_str toks of
-        ("#":"define":ts) -> cppDefine ts
-        ("#":"include":ts) -> Right $ cppInclude ts
-        ("#":"if":ts) -> Right $ cppIf ts
-        ("#":"ifndef":ts) -> Right $ cppIfndef ts
-        ("#":"ifdef":ts) -> Right $ cppIfdef ts
-        ("#":"else":ts) -> Right $ cppElse ts
-        ("#":"endif":ts) -> Right $ cppEndif ts
-        other -> Left ("unexpected directive: " ++ (intercalate " " other))
-
-
+    case cppLex s of
+        Left e -> Left e
+        Right toks ->
+            case map t_str toks of
+                ("#" : "define" : ts) -> cppDefine ts
+                ("#" : "include" : ts) -> Right $ cppInclude ts
+                ("#" : "if" : ts) -> Right $ cppIf ts
+                ("#" : "ifndef" : ts) -> Right $ cppIfndef ts
+                ("#" : "ifdef" : ts) -> Right $ cppIfdef ts
+                ("#" : "else" : ts) -> Right $ cppElse ts
+                ("#" : "endif" : ts) -> Right $ cppEndif ts
+                other -> Left ("unexpected directive: " ++ (intercalate " " other))
+
+cppDefine :: [String] -> Either String CppDirective
 cppDefine [] = Left "error:empty #define directive"
-cppDefine (n:ts) = Right $ CppDefine n (intercalate " " ts)
+cppDefine (n : ts) = Right $ CppDefine n (intercalate " " ts)
 
+cppInclude :: [String] -> CppDirective
 cppInclude ts = CppInclude (intercalate " " ts)
+
+cppIf :: [String] -> CppDirective
 cppIf ts = CppIf (intercalate " " ts)
+
+cppIfdef :: [String] -> CppDirective
 cppIfdef ts = CppIfdef (intercalate " " ts)
+
+cppIfndef :: [String] -> CppDirective
 cppIfndef ts = CppIfndef (intercalate " " ts)
+
+cppElse :: [String] -> CppDirective
 cppElse _ts = CppElse
-cppEndif _ts = CppEndif
 
+cppEndif :: [String] -> CppDirective
+cppEndif _ts = CppEndif
 
 -- ---------------------------------------------------------------------
 
@@ -59,6 +69,7 @@ cppLex s = case lexCppTokenStream s init_state of
 
 doATest :: String -> Either String CppDirective
 doATest str = parseDirective str
+
 -- doATest str = parseDirectiveOld str
 
 t0 :: Either String CppDirective


=====================================
utils/check-cpp/Parser.hs deleted
=====================================
@@ -1,881 +0,0 @@
-{-# OPTIONS_GHC -w #-}
-{-# LANGUAGE CPP #-}
-{-# LANGUAGE MagicHash #-}
-{-# LANGUAGE BangPatterns #-}
-{-# LANGUAGE TypeSynonymInstances #-}
-{-# LANGUAGE FlexibleInstances #-}
-{-# LANGUAGE PatternGuards #-}
-{-# LANGUAGE NoStrictData #-}
-{-# LANGUAGE UnboxedTuples #-}
-{-# LANGUAGE PartialTypeSignatures #-}
-#if __GLASGOW_HASKELL__ >= 710
-{-# LANGUAGE PartialTypeSignatures #-}
-#endif
-module Parser (parseExpr) where
-
-import Lexer (lex_tok)
-import ParserM (Token(..), ParserM, run_parser, get_pos, show_pos,
-                happyError)
-import Types
--- Needed when invoking happy -ad
-import qualified GHC.Internal.Data.Tuple as Happy_Prelude
-import qualified Data.Function as Happy_Prelude
-import qualified Data.Bool as Happy_Prelude
-import qualified Data.Function as Happy_Prelude
-import qualified Data.Maybe as Happy_Prelude
-import qualified Data.Int as Happy_Prelude
-import qualified Data.String as Happy_Prelude
-import qualified Data.List as Happy_Prelude
-import qualified Control.Monad as Happy_Prelude
-import qualified Text.Show as Happy_Prelude
-import qualified GHC.Num as Happy_Prelude
-import qualified GHC.Err as Happy_Prelude
-import qualified Data.Array as Happy_Data_Array
-import qualified Data.Bits as Bits
-import qualified GHC.Exts as Happy_GHC_Exts
-import qualified System.IO as Happy_System_IO
-import qualified System.IO.Unsafe as Happy_System_IO_Unsafe
-import qualified Debug.Trace as Happy_Debug_Trace
-import Control.Applicative(Applicative(..))
-import Control.Monad (ap)
-
--- parser produced by Happy Version 2.1.4
-
-data HappyAbsSyn t7
-        = HappyTerminal (Token)
-        | HappyErrorToken Happy_Prelude.Int
-        | HappyAbsSyn5 (Expr)
-        | HappyAbsSyn7 t7
-
-{-# NOINLINE happyTokenStrings #-}
-happyTokenStrings = ["'{'","'}'","'['","']'","'#'","'##'","'('","')'","'<:'","':>'","'<%'","'%>'","'%:'","'%:%:'","';'","':'","'...'","'new'","'delete'","'?'","'::'","'.'","'.*'","'+'","'-'","'*'","'/'","'%'","'^'","'&'","'|'","'~'","'!'","'='","'<'","'>'","'+='","'-='","'*='","'/='","'%='","'^='","'&='","'|='","'<<'","'>>'","'>>='","'<<='","'=='","'!='","'<='","'>='","'&&'","'||'","'++'","'--'","','","'->*'","'->'","'and'","'and_eq'","'bitand'","'bitor'","'compl'","'not'","'not_eq'","'or'","'or_eq'","'xor'","'xor_eq'","lower_name","upper_name","integer","string","other","%eof"]
-
-happyActOffsets :: HappyAddr
-happyActOffsets = HappyA# "\xf9\xff\xff\xff\xe2\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\xff\xff\xff\xf9\xff\xff\xff\x00\x00\x00\x00\xfa\xff\xff\xff\xf9\xff\xff\xff\xf9\xff\xff\xff\xf9\xff\xff\xff\xf9\xff\xff\xff\xf9\xff\xff\xff\xf9\xff\xff\xff\xf9\xff\xff\xff\xe6\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xec\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"#
-
-happyGotoOffsets :: HappyAddr
-happyGotoOffsets = HappyA# "\x06\x00\x00\x00\x49\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x00\x00\x00\x27\x00\x00\x00\x37\x00\x00\x00\x3a\x00\x00\x00\x3d\x00\x00\x00\x44\x00\x00\x00\x47\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"#
-
-happyDefActions :: HappyAddr
-happyDefActions = HappyA# "\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xf4\xff\xff\xff\xf3\xff\xff\xff\xf2\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xfd\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfb\xff\xff\xff\xfa\xff\xff\xff\xf7\xff\xff\xff\xf5\xff\xff\xff\xf9\xff\xff\xff\xf8\xff\xff\xff\xf6\xff\xff\xff\xfc\xff\xff\xff"#
-
-happyCheck :: HappyAddr
-happyCheck = HappyA# "\xff\xff\xff\xff\x08\x00\x00\x00\xff\xff\xff\xff\x09\x00\x00\x00\x24\x00\x00\x00\x25\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\xff\xff\xff\xff\x24\x00\x00\x00\x25\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\xff\xff\xff\xff\x24\x00\x00\x00\x25\x00\x00\x00\x32\x00\x00\x00\xff\xff\xff\xff\x34\x00\x00\x00\x35\x00\x00\x00\x36\x00\x00\x00\x37\x00\x00\x00\x32\x00\x00\x00\xff\xff\xff\xff\x34\x00\x00\x00\x35\x00\x00\x00\x36\x00\x00\x00\xff\xff\xff\xff\x24\x00\x00\x00\x25\x00\x00\x00\x34\x00\x00\x00\x35\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x24\x00\x00\x00\x25\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x48\x00\x00\x00\x49\x00\x00\x00\x32\x00\x00\x00\x4d\x00\x00\x00\x34\x00\x00\x00\x35\x00\x00\x00\x36\x00\x00\x00\x37\x00\x00\x00\xff\xff\xff\xff\x32\x00\x00\x00\xff\xff\xff\xff\x34\x00\x00\x00\x35\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\xff\xff\xff\xff\x48\x00\x00\x00\x49\x00\x00\x00\x4a\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"#
-
-happyTable :: HappyAddr
-happyTable = HappyA# "\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x0b\x00\x00\x00\x0c\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x0c\x00\x00\x00\x09\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x0c\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x0f\x00\x00\x00\x10\x00\x00\x00\x11\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x0f\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x0c\x00\x00\x00\x0e\x00\x00\x00\x0f\x00\x00\x00\x17\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x0b\x00\x00\x00\x0c\x00\x00\x00\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\x0d\x00\x00\x00\xff\xff\xff\xff\x0e\x00\x00\x00\x0f\x00\x00\x00\x10\x00\x00\x00\x11\x00\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x0f\x00\x00\x00\x15\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x14\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x13\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\x09\x00\x00\x00\x12\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x11\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"#
-
-happyReduceArr = Happy_Data_Array.array (1, 13) [
-        (1 , happyReduce_1),
-        (2 , happyReduce_2),
-        (3 , happyReduce_3),
-        (4 , happyReduce_4),
-        (5 , happyReduce_5),
-        (6 , happyReduce_6),
-        (7 , happyReduce_7),
-        (8 , happyReduce_8),
-        (9 , happyReduce_9),
-        (10 , happyReduce_10),
-        (11 , happyReduce_11),
-        (12 , happyReduce_12),
-        (13 , happyReduce_13)
-        ]
-
-happyRuleArr :: HappyAddr
-happyRuleArr = HappyA# "\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00"#
-
-happyCatchStates :: [Happy_Prelude.Int]
-happyCatchStates = []
-
-happy_n_terms = 78 :: Happy_Prelude.Int
-happy_n_nonterms = 3 :: Happy_Prelude.Int
-
-happy_n_starts = 1 :: Happy_Prelude.Int
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_1 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_1 = happySpecReduce_1  0# happyReduction_1
-happyReduction_1 (HappyAbsSyn5  happy_var_1)
-         =  HappyAbsSyn5
-                 (happy_var_1
-        )
-happyReduction_1 _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_2 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_2 = happySpecReduce_1  0# happyReduction_2
-happyReduction_2 (HappyTerminal happy_var_1)
-         =  HappyAbsSyn5
-                 (IntVal (read $ t_str happy_var_1)
-        )
-happyReduction_2 _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_3 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_3 = happySpecReduce_3  0# happyReduction_3
-happyReduction_3 _
-        (HappyAbsSyn5  happy_var_2)
-        _
-         =  HappyAbsSyn5
-                 (happy_var_2
-        )
-happyReduction_3 _ _ _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_4 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_4 = happySpecReduce_3  0# happyReduction_4
-happyReduction_4 (HappyAbsSyn5  happy_var_3)
-        _
-        (HappyAbsSyn5  happy_var_1)
-         =  HappyAbsSyn5
-                 (Logic LogicalOr happy_var_1 happy_var_3
-        )
-happyReduction_4 _ _ _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_5 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_5 = happySpecReduce_3  0# happyReduction_5
-happyReduction_5 (HappyAbsSyn5  happy_var_3)
-        _
-        (HappyAbsSyn5  happy_var_1)
-         =  HappyAbsSyn5
-                 (Logic LogicalAnd happy_var_1 happy_var_3
-        )
-happyReduction_5 _ _ _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_6 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_6 = happySpecReduce_3  0# happyReduction_6
-happyReduction_6 (HappyAbsSyn5  happy_var_3)
-        _
-        (HappyAbsSyn5  happy_var_1)
-         =  HappyAbsSyn5
-                 (Comp CmpEqual happy_var_1 happy_var_3
-        )
-happyReduction_6 _ _ _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_7 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_7 = happySpecReduce_3  0# happyReduction_7
-happyReduction_7 (HappyAbsSyn5  happy_var_3)
-        _
-        (HappyAbsSyn5  happy_var_1)
-         =  HappyAbsSyn5
-                 (Comp CmpGt happy_var_1 happy_var_3
-        )
-happyReduction_7 _ _ _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_8 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_8 = happySpecReduce_3  0# happyReduction_8
-happyReduction_8 (HappyAbsSyn5  happy_var_3)
-        _
-        (HappyAbsSyn5  happy_var_1)
-         =  HappyAbsSyn5
-                 (Comp CmpGtE happy_var_1 happy_var_3
-        )
-happyReduction_8 _ _ _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_9 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_9 = happySpecReduce_3  0# happyReduction_9
-happyReduction_9 (HappyAbsSyn5  happy_var_3)
-        _
-        (HappyAbsSyn5  happy_var_1)
-         =  HappyAbsSyn5
-                 (Comp CmpLt happy_var_1 happy_var_3
-        )
-happyReduction_9 _ _ _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_10 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_10 = happySpecReduce_3  0# happyReduction_10
-happyReduction_10 (HappyAbsSyn5  happy_var_3)
-        _
-        (HappyAbsSyn5  happy_var_1)
-         =  HappyAbsSyn5
-                 (Comp CmpLtE happy_var_1 happy_var_3
-        )
-happyReduction_10 _ _ _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_11 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_11 = happySpecReduce_1  1# happyReduction_11
-happyReduction_11 (HappyAbsSyn7  happy_var_1)
-         =  HappyAbsSyn5
-                 (Var happy_var_1
-        )
-happyReduction_11 _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_12 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_12 = happySpecReduce_1  2# happyReduction_12
-happyReduction_12 (HappyTerminal happy_var_1)
-         =  HappyAbsSyn7
-                 (t_str happy_var_1
-        )
-happyReduction_12 _  = notHappyAtAll 
-
-#if __GLASGOW_HASKELL__ >= 710
-happyReduce_13 :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> ParserM (HappyAbsSyn _)
-#endif
-happyReduce_13 = happySpecReduce_1  2# happyReduction_13
-happyReduction_13 (HappyTerminal happy_var_1)
-         =  HappyAbsSyn7
-                 (t_str happy_var_1
-        )
-happyReduction_13 _  = notHappyAtAll 
-
-happyTerminalToTok term = case term of {
-        TEOF "" -> 77#;
-        TOpenBrace {} -> 2#;
-        TCloseBrace {} -> 3#;
-        TOpenBracket {} -> 4#;
-        TCloseBracket {} -> 5#;
-        THash {} -> 6#;
-        THashHash {} -> 7#;
-        TOpenParen {} -> 8#;
-        TCloseParen {} -> 9#;
-        TLtColon {} -> 10#;
-        TColonGt{} -> 11#;
-        TLtPercent {} -> 12#;
-        TPercentGt {} -> 13#;
-        TPercentColon {} -> 14#;
-        TPercentColonTwice {} -> 15#;
-        TSemi {} -> 16#;
-        TColon {} -> 17#;
-        TDotDotDot {} -> 18#;
-        TNew {} -> 19#;
-        TDelete {} -> 20#;
-        TQuestion {} -> 21#;
-        TColonColon{} -> 22#;
-        TDot {} -> 23#;
-        TDotStar {} -> 24#;
-        TPlus {} -> 25#;
-        TMinus {} -> 26#;
-        TStar {} -> 27#;
-        TSlash {} -> 28#;
-        TPercent {} -> 29#;
-        TUpArrow {} -> 30#;
-        TAmpersand {} -> 31#;
-        TPipe {} -> 32#;
-        TTilde {} -> 33#;
-        TExclamation {} -> 34#;
-        TEqual {} -> 35#;
-        TOpenAngle {} -> 36#;
-        TCloseAngle {} -> 37#;
-        TPlusEqual {} -> 38#;
-        TMinusEqual {} -> 39#;
-        TStarEqual {} -> 40#;
-        TSlashEqual {} -> 41#;
-        TPercentEqual {} -> 42#;
-        TUpEqual {} -> 43#;
-        TAmpersandEqual {} -> 44#;
-        TPipeEqual {} -> 45#;
-        TLtLt {} -> 46#;
-        TGtGt {} -> 47#;
-        TGtGtEqual {} -> 48#;
-        TLtLtEqual {} -> 49#;
-        TEqualEqual {} -> 50#;
-        TExclaimEqual {} -> 51#;
-        TLtEqual {} -> 52#;
-        TGtEqual {} -> 53#;
-        TAmpersandTwice {} -> 54#;
-        TPipePipe {} -> 55#;
-        TPlusPlus {} -> 56#;
-        TMinusMinus {} -> 57#;
-        TComma {} -> 58#;
-        TMinusGtStar {} -> 59#;
-        TMinusGt {} -> 60#;
-        TAnd {} -> 61#;
-        TAndEq {} -> 62#;
-        TBitand {} -> 63#;
-        TBitor {} -> 64#;
-        TCompl {} -> 65#;
-        TNot {} -> 66#;
-        TNotEq {} -> 67#;
-        TOr {} -> 68#;
-        TOrEq {} -> 69#;
-        TXor {} -> 70#;
-        TXorEq {} -> 71#;
-        TLowerName {} -> 72#;
-        TUpperName {} -> 73#;
-        TInteger {} -> 74#;
-        TString {} -> 75#;
-        TOther {} -> 76#;
-        _ -> -1#;
-        }
-{-# NOINLINE happyTerminalToTok #-}
-
-happyLex kend kmore = lex_tok (\tk -> case tk of {
-        TEOF "" -> kend tk;
-        _ -> kmore (happyTerminalToTok tk) tk })
-{-# INLINE happyLex #-}
-
-happyNewToken action sts stk = happyLex (\tk -> happyDoAction 77# tk action sts stk) (\i tk -> happyDoAction i tk action sts stk)
-
-happyReport 77# = happyReport'
-happyReport _ = happyReport'
-
-
-happyThen :: () => (ParserM a) -> (a -> (ParserM b)) -> (ParserM b)
-happyThen = (Happy_Prelude.>>=)
-happyReturn :: () => a -> (ParserM a)
-happyReturn = (Happy_Prelude.return)
-#if __GLASGOW_HASKELL__ >= 710
-happyParse :: () => Happy_GHC_Exts.Int# -> ParserM (HappyAbsSyn _)
-
-happyNewToken :: () => Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> (ParserM (HappyAbsSyn _))
-
-happyDoAction :: () => Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> (ParserM (HappyAbsSyn _))
-
-happyReduceArr :: () => Happy_Data_Array.Array Happy_Prelude.Int (Happy_GHC_Exts.Int# -> Token -> Happy_GHC_Exts.Int# -> Happy_IntList -> HappyStk (HappyAbsSyn _) -> (ParserM (HappyAbsSyn _)))
-
-#endif
-happyThen1 :: () => ParserM a -> (a -> ParserM b) -> ParserM b
-happyThen1 = happyThen
-happyFmap1 f m = happyThen m (\a -> happyReturn (f a))
-happyReturn1 :: () => a -> (ParserM a)
-happyReturn1 = happyReturn
-happyReport' :: () => (Token) -> [Happy_Prelude.String] -> (ParserM a) -> (ParserM a)
-happyReport' = (\tokens expected resume -> happyError)
-
-happyAbort :: () => (ParserM a)
-happyAbort = Happy_Prelude.error "Called abort handler in non-resumptive parser"
-
-expr = happySomeParser where
- happySomeParser = happyThen (happyParse 0#) (\x -> case x of {HappyAbsSyn5 z -> happyReturn z; _other -> notHappyAtAll })
-
-happySeq = happyDontSeq
-
-
--- parseExpr :: String -> Either String Expr
-parseExpr = run_parser expr
-#define HAPPY_DEBUG 1
--- $Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp $
-
-#if !defined(__GLASGOW_HASKELL__)
-#  error This code isn't being built with GHC.
-#endif
-
--- Get WORDS_BIGENDIAN (if defined)
-#include "MachDeps.h"
-
--- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex.
-#define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Happy_Prelude.Bool)
-#define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Happy_Prelude.Bool)
-#define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Happy_Prelude.Bool)
-#define PLUS(n,m) (n Happy_GHC_Exts.+# m)
-#define MINUS(n,m) (n Happy_GHC_Exts.-# m)
-#define TIMES(n,m) (n Happy_GHC_Exts.*# m)
-#define NEGATE(n) (Happy_GHC_Exts.negateInt# (n))
-
-type Happy_Int = Happy_GHC_Exts.Int#
-data Happy_IntList = HappyCons Happy_Int Happy_IntList
-
-#define INVALID_TOK -1#
-#define ERROR_TOK 0#
-#define CATCH_TOK 1#
-
-#if defined(HAPPY_COERCE)
-#  define GET_ERROR_TOKEN(x)  (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# i) -> i })
-#  define MK_ERROR_TOKEN(i)   (Happy_GHC_Exts.unsafeCoerce# (Happy_GHC_Exts.I# i))
-#  define MK_TOKEN(x)         (happyInTok (x))
-#else
-#  define GET_ERROR_TOKEN(x)  (case x of { HappyErrorToken (Happy_GHC_Exts.I# i) -> i })
-#  define MK_ERROR_TOKEN(i)   (HappyErrorToken (Happy_GHC_Exts.I# i))
-#  define MK_TOKEN(x)         (HappyTerminal (x))
-#endif
-
-#if defined(HAPPY_DEBUG)
-#  define DEBUG_TRACE(s)    (happyTrace (s)) Happy_Prelude.$
-happyTrace string expr = Happy_System_IO_Unsafe.unsafePerformIO Happy_Prelude.$ do
-    Happy_System_IO.hPutStr Happy_System_IO.stderr string
-    Happy_Prelude.return expr
-#else
-#  define DEBUG_TRACE(s)    {- nothing -}
-#endif
-
-infixr 9 `HappyStk`
-data HappyStk a = HappyStk a (HappyStk a)
-
------------------------------------------------------------------------------
--- starting the parse
-
-happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll
-
------------------------------------------------------------------------------
--- Accepting the parse
-
--- If the current token is ERROR_TOK, it means we've just accepted a partial
--- parse (a %partial parser).  We must ignore the saved token on the top of
--- the stack in this case.
-happyAccept ERROR_TOK tk st sts (_ `HappyStk` ans `HappyStk` _) =
-        happyReturn1 ans
-happyAccept j tk st sts (HappyStk ans _) =
-        (happyTcHack j (happyTcHack st)) (happyReturn1 ans)
-
------------------------------------------------------------------------------
--- Arrays only: do the next action
-
-happyDoAction i tk st =
-  DEBUG_TRACE("state: " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++
-              ",\ttoken: " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# i) Happy_Prelude.++
-              ",\taction: ")
-  case happyDecodeAction (happyNextAction i st) of
-    HappyFail             -> DEBUG_TRACE("failing.\n")
-                             happyFail i tk st
-    HappyAccept           -> DEBUG_TRACE("accept.\n")
-                             happyAccept i tk st
-    HappyReduce rule      -> DEBUG_TRACE("reduce (rule " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# rule) Happy_Prelude.++ ")")
-                             (happyReduceArr Happy_Data_Array.! (Happy_GHC_Exts.I# rule)) i tk st
-    HappyShift  new_state -> DEBUG_TRACE("shift, enter state " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# new_state) Happy_Prelude.++ "\n")
-                             happyShift new_state i tk st
-
-{-# INLINE happyNextAction #-}
-happyNextAction i st = case happyIndexActionTable i st of
-  Happy_Prelude.Just (Happy_GHC_Exts.I# act) -> act
-  Happy_Prelude.Nothing                      -> happyIndexOffAddr happyDefActions st
-
-{-# INLINE happyIndexActionTable #-}
-happyIndexActionTable i st
-  | GTE(i, 0#), GTE(off, 0#), EQ(happyIndexOffAddr happyCheck off, i)
-  -- i >= 0:   Guard against INVALID_TOK (do the default action, which ultimately errors)
-  -- off >= 0: Otherwise it's a default action
-  -- equality check: Ensure that the entry in the compressed array is owned by st
-  = Happy_Prelude.Just (Happy_GHC_Exts.I# (happyIndexOffAddr happyTable off))
-  | Happy_Prelude.otherwise
-  = Happy_Prelude.Nothing
-  where
-    off = PLUS(happyIndexOffAddr happyActOffsets st, i)
-
-data HappyAction
-  = HappyFail
-  | HappyAccept
-  | HappyReduce Happy_Int -- rule number
-  | HappyShift Happy_Int  -- new state
-  deriving Happy_Prelude.Show
-
-{-# INLINE happyDecodeAction #-}
-happyDecodeAction :: Happy_Int -> HappyAction
-happyDecodeAction  0#                        = HappyFail
-happyDecodeAction -1#                        = HappyAccept
-happyDecodeAction action | LT(action, 0#)    = HappyReduce NEGATE(PLUS(action, 1#))
-                         | Happy_Prelude.otherwise = HappyShift MINUS(action, 1#)
-
-{-# INLINE happyIndexGotoTable #-}
-happyIndexGotoTable nt st = happyIndexOffAddr happyTable off
-  where
-    off = PLUS(happyIndexOffAddr happyGotoOffsets st, nt)
-
-{-# INLINE happyIndexOffAddr #-}
-happyIndexOffAddr :: HappyAddr -> Happy_Int -> Happy_Int
-happyIndexOffAddr (HappyA# arr) off =
-#if __GLASGOW_HASKELL__ >= 901
-  Happy_GHC_Exts.int32ToInt# -- qualified import because it doesn't exist on older GHC's
-#endif
-#ifdef WORDS_BIGENDIAN
-  -- The CI of `alex` tests this code path
-  (Happy_GHC_Exts.word32ToInt32# (Happy_GHC_Exts.wordToWord32# (Happy_GHC_Exts.byteSwap32# (Happy_GHC_Exts.word32ToWord# (Happy_GHC_Exts.int32ToWord32#
-#endif
-  (Happy_GHC_Exts.indexInt32OffAddr# arr off)
-#ifdef WORDS_BIGENDIAN
-  )))))
-#endif
-
-happyIndexRuleArr :: Happy_Int -> (# Happy_Int, Happy_Int #)
-happyIndexRuleArr r = (# nt, len #)
-  where
-    !(Happy_GHC_Exts.I# n_starts) = happy_n_starts
-    offs = TIMES(MINUS(r,n_starts),2#)
-    nt = happyIndexOffAddr happyRuleArr offs
-    len = happyIndexOffAddr happyRuleArr PLUS(offs,1#)
-
-data HappyAddr = HappyA# Happy_GHC_Exts.Addr#
-
------------------------------------------------------------------------------
--- Shifting a token
-
-happyShift new_state ERROR_TOK tk st sts stk@(x `HappyStk` _) =
-     -- See "Error Fixup" below
-     let i = GET_ERROR_TOKEN(x) in
-     DEBUG_TRACE("shifting the error token")
-     happyDoAction i tk new_state (HappyCons st sts) stk
-
-happyShift new_state i tk st sts stk =
-     happyNewToken new_state (HappyCons st sts) (MK_TOKEN(tk) `HappyStk` stk)
-
--- happyReduce is specialised for the common cases.
-
-happySpecReduce_0 nt fn j tk st sts stk
-     = happySeq fn (happyGoto nt j tk st (HappyCons st sts) (fn `HappyStk` stk))
-
-happySpecReduce_1 nt fn j tk old_st sts@(HappyCons st _) (v1 `HappyStk` stk')
-     = let r = fn v1 in
-       happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')))
-
-happySpecReduce_2 nt fn j tk old_st
-  (HappyCons _ sts@(HappyCons st _))
-  (v1 `HappyStk` v2 `HappyStk` stk')
-     = let r = fn v1 v2 in
-       happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')))
-
-happySpecReduce_3 nt fn j tk old_st
-  (HappyCons _ (HappyCons _ sts@(HappyCons st _)))
-  (v1 `HappyStk` v2 `HappyStk` v3 `HappyStk` stk')
-     = let r = fn v1 v2 v3 in
-       happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')))
-
-happyReduce k nt fn j tk st sts stk
-     = case happyDrop MINUS(k,(1# :: Happy_Int)) sts of
-         sts1@(HappyCons st1 _) ->
-                let r = fn stk in -- it doesn't hurt to always seq here...
-                st `happyTcHack` happyDoSeq r (happyGoto nt j tk st1 sts1 r)
-
-happyMonadReduce k nt fn j tk st sts stk =
-      case happyDrop k (HappyCons st sts) of
-        sts1@(HappyCons st1 _) ->
-          let drop_stk = happyDropStk k stk in
-          j `happyTcHack` happyThen1 (fn stk tk)
-                                     (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk))
-
-happyMonad2Reduce k nt fn j tk st sts stk =
-      case happyDrop k (HappyCons st sts) of
-        sts1@(HappyCons st1 _) ->
-          let drop_stk = happyDropStk k stk
-              off = happyIndexOffAddr happyGotoOffsets st1
-              off_i = PLUS(off, nt)
-              new_state = happyIndexOffAddr happyTable off_i
-          in
-            j `happyTcHack` happyThen1 (fn stk tk)
-                                       (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk))
-
-happyDrop 0# l               = l
-happyDrop n  (HappyCons _ t) = happyDrop MINUS(n,(1# :: Happy_Int)) t
-
-happyDropStk 0# l                 = l
-happyDropStk n  (x `HappyStk` xs) = happyDropStk MINUS(n,(1#::Happy_Int)) xs
-
------------------------------------------------------------------------------
--- Moving to a new state after a reduction
-
-happyGoto nt j tk st =
-   DEBUG_TRACE(", goto state " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# new_state) Happy_Prelude.++ "\n")
-   happyDoAction j tk new_state
-  where new_state = happyIndexGotoTable nt st
-
-{- Note [Error recovery]
-~~~~~~~~~~~~~~~~~~~~~~~~
-When there is no applicable action for the current lookahead token `tk`,
-happy enters error recovery mode. Depending on whether the grammar file
-declares the two action form `%error { abort } { report }` for
-    Resumptive Error Handling,
-it works in one (not resumptive) or two phases (resumptive):
-
- 1. Fixup mode:
-    Try to see if there is an action for the error token ERROR_TOK. If there
-    is, do *not* emit an error and pretend instead that an `error` token was
-    inserted.
-    When there is no ERROR_TOK action, report an error.
-
-    In non-resumptive error handling, calling the single error handler
-    (e.g. `happyError`) will throw an exception and abort the parser.
-    However, in resumptive error handling we enter *error resumption mode*.
-
- 2. Error resumption mode:
-    After reporting the error (with `report`), happy will attempt to find
-    a good state stack to resume parsing in.
-    For each candidate stack, it discards input until one of the candidates
-    resumes (i.e. shifts the current input).
-    If no candidate resumes before the end of input, resumption failed and
-    calls the `abort` function, to much the same effect as in non-resumptive
-    error handling.
-
-    Candidate stacks are declared by the grammar author using the special
-    `catch` terminal and called "catch frames".
-    This mechanism is described in detail in Note [happyResume].
-
-The `catch` resumption mechanism (2) is what usually is associated with
-`error` in `bison` or `menhir`. Since `error` is used for the Fixup mechanism
-(1) above, we call the corresponding token `catch`.
-Furthermore, in constrast to `bison`, our implementation of `catch`
-non-deterministically considers multiple catch frames on the stack for
-resumption (See Note [Multiple catch frames]).
-
-Note [happyResume]
-~~~~~~~~~~~~~~~~~~
-`happyResume` implements the resumption mechanism from Note [Error recovery].
-It is best understood by example. Consider
-
-Exp :: { String }
-Exp : '1'                { "1" }
-    | catch              { "catch" }
-    | Exp '+' Exp %shift { $1 Happy_Prelude.++ " + " Happy_Prelude.++ $3 } -- %shift: associate 1 + 1 + 1 to the right
-    | '(' Exp ')'        { "(" Happy_Prelude.++ $2 Happy_Prelude.++ ")" }
-
-The idea of the use of `catch` here is that upon encountering a parse error
-during expression parsing, we can gracefully degrade using the `catch` rule,
-still producing a partial syntax tree and keep on parsing to find further
-syntax errors.
-
-Let's trace the parser state for input 11+1, which will error out after shifting 1.
-After shifting, we have the following item stack (growing downwards and omitting
-transitive closure items):
-
-  State 0: %start_parseExp -> . Exp
-  State 5: Exp -> '1' .
-
-(Stack as a list of state numbers: [5,0].)
-As Note [Error recovery] describes, we will first try Fixup mode.
-That fails because no production can shift the `error` token.
-Next we try Error resumption mode. This works as follows:
-
-  1. Pop off the item stack until we find an item that can shift the `catch`
-     token. (Implemented in `pop_items`.)
-       * State 5 cannot shift catch. Pop.
-       * State 0 can shift catch, which would transition into
-          State 4: Exp -> catch .
-     So record the *stack* `[4,0]` after doing the shift transition.
-     We call this a *catch frame*, where the top is a *catch state*,
-     corresponding to an item in which we just shifted a `catch` token.
-     There can be multiple such catch stacks, see Note [Multiple catch frames].
-
-  2. Discard tokens from the input until the lookahead can be shifted in one
-     of the catch stacks. (Implemented in `discard_input_until_exp` and
-     `some_catch_state_shifts`.)
-       * We cannot shift the current lookahead '1' in state 4, so we discard
-       * We *can* shift the next lookahead '+' in state 4, but only after
-         reducing, which pops State 4 and goes to State 3:
-           State 3: %start_parseExp -> Exp .
-                    Exp -> Exp . '+' Exp
-         Here we can shift '+'.
-     As you can see, to implement this machinery we need to simulate
-     the operation of the LALR automaton, especially reduction
-     (`happySimulateReduce`).
-
-Note [Multiple catch frames]
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-For fewer spurious error messages, it can be beneficial to trace multiple catch
-items. Consider
-
-Exp : '1'
-    | catch
-    | Exp '+' Exp %shift
-    | '(' Exp ')'
-
-Let's trace the parser state for input (;+1, which will error out after shifting (.
-After shifting, we have the following item stack (growing downwards):
-
-  State 0: %start_parseExp -> . Exp
-  State 6: Exp -> '(' . Exp ')'
-
-Upon error, we want to find items in the stack which can shift a catch token.
-Note that both State 0 and State 6 can shift a catch token, transitioning into
-  State 4: Exp -> catch .
-Hence we record the catch frames `[4,6,0]` and `[4,0]` for possible resumption.
-
-Which catch frame do we pick for resumption?
-Note that resuming catch frame `[4,0]` will parse as "catch+1", whereas
-resuming the innermost frame `[4,6,0]` corresponds to parsing "(catch+1".
-The latter would keep discarding input until the closing ')' is found.
-So we will discard + and 1, leading to a spurious syntax error at the end of
-input, aborting the parse and never producing a partial syntax tree. Bad!
-
-It is far preferable to resume with catch frame `[4,0]`, where we can resume
-successfully on input +, so that is what we do.
-
-In general, we pick the catch frame for resumption that discards the least
-amount of input for a successful shift, preferring the topmost such catch frame.
--}
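
A small, self-contained Haskell sketch of the catch-frame selection described in the
notes above (it is not part of the Happy template: the Tok type, the frame contents
and the canShiftFrame oracle are made up and simply encode what the note says, namely
that frame [4,0] can continue on '+' once Exp -> catch has been reduced, while frame
[4,6,0] only makes progress on the missing ')'). It picks the frame that discards the
least input, breaking ties in favour of the frame recorded nearest the top of the stack:

import Data.List (minimumBy)
import Data.Ord  (comparing)

type State      = Int
type CatchFrame = [State]   -- state stack after shifting `catch`, topmost first

data Tok = TSemi | TPlus | TOne | TClose | TEOF
  deriving (Eq, Show)

-- Toy oracle: can this frame (after any simulated reductions) shift the token?
-- The real parser answers this by consulting the LALR tables via happySimulateReduce.
canShiftFrame :: CatchFrame -> Tok -> Bool
canShiftFrame [4, 0]    TPlus  = True   -- Exp -> catch . reduces, then Exp . '+' Exp
canShiftFrame [4, 6, 0] TClose = True   -- inside '(' only the missing ')' makes progress
canShiftFrame _         _      = False

-- How many lookahead tokens a frame must discard before it can shift,
-- or Nothing if it never can before the input runs out.
discards :: [Tok] -> CatchFrame -> Maybe Int
discards toks frame =
  case [n | (n, t) <- zip [0 ..] toks, canShiftFrame frame t] of
    (n : _) -> Just n
    []      -> Nothing

-- Pick the frame needing the fewest discards; earlier frames win ties.
pickFrame :: [Tok] -> [CatchFrame] -> Maybe CatchFrame
pickFrame toks frames =
  case [(n, f) | f <- frames, Just n <- [discards toks f]] of
    []         -> Nothing
    candidates -> Just (snd (minimumBy (comparing fst) candidates))

-- Input "(;+1": after the error the remaining tokens are ';' '+' '1' EOF,
-- and the catch frames are listed innermost first, as pop_items records them.
main :: IO ()
main = print (pickFrame [TSemi, TPlus, TOne, TEOF] [[4, 6, 0], [4, 0]])
-- prints Just [4,0]: discard ';', resume on '+', matching the note's conclusion.
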
-
--- happyFail :: Happy_Int -> Token -> Happy_Int -> _
--- This function triggers Note [Error recovery].
--- If the current token is ERROR_TOK, phase (1) has failed and we might try
--- phase (2).
-happyFail ERROR_TOK = happyFixupFailed
-happyFail i         = happyTryFixup i
-
--- Enter Error Fixup (see Note [Error recovery]):
--- generate an error token, save the old token and carry on.
--- When a `happyShift` accepts the error token, we will pop off the error token
--- to resume parsing with the current lookahead `i`.
-happyTryFixup i tk action sts stk =
-  DEBUG_TRACE("entering `error` fixup.\n")
-  happyDoAction ERROR_TOK tk action sts (MK_ERROR_TOKEN(i) `HappyStk` stk)
-  -- NB: `happyShift` will simply pop the error token and carry on with
-  --     `tk`. Hence we don't change `tk` in the call here
-
--- See Note [Error recovery], phase (2).
--- Enter resumption mode after reporting the error by calling `happyResume`.
-happyFixupFailed tk st sts (x `HappyStk` stk) =
-  let i = GET_ERROR_TOKEN(x) in
-  DEBUG_TRACE("`error` fixup failed.\n")
-  let resume   = happyResume i tk st sts stk
-      expected = happyExpectedTokens st sts in
-  happyReport i tk expected resume
-
--- happyResume :: Happy_Int -> Token -> Happy_Int -> _
--- See Note [happyResume]
-happyResume i tk st sts stk = pop_items [] st sts stk
-  where
-    !(Happy_GHC_Exts.I# n_starts) = happy_n_starts   -- this is to test whether we have a start token
-    !(Happy_GHC_Exts.I# eof_i) = happy_n_terms Happy_Prelude.- 1   -- this is the token number of the EOF token
-    happy_list_to_list :: Happy_IntList -> [Happy_Prelude.Int]
-    happy_list_to_list (HappyCons st sts)
-      | LT(st, n_starts)
-      = [(Happy_GHC_Exts.I# st)]
-      | Happy_Prelude.otherwise
-      = (Happy_GHC_Exts.I# st) : happy_list_to_list sts
-
-    -- See (1) of Note [happyResume]
-    pop_items catch_frames st sts stk
-      | LT(st, n_starts)
-      = DEBUG_TRACE("reached start state " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ ", ")
-        if Happy_Prelude.null catch_frames_new
-          then DEBUG_TRACE("no resumption.\n")
-               happyAbort
-          else DEBUG_TRACE("now discard input, trying to anchor in states " Happy_Prelude.++ Happy_Prelude.show (Happy_Prelude.map (happy_list_to_list . Happy_Prelude.fst) (Happy_Prelude.reverse catch_frames_new)) Happy_Prelude.++ ".\n")
-               discard_input_until_exp i tk (Happy_Prelude.reverse catch_frames_new)
-      | (HappyCons st1 sts1) <- sts, _ `HappyStk` stk1 <- stk
-      = pop_items catch_frames_new st1 sts1 stk1
-      where
-        !catch_frames_new
-          | HappyShift new_state <- happyDecodeAction (happyNextAction CATCH_TOK st)
-          , DEBUG_TRACE("can shift catch token in state " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ ", into state " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# new_state) Happy_Prelude.++ "\n")
-            Happy_Prelude.null (Happy_Prelude.filter (\(HappyCons _ (HappyCons h _),_) -> EQ(st,h)) catch_frames)
-          = (HappyCons new_state (HappyCons st sts), MK_ERROR_TOKEN(i) `HappyStk` stk):catch_frames -- MK_ERROR_TOKEN(i) is just some dummy that should not be accessed by user code
-          | Happy_Prelude.otherwise
-          = DEBUG_TRACE("already shifted or can't shift catch in " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ "\n")
-            catch_frames
-
-    -- See (2) of Note [happyResume]
-    discard_input_until_exp i tk catch_frames
-      | Happy_Prelude.Just (HappyCons st (HappyCons catch_st sts), catch_frame) <- some_catch_state_shifts i catch_frames
-      = DEBUG_TRACE("found expected token in state " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ " after shifting from " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# catch_st) Happy_Prelude.++ ": " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# i) Happy_Prelude.++ "\n")
-        happyDoAction i tk st (HappyCons catch_st sts) catch_frame
-      | EQ(i,eof_i) -- is i EOF?
-      = DEBUG_TRACE("reached EOF, cannot resume. abort parse :(\n")
-        happyAbort
-      | Happy_Prelude.otherwise
-      = DEBUG_TRACE("discard token " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# i) Happy_Prelude.++ "\n")
-        happyLex (\eof_tk -> discard_input_until_exp eof_i eof_tk catch_frames) -- eof
-                 (\i tk   -> discard_input_until_exp i tk catch_frames)         -- not eof
-
-    some_catch_state_shifts _ [] = DEBUG_TRACE("no catch state could shift.\n") Happy_Prelude.Nothing
-    some_catch_state_shifts i catch_frames@(((HappyCons st sts),_):_) = try_head i st sts catch_frames
-      where
-        try_head i st sts catch_frames = -- PRECONDITION: head catch_frames = (HappyCons st sts)
-          DEBUG_TRACE("trying token " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# i) Happy_Prelude.++ " in state " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ ": ")
-          case happyDecodeAction (happyNextAction i st) of
-            HappyFail     -> DEBUG_TRACE("fail.\n")   some_catch_state_shifts i (Happy_Prelude.tail catch_frames)
-            HappyAccept   -> DEBUG_TRACE("accept.\n") Happy_Prelude.Just (Happy_Prelude.head catch_frames)
-            HappyShift _  -> DEBUG_TRACE("shift.\n")  Happy_Prelude.Just (Happy_Prelude.head catch_frames)
-            HappyReduce r -> case happySimulateReduce r st sts of
-              (HappyCons st1 sts1) -> try_head i st1 sts1 catch_frames
-
-happySimulateReduce r st sts =
-  DEBUG_TRACE("simulate reduction of rule " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# r) Happy_Prelude.++ ", ")
-  let (# nt, len #) = happyIndexRuleArr r in
-  DEBUG_TRACE("nt " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# nt) Happy_Prelude.++ ", len: " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# len) Happy_Prelude.++ ", new_st ")
-  let !(sts1@(HappyCons st1 _)) = happyDrop len (HappyCons st sts)
-      new_st = happyIndexGotoTable nt st1 in
-  DEBUG_TRACE(Happy_Prelude.show (Happy_GHC_Exts.I# new_st) Happy_Prelude.++ ".\n")
-  (HappyCons new_st sts1)
-
-happyTokenToString :: Happy_Prelude.Int -> Happy_Prelude.String
-happyTokenToString i = happyTokenStrings Happy_Prelude.!! (i Happy_Prelude.- 2) -- 2: errorTok, catchTok
-
-happyExpectedTokens :: Happy_Int -> Happy_IntList -> [Happy_Prelude.String]
--- Upon a parse error, we want to suggest tokens that are expected in that
--- situation. This function computes such tokens.
--- It works by examining the top of the state stack.
--- For every token number that does a shift transition, record that token number.
--- For every token number that does a reduce transition, simulate that reduction
-- on the state stack and repeat.
--- The recorded token numbers are then formatted with 'happyTokenToString' and
--- returned.
-happyExpectedTokens st sts =
-  DEBUG_TRACE("constructing expected tokens.\n")
-  Happy_Prelude.map happyTokenToString (search_shifts st sts [])
-  where
-    search_shifts st sts shifts = Happy_Prelude.foldr (add_action st sts) shifts (distinct_actions st)
-    add_action st sts (Happy_GHC_Exts.I# i, Happy_GHC_Exts.I# act) shifts =
-      DEBUG_TRACE("found action in state " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ ", input " Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# i) Happy_Prelude.++ ", " Happy_Prelude.++ Happy_Prelude.show (happyDecodeAction act) Happy_Prelude.++ "\n")
-      case happyDecodeAction act of
-        HappyFail     -> shifts
-        HappyAccept   -> shifts -- This would always be %eof or error... Not helpful
-        HappyShift _  -> Happy_Prelude.insert (Happy_GHC_Exts.I# i) shifts
-        HappyReduce r -> case happySimulateReduce r st sts of
-          (HappyCons st1 sts1) -> search_shifts st1 sts1 shifts
-    distinct_actions st
-      -- The (token number, action) pairs of all actions in the given state
-      = ((-1), (Happy_GHC_Exts.I# (happyIndexOffAddr happyDefActions st)))
-      : [ (i, act) | i <- [begin_i..happy_n_terms], act <- get_act row_off i ]
-      where
-        row_off = happyIndexOffAddr happyActOffsets st
-        begin_i = 2 -- +2: errorTok,catchTok
-    get_act off (Happy_GHC_Exts.I# i) -- happyIndexActionTable with cached row offset
-      | let off_i = PLUS(off,i)
-      , GTE(off_i,0#)
-      , EQ(happyIndexOffAddr happyCheck off_i,i)
-      = [(Happy_GHC_Exts.I# (happyIndexOffAddr happyTable off_i))]
-      | Happy_Prelude.otherwise
-      = []
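
The shift-or-simulate-reduce search above can be illustrated with a standalone Haskell
sketch over an abstract automaton. The Action type, the reduce-simulation closure and
the toy table in demo are hypothetical stand-ins for happyNextAction, happyDecodeAction
and happySimulateReduce, and nub stands in for the sorted insert used here:

import Data.List (nub)

type State = Int

-- What the action table says for (state, token); Reduce carries a function
-- that simulates the reduction on a state stack.
data Action = Shift State
            | Reduce ([State] -> [State])
            | Fail

-- A token is "expected" if, possibly after a chain of simulated reductions,
-- the automaton can shift it from the current stack.
expectedTokens
  :: Eq tok
  => (State -> tok -> Action)  -- action table lookup
  -> [tok]                     -- all terminals of the grammar
  -> [State]                   -- state stack, topmost first
  -> [tok]
expectedTokens action terminals stack = nub (filter shifts terminals)
  where
    shifts t = go stack
      where
        go sts@(st : _) = case action st t of
          Shift _      -> True
          Reduce simul -> go (simul sts)   -- repeat on the reduced stack
          Fail         -> False
        go [] = False

-- Toy usage: state 1 always reduces (pop one state, goto state 2),
-- state 2 shifts only '+'.
demo :: String
demo = expectedTokens act "+1)" [1, 0]   -- evaluates to "+"
  where
    act :: State -> Char -> Action
    act 1 _   = Reduce (\sts -> 2 : drop 1 sts)
    act 2 '+' = Shift 3
    act _ _   = Fail
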
-
--- Internal happy errors:
-
-notHappyAtAll :: a
-notHappyAtAll = Happy_Prelude.error "Internal Happy parser panic. This is not supposed to happen! Please open a bug report at https://github.com/haskell/happy/issues.\n"
-
------------------------------------------------------------------------------
--- Hack to get the typechecker to accept our action functions
-
-happyTcHack :: Happy_Int -> a -> a
-happyTcHack x y = y
-{-# INLINE happyTcHack #-}
-
------------------------------------------------------------------------------
--- Seq-ing.  If the --strict flag is given, then Happy emits
---      happySeq = happyDoSeq
--- otherwise it emits
---      happySeq = happyDontSeq
-
-happyDoSeq, happyDontSeq :: a -> b -> b
-happyDoSeq   a b = a `Happy_GHC_Exts.seq` b
-happyDontSeq a b = b
-
------------------------------------------------------------------------------
--- Don't inline any functions from the template.  GHC has a nasty habit
--- of deciding to inline happyGoto everywhere, which increases the size of
--- the generated parser quite a bit.
-
-{-# NOINLINE happyDoAction #-}
-{-# NOINLINE happyTable #-}
-{-# NOINLINE happyCheck #-}
-{-# NOINLINE happyActOffsets #-}
-{-# NOINLINE happyGotoOffsets #-}
-{-# NOINLINE happyDefActions #-}
-
-{-# NOINLINE happyShift #-}
-{-# NOINLINE happySpecReduce_0 #-}
-{-# NOINLINE happySpecReduce_1 #-}
-{-# NOINLINE happySpecReduce_2 #-}
-{-# NOINLINE happySpecReduce_3 #-}
-{-# NOINLINE happyReduce #-}
-{-# NOINLINE happyMonadReduce #-}
-{-# NOINLINE happyGoto #-}
-{-# NOINLINE happyFail #-}
-
--- end of Happy Template.


=====================================
utils/check-cpp/PreProcess.hs
=====================================
@@ -21,29 +21,14 @@ import Types
 
 -- ---------------------------------------------------------------------
 
+-- | Set parser options for parsing OPTIONS pragmas
+initPragState :: Lexer.ParserOpts -> StringBuffer -> RealSrcLoc -> PState PpState
+initPragState = Lexer.initPragState initPpState
+
+-- | Creates a parse state from a 'ParserOpts' value
+initParserState :: Lexer.ParserOpts -> StringBuffer -> RealSrcLoc -> PState PpState
+initParserState = Lexer.initParserState initPpState
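
For orientation, a sketch of how these entry points are typically driven on an
in-memory buffer. The helper name ppStateFor is hypothetical, the imports reflect the
module layout of recent GHC trees, and opts would come from initParserOpts applied to
the session's DynFlags; the sketch assumes it sits in this module, which already has
Lexer, PState and PpState in scope:

import GHC.Data.FastString (mkFastString)
import GHC.Data.StringBuffer (stringToStringBuffer)
import GHC.Types.SrcLoc (mkRealSrcLoc)

-- Build a PpState-carrying parser state for a source string,
-- starting at line 1, column 1 of the given file name.
ppStateFor :: Lexer.ParserOpts -> String -> FilePath -> PState PpState
ppStateFor opts source path =
  initParserState opts
                  (stringToStringBuffer source)
                  (mkRealSrcLoc (mkFastString path) 1 1)
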
 
--- initPpState :: PpState
--- initPpState =
---     PpState
---         { pp_defines = Map.empty
---         , pp_includes = Map.empty
---         , pp_include_stack = []
---         , pp_continuation = []
---         , pp_context = []
---         , pp_accepting = True
---         }
-
--- data PpState = PpState
---     { pp_defines :: !(Map String [String])
---     , pp_includes :: !(Map String StringBuffer)
---     , pp_include_stack :: ![Lexer.AlexInput]
---     , pp_continuation :: ![Located Token]
---     , pp_context :: ![Token] -- What preprocessor directive we are currently processing
---     , pp_accepting :: !Bool
---     }
---     deriving (Show)
-
--- deriving instance Show Lexer.AlexInput
 -- ---------------------------------------------------------------------
 
 data CppState
@@ -53,6 +38,9 @@ data CppState
 
 -- ---------------------------------------------------------------------
 
+lexer = ppLexer
+lexerDbg = ppLexerDbg
+
 ppLexer, ppLexerDbg :: Bool -> (Located Token -> PP a) -> PP a
 -- Use this instead of 'lexer' in GHC.Parser to dump the tokens for debugging.
 ppLexerDbg queueComments cont = ppLexer queueComments contDbg
@@ -119,11 +107,9 @@ processCppToks fs = do
 
 processCpp :: [FastString] -> PP ()
 processCpp fs = do
-    -- traceM $ "processCpp: fs=" ++ show fs
-    -- let s = cppInitial fs
     let s = cppInitial fs
     case parseDirective s of
-        Left err -> error $ show (err,s)
+        Left err -> error $ show (err, s)
         Right (CppInclude filename) -> do
             ppInclude filename
         Right (CppDefine name def) -> do
@@ -146,7 +132,6 @@ processCpp fs = do
             setAccepting True
             return ()
 
-    -- return (trace ("processCpp:s=" ++ show s) ())
     return ()
 
 -- ---------------------------------------------------------------------
@@ -237,44 +222,22 @@ ppInclude filename = do
             pushIncludeLoc origInput
             let loc = PsLoc (mkRealSrcLoc (mkFastString filename) 1 1) (BufPos 0)
             Lexer.setInput (Lexer.AI loc src)
-    return $ trace ("ppInclude:mSrc=[" ++ show mSrc ++ "]") ()
-
--- return $ trace ("ppInclude:filename=[" ++ filename ++ "]") ()
 
 ppDefine :: String -> String -> PP ()
 ppDefine name val = P $ \s ->
-    -- POk s{pp = (pp s){pp_defines = Set.insert (cleanTokenString def) (pp_defines (pp s))}} ()
-    POk s{pp = (pp s){pp_defines = Map.insert (trace ("ppDefine:def=[" ++ name ++ "]") (MacroName name Nothing)) val (pp_defines (pp s))}} ()
+    POk s{pp = (pp s){pp_defines = Map.insert (MacroName name Nothing) val (pp_defines (pp s))}} ()
 
 ppIsDefined :: String -> PP Bool
 ppIsDefined def = P $ \s ->
-    -- POk s (Map.member def (pp_defines (pp s)))
-    POk s (Map.member (trace ("ppIsDefined:def=[" ++ def ++ "]") (MacroName def Nothing)) (pp_defines (pp s)))
+    POk s (Map.member (MacroName def Nothing) (pp_defines (pp s)))
 
 ppIf :: String -> PP Bool
 ppIf str = P $ \s ->
-    -- -- POk s (Map.member def (pp_defines (pp s)))
-    -- POk s (Map.member (trace ("ppIsDefined:def=[" ++ def ++ "]") def) (pp_defines (pp s)))
     let
         s' = cppIf (pp s) str
      in
         POk s{pp = s'} (pp_accepting s')
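
Stripped of the P/PpState plumbing, the macro table these functions manipulate is a
Map keyed by macro name (and, eventually, arity). Below is a minimal standalone sketch
of the same define/ifdef behaviour, using Control.Monad.State from mtl and a simplified
stand-in for the MacroName type in GHC.Parser.PreProcess.Types; the real ppIf
additionally parses and evaluates the condition via cppIf, which the sketch does not
attempt:

import qualified Data.Map as Map
import Control.Monad.State (State, evalState, gets, modify)

-- Simplified stand-in for the real MacroName: a name plus an optional
-- parameter list (object-like vs function-like macros).
data MacroName = MacroName String (Maybe [String])
  deriving (Eq, Ord, Show)

type Defines = Map.Map MacroName String
type PPSim a = State Defines a

-- #define NAME VAL
ppDefineSim :: MacroName -> String -> PPSim ()
ppDefineSim name val = modify (Map.insert name val)

-- #ifdef NAME / #ifndef NAME
ppIsDefinedSim :: MacroName -> PPSim Bool
ppIsDefinedSim name = gets (Map.member name)

-- "#define FOO 1" followed by "#ifdef FOO" and "#ifdef BAR"
demo :: (Bool, Bool)
demo = evalState go Map.empty   -- evaluates to (True, False)
  where
    go = do
      ppDefineSim (MacroName "FOO" Nothing) "1"
      foo <- ppIsDefinedSim (MacroName "FOO" Nothing)
      bar <- ppIsDefinedSim (MacroName "BAR" Nothing)
      pure (foo, bar)
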
 
--- | Take a @FastString@ of the form "#define FOO\n" and strip off all but "FOO"
-cleanTokenString :: FastString -> String
-cleanTokenString fs = r
-  where
-    ss = dropWhile (\c -> not $ isSpace c) (unpackFS fs)
-    r = init ss
-
--- parseDefine :: FastString -> Maybe (String, [String])
--- parseDefine fs = r
---   where
---     -- r = Just (cleanTokenString s, "")
---     r = case parseCppParser cppDefinition (unpackFS fs) of
---         Left _ -> Nothing
---         Right v -> Just v
-
 -- =====================================================================
 
 {- | Do cpp initial processing, as per https://gcc.gnu.org/onlinedocs/cpp/Initial-processing.html




-- 
View it on GitLab: https://gitlab.haskell.org/ghc/ghc/-/commit/76d8e94361190d4b9dade8a70a9eff50205bb493
You're receiving this email because of your account on gitlab.haskell.org.



