[Git][ghc/ghc][wip/az/ghc-cpp] 2 commits: Tidy up before re-visiting the continuation mechanic

Alan Zimmerman (@alanz) gitlab at gitlab.haskell.org
Mon Sep 25 22:30:00 UTC 2023



Alan Zimmerman pushed to branch wip/az/ghc-cpp at Glasgow Haskell Compiler / GHC


Commits:
45e3a9ce by Alan Zimmerman at 2023-09-25T19:52:12+01:00
Tidy up before re-visiting the continuation mechanic

- - - - -
8a56ed7c by Alan Zimmerman at 2023-09-25T23:29:22+01:00
Switch preprocessor to continuation passing style

Proof of concept, needs tidying up

- - - - -
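
The second commit switches the prototype preprocessor from pulling tokens itself to wrapping the continuation that the lexer hands to the parser. As orientation, a minimal standalone sketch of that continuation-passing shape (a toy lexer over String, not GHC's P monad; lexWord and ppLexWord are made-up names):

import Data.Char (isSpace)

-- Toy parser monad: a function over the remaining input.
newtype P a = P {runP :: String -> Maybe (a, String)}

-- The lexer never returns a token; it hands each one to a continuation.
lexWord :: (String -> P a) -> P a
lexWord cont = P $ \s ->
    let (w, rest) = break isSpace (dropWhile isSpace s)
     in runP (cont w) rest

-- A preprocessing layer with the same shape: it inspects each token and
-- decides what to pass on to the parser's continuation.
ppLexWord :: (String -> P a) -> P a
ppLexWord cont = lexWord $ \w ->
    case w of
        ('#' : _) -> cont ("{- ignored " ++ w ++ " -}")
        _ -> cont w

The real ppLexer in utils/check-cpp/Main.hs below has the same shape,
Bool -> (Located Token -> P a) -> P a, but consults a stack of pending
directives before deciding what to feed the continuation.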


3 changed files:

- compiler/GHC/Parser.y
- compiler/GHC/Parser/Lexer.x
- utils/check-cpp/Main.hs


Changes:

=====================================
compiler/GHC/Parser.y
=====================================
@@ -758,8 +758,8 @@ TH_QQUASIQUOTE  { L _ (ITqQuasiQuote _) }
 'defined'       { L _ ITcppDefined }
 
 %monad { P } { >>= } { return }
--- %lexer { (lexer True) } { L _ ITeof }
-%lexer { (lexerDbg True) } { L _ ITeof }
+%lexer { (lexer True) } { L _ ITeof }
+-- %lexer { (lexerDbg True) } { L _ ITeof }
   -- Replace 'lexer' above with 'lexerDbg'
   -- to dump the tokens fed to the parser.
 %tokentype { (Located Token) }
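
The Parser.y hunk flips the default back from the debugging lexer to the
plain one; lexerDbg dumps each token before handing it to the parser. Its
definition is essentially a tracing wrapper around the continuation, roughly
of this shape (mirroring ppLexerDbg in utils/check-cpp/Main.hs below; the
exact code in Lexer.x may differ):

-- Inside GHC.Parser.Lexer, where lexer, trace and unLoc are in scope;
-- a sketch of the expected shape, not a verbatim copy.
lexerDbg :: Bool -> (Located Token -> P a) -> P a
lexerDbg queueComments cont = lexer queueComments contDbg
  where
    contDbg tok = trace ("token: " ++ show (unLoc tok)) (cont tok)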


=====================================
compiler/GHC/Parser/Lexer.x
=====================================
@@ -1288,14 +1288,14 @@ begin code _span _str _len _buf2 = do pushLexState code; lexToken
 pop :: Action
 pop _span _buf _len _buf2 =
   do _ <- popLexState
-     -- lexToken
-     trace "pop" $ do lexToken
+     lexToken
+     -- trace "pop" $ do lexToken
 
 cppToken :: Int ->  Token -> Action
 cppToken code t span _buf _len _buf2 =
   do pushLexState code
-     -- return (L span t)
-     trace ("cppToken:" ++ show (code, t)) $ do return (L span t)
+     return (L span t)
+     -- trace ("cppToken:" ++ show (code, t)) $ do return (L span t)
 
 -- See Note [Nested comment line pragmas]
 failLinePrag1 :: Action
@@ -2748,7 +2748,8 @@ data PState = PState {
 data PpState = PpState {
         pp_defines :: !(Set String),
         pp_pushed_back :: !(Maybe (Located Token)),
-        pp_context :: ![PpContext],
+        -- pp_context :: ![PpContext],
+        pp_context :: ![Token], -- What preprocessor directive we are currently processing
         pp_accepting :: !Bool
      }
     deriving (Show)
@@ -2979,12 +2980,12 @@ nextIsEOF = do
   return $ atEnd s
 
 pushLexState :: Int -> P ()
--- pushLexState ls = P $ \s@PState{ lex_state=l } -> POk s{lex_state=ls:l} ()
-pushLexState ls = P $ \s@PState{ lex_state= l } -> POk s{lex_state= trace ("pushLexState:" ++ show ls) ls:l} ()
+pushLexState ls = P $ \s@PState{ lex_state=l } -> POk s{lex_state=ls:l} ()
+-- pushLexState ls = P $ \s@PState{ lex_state= l } -> POk s{lex_state= trace ("pushLexState:" ++ show ls) ls:l} ()
 
 popLexState :: P Int
--- popLexState = P $ \s@PState{ lex_state=ls:l } -> POk s{ lex_state=l } ls
-popLexState = P $ \s@PState{ lex_state=ls:l } -> POk s{ lex_state= trace ("popLexState:" ++ show (ls,l)) l } ls
+popLexState = P $ \s@PState{ lex_state=ls:l } -> POk s{ lex_state=l } ls
+-- popLexState = P $ \s@PState{ lex_state=ls:l } -> POk s{ lex_state= trace ("popLexState:" ++ show (ls,l)) l } ls
 
 getLexState :: P Int
 getLexState = P $ \s@PState{ lex_state=ls:_ } -> POk s ls
@@ -3227,8 +3228,8 @@ disableHaddock opts = upd_bitmap (xunset HaddockBit)
 
 -- | Set parser options for parsing OPTIONS pragmas
 initPragState :: ParserOpts -> StringBuffer -> RealSrcLoc -> PState
--- initPragState options buf loc = (initParserState options buf loc)
-initPragState options buf loc = (initParserState options buf (trace ("initPragState:" ++ show bol) loc))
+initPragState options buf loc = (initParserState options buf loc)
+-- initPragState options buf loc = (initParserState options buf (trace ("initPragState:" ++ show bol) loc))
    { lex_state = [bol, option_prags, 0]
    }
 
@@ -3715,8 +3716,8 @@ lexToken = do
   inp@(AI loc1 buf) <- getInput
   sc <- getLexState
   exts <- getExts
-  -- case alexScanUser exts inp sc of
-  case alexScanUser exts inp (trace ("lexToken:state=" ++ show sc) sc) of
+  case alexScanUser exts inp sc of
+  -- case alexScanUser exts inp (trace ("lexToken:state=" ++ show sc) sc) of
     AlexEOF -> do
         let span = mkPsSpan loc1 loc1
         lc <- getLastLocIncludingComments
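
The PpState change above turns pp_context into a plain stack of the directive
tokens currently being processed. A standalone sketch of that stack discipline
(using Control.Monad.State instead of GHC's P monad, with made-up token names,
so purely illustrative):

import Control.Monad.State
import Data.Maybe (listToMaybe)

data Tok = TDefine | TIfdef | TName String | TOther String
    deriving (Show)

-- The state is just the directive-token stack, standing in for pp_context.
type PP = State [Tok]

pushCtx :: Tok -> PP ()
pushCtx t = modify (t :)

popCtx :: PP ()
popCtx = modify (drop 1) -- popping an empty stack is a no-op, as in popContext

peekCtx :: PP (Maybe Tok)
peekCtx = gets listToMaybe -- Nothing plays the role of peekContext's ITeof fallback

-- Handle one incoming token by looking at the top of the stack, mirroring
-- getCppState/ppLexer in the check-cpp prototype below.
step :: Tok -> PP String
step TDefine = pushCtx TDefine >> pure "swallow: start of #define"
step TIfdef = pushCtx TIfdef >> pure "swallow: start of #ifdef"
step t = do
    top <- peekCtx
    case top of
        Just TDefine -> popCtx >> pure ("record a definition for " ++ show t)
        Just TIfdef -> popCtx >> pure ("test whether " ++ show t ++ " is defined")
        _ -> pure ("pass through " ++ show t)

For example, evalState (mapM step [TDefine, TName "FOO", TIfdef, TName "FOO",
TOther "x"]) [] walks the t0 test input from the check-cpp Main through this
model.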


=====================================
utils/check-cpp/Main.hs
=====================================
@@ -1,96 +1,100 @@
+-- Note: this file formatted with fourmolu
 
 import Control.Monad.IO.Class
+import Data.Data hiding (Fixity)
+import qualified Data.Set as Set
+import Debug.Trace (trace)
+import GHC
 import qualified GHC.Data.EnumSet as EnumSet
-import qualified GHC.LanguageExtensions as LangExt
 import GHC.Data.FastString
 import GHC.Data.Maybe
-import Data.Set (Set)
-import qualified Data.Set as Set
-import GHC.Data.OrdList
 import GHC.Data.StringBuffer
-import GHC.Types.Error
-import GHC.Types.Unique.FM
-import GHC.Utils.Error
-import GHC.Utils.Misc (readHexSignificandExponentPair, readSignificandExponentPair)
-import GHC.Utils.Outputable
-import GHC.Utils.Panic
-
-import GHC
 import GHC.Driver.Config.Parser
 import GHC.Driver.Errors.Types
-import qualified GHC.Driver.Session as GHC
-import GHC.Hs
-import GHC.Hs.Doc
-import GHC.Types.Basic (InlineSpec (..), RuleMatchInfo (..))
-import GHC.Types.SourceText
-import GHC.Types.SrcLoc
-import qualified Control.Monad.IO.Class as GHC
-import qualified GHC.Data.FastString   as GHC
-import qualified GHC.Data.StringBuffer as GHC
-import qualified GHC.Driver.Config.Parser as GHC
-import qualified GHC.Driver.Env        as GHC
 import qualified GHC.Driver.Errors.Types as GHC
-import qualified GHC.Driver.Phases     as GHC
-import qualified GHC.Driver.Pipeline   as GHC
-import qualified GHC.Fingerprint.Type  as GHC
-import qualified GHC.Parser.Lexer      as GHC
-import qualified GHC.Settings          as GHC
-import qualified GHC.Types.Error       as GHC
-import qualified GHC.Types.SourceError as GHC
-import qualified GHC.Types.SourceFile  as GHC
-import qualified GHC.Types.SrcLoc      as GHC
-import qualified GHC.Utils.Error       as GHC
-import qualified GHC.Utils.Fingerprint as GHC
-import qualified GHC.Utils.Outputable  as GHC
-
-import GHC.Parser.CharClass
-
-import Debug.Trace (trace)
-import GHC.Driver.Flags
-import GHC.Parser.Annotation
-import GHC.Parser.Errors.Basic
+import qualified GHC.Driver.Session as GHC
+import GHC.Hs.Dump
+import qualified GHC.LanguageExtensions as LangExt
 import GHC.Parser.Errors.Ppr ()
-import GHC.Parser.Errors.Types
-import GHC.Parser.Lexer (P (..), Token (..), PState(..), PpState(..), PpContext(..), ParseResult(..) )
+import GHC.Parser.Lexer (P (..), PState (..), ParseResult (..), PpState (..), Token (..))
+import qualified GHC.Parser.Lexer as GHC
 import qualified GHC.Parser.Lexer as Lexer
-import GHC.Prelude
+import GHC.Types.Error
+import GHC.Types.SrcLoc
+import GHC.Utils.Error
+import GHC.Utils.Outputable
 
--- =====================================================================
--- Temporary home until moved into Parser/Preprocessor
--- data PpState = PpState
---     { pp_defines :: !(Set.Set String)
---     , pp_pushed_back :: !(Maybe Token)
---     , pp_context :: ![PpContext]
---     , pp_accepting :: !Bool
---     }
---     deriving (Show)
+-- ---------------------------------------------------------------------
 
--- data PpContext = PpContextIf [Token]
---     deriving (Show)
+showAst :: (Data a) => a -> String
+showAst ast =
+    showSDocUnsafe
+        $ showAstData BlankSrcSpanFile NoBlankEpAnnotations ast
 
--- initPpState :: PpState
--- initPpState = PpState{pp_defines = Set.empty, pp_pushed_back = Nothing, pp_context = [], pp_accepting = True}
+-- =====================================================================
 
 ppLexer, ppLexerDbg :: Bool -> (Located Token -> P a) -> P a
 -- Use this instead of 'lexer' in GHC.Parser to dump the tokens for debugging.
 ppLexerDbg queueComments cont = ppLexer queueComments contDbg
   where
     contDbg tok = trace ("pptoken: " ++ show (unLoc tok)) (cont tok)
-ppLexer _queueComments cont = do
-    tok <- ppLexToken
-    trace ("ppLexer:" ++ show (unLoc tok)) $ do
-     tok' <- case tok of
-        L _ ITcppIf -> preprocessIf tok
-        L _ ITcppDefine -> preprocessDefine tok
-        L _ ITcppIfdef -> preprocessIfDef tok
-        L _ ITcppElse -> preprocessElse tok
-        L _ ITcppEndif -> preprocessEnd tok
-        L l _ -> do
-            accepting <- getAccepting
-            if accepting
-                then return tok
-                else return (L l (ITcppIgnored [tok]))
-     cont tok'
+
+-- NOTE: instead of pulling tokens and calling cont, consider putting
+-- this inside Lexer.lexer, much like the queueComments stuff
+-- That sorts out
+--    - ALR
+--    - queueing comments
+-- ppLexer _queueComments cont = do
+--     tok <- ppLexToken
+--     trace ("ppLexer:" ++ show (unLoc tok)) $ do
+--         tok' <- case tok of
+--             L _ ITcppIf -> preprocessIf tok
+--             L _ ITcppDefine -> preprocessDefine tok
+--             L _ ITcppIfdef -> preprocessIfDef tok
+--             L _ ITcppElse -> preprocessElse tok
+--             L _ ITcppEndif -> preprocessEnd tok
+--             L l _ -> do
+--                 accepting <- getAccepting
+--                 if accepting
+--                     then return tok
+--                     else return (L l (ITcppIgnored [tok]))
+--         cont tok'
+
+ppLexer queueComments cont =
+    Lexer.lexer
+        queueComments
+        ( \tk@(L lt _) ->
+            let
+                contInner t = (trace ("ppLexer: tk=" ++ show (unLoc tk, unLoc t)) cont) t
+                contPush = pushContext (unLoc tk) >> contInner (L lt (ITcppIgnored [tk]))
+             in
+                case tk of
+                    L _ ITcppDefine -> contPush
+                    L _ ITcppIf -> contPush
+                    L _ ITcppIfdef -> contPush
+                    L _ ITcppElse -> do
+                      tk' <- preprocessElse tk
+                      contInner tk'
+                    L _ ITcppEndif -> do
+                      tk' <- preprocessEnd tk
+                      contInner tk'
+                    L l tok -> do
+                        state <- getCppState
+                        case (trace ("CPP state:" ++ show state) state) of
+                            CppIgnoring -> contInner (L l (ITcppIgnored [tk]))
+                            CppInDefine -> do
+                                ppDefine (trace ("ppDefine:" ++ show tok) (show tok))
+                                popContext
+                                contInner (L l (ITcppIgnored [tk]))
+                            CppInIfdef -> do
+                                defined <- ppIsDefined (show tok)
+                                if defined
+                                    then setAccepting True
+                                    else setAccepting False
+                                popContext
+                                contInner (L l (ITcppIgnored [tk]))
+                            _ -> contInner tk
+        )
 
 -- Swallow tokens until ITcppEndif
 preprocessIf :: Located Token -> P (Located Token)
@@ -98,17 +102,18 @@ preprocessIf tok = go [tok]
   where
     go :: [Located Token] -> P (Located Token)
     go acc = do
-        tok <- ppLexToken
-        case tok of
-            L l ITcppEndif -> return $ L l (ITcppIgnored (reverse (tok : acc)))
-            _ -> go (tok : acc)
-
-preprocessDefine :: Located Token -> P (Located Token)
-preprocessDefine tok@(L l ITcppDefine) = do
-    L l cond <- ppLexToken
-    ppDefine (show cond)
-    return (L l (ITcppIgnored [tok]))
-preprocessDefine tok = return tok
+        tok' <- ppLexToken
+        case tok' of
+            L l ITcppEndif -> return $ L l (ITcppIgnored (reverse (tok' : acc)))
+            _ -> go (tok' : acc)
+
+-- preprocessDefine :: Located Token -> P (Located Token)
+-- preprocessDefine tok@(L l ITcppDefine) = do
+--     L ll cond <- ppLexToken
+--     -- ppDefine (show cond)
+--     ppDefine (trace ("ppDefine:" ++ show cond) (show cond))
+--     return (L l (ITcppIgnored [tok, L ll cond]))
+-- preprocessDefine tok = return tok
 
 preprocessIfDef :: Located Token -> P (Located Token)
 preprocessIfDef tok@(L l ITcppIfdef) = do
@@ -116,10 +121,10 @@ preprocessIfDef tok@(L l ITcppIfdef) = do
     defined <- ppIsDefined (show cond)
     if defined
         then do
-            pushContext (PpContextIf [tok])
+            pushContext ITcppIfdef
             setAccepting True
         else setAccepting False
-    return (L l (ITcppIgnored [tok]))
+    return (L l (ITcppIgnored [tok, L ll cond]))
 preprocessIfDef tok = return tok
 
 preprocessElse :: Located Token -> P (Located Token)
@@ -137,12 +142,51 @@ preprocessEnd tok@(L l _) = do
 -- ---------------------------------------------------------------------
 -- Preprocessor state functions
 
+data CppState
+    = CppIgnoring
+    | CppInDefine
+    | CppInIfdef
+    | CppNormal
+    deriving (Show)
+
+getCppState :: P CppState
+getCppState = do
+    context <- peekContext
+    accepting <- getAccepting
+    case context of
+        ITcppDefine -> return CppInDefine
+        ITcppIfdef -> return CppInIfdef
+        _ ->
+            if accepting
+                then return CppNormal
+                else return CppIgnoring
+
 -- pp_context stack start -----------------
 
-pushContext :: PpContext -> P ()
+pushContext :: Token -> P ()
 pushContext new =
     P $ \s -> POk s{pp = (pp s){pp_context = new : pp_context (pp s)}} ()
 
+popContext :: P ()
+popContext =
+    P $ \s ->
+        let
+            new_context = case pp_context (pp s) of
+                [] -> []
+                (_ : t) -> t
+         in
+            POk s{pp = (pp s){pp_context = new_context}} ()
+
+peekContext :: P Token
+peekContext =
+    P $ \s ->
+        let
+            r = case pp_context (pp s) of
+                [] -> ITeof -- Anything really, for now, except a CPP one
+                (h : _) -> h
+         in
+            POk s r
+
 setAccepting :: Bool -> P ()
 setAccepting on =
     P $ \s -> POk s{pp = (pp s){pp_accepting = on}} ()
@@ -160,11 +204,11 @@ pushBack tok = P $ \s ->
         then
             PFailed
                 $ s
-                    -- { errors =
-                    --     ("pushBack: " ++ show tok ++ ", we already have a token:" ++ show (pp_pushed_back (pp s)))
-                    --         : errors s
-                    -- }
-        else
+        else -- { errors =
+        --     ("pushBack: " ++ show tok ++ ", we already have a token:" ++ show (pp_pushed_back (pp s)))
+        --         : errors s
+        -- }
+
             let
                 ppVal = pp s
                 pp' = ppVal{pp_pushed_back = Just tok}
@@ -182,11 +226,11 @@ ppLexToken :: P (Located Token)
 ppLexToken = do
     mtok <- getPushBack
     case mtok of
-      Just t -> return t
-      Nothing -> do
-        -- TODO: do we need this? Issues with ALR, comments, etc being bypassed
-        (L sp tok) <- Lexer.lexToken
-        return (L (mkSrcSpanPs sp) tok)
+        Just t -> return t
+        Nothing -> do
+            -- TODO: do we need this? Issues with ALR, comments, etc being bypassed
+            (L sp tok) <- Lexer.lexToken
+            return (L (mkSrcSpanPs sp) tok)
 
 -- pp_pushed_back token end ----------------
 
@@ -208,82 +252,85 @@ type LibDir = FilePath
 -- parseString :: LibDir -> String -> IO (WarningMessages, Either ErrorMessages [Located Token])
 parseString :: LibDir -> String -> IO [Located Token]
 parseString libdir str = ghcWrapper libdir $ do
-  dflags0 <- initDynFlags
-  let dflags = dflags0 { extensionFlags = EnumSet.insert LangExt.GhcCpp (extensionFlags dflags0)}
-  let pflags = initParserOpts dflags
-  -- return $ strParser str dflags "fake_test_file.hs"
-  liftIO $ putStrLn "-- parsing ----------"
-  liftIO $ putStrLn str
-  liftIO $ putStrLn "---------------------"
-  return $ strGetToks pflags "fake_test_file.hs" str
+    dflags0 <- initDynFlags
+    let dflags = dflags0{extensionFlags = EnumSet.insert LangExt.GhcCpp (extensionFlags dflags0)}
+    let pflags = initParserOpts dflags
+    -- return $ strParser str dflags "fake_test_file.hs"
+    liftIO $ putStrLn "-- parsing ----------"
+    liftIO $ putStrLn str
+    liftIO $ putStrLn "---------------------"
+    return $ strGetToks pflags "fake_test_file.hs" str
 
 strGetToks :: Lexer.ParserOpts -> FilePath -> String -> [Located Token]
 strGetToks popts filename str = reverse $ lexAll pstate
- where
-  pstate = Lexer.initParserState popts buf loc
-  loc  = mkRealSrcLoc (mkFastString filename) 1 1
-  buf = stringToStringBuffer str
-  -- cpp_enabled = Lexer.GhcCppBit `Lexer.xtest` Lexer.pExtsBitmap popts
-
-  lexAll state = case unP (ppLexerDbg True return) state of
-                   POk _      t@(L _ ITeof) -> [t]
-                   POk state' t -> t : lexAll state'
-                                            -- (trace ("lexAll: " ++ show (unLoc t)) state')
-                   PFailed pst -> error $ "failed" ++ showErrorMessages (GHC.GhcPsMessage <$> GHC.getPsErrorMessages pst)
-                   _ -> [L (mkSrcSpanPs (last_loc state)) ITeof]
+  where
+    pstate = Lexer.initParserState popts buf loc
+    loc = mkRealSrcLoc (mkFastString filename) 1 1
+    buf = stringToStringBuffer str
+    -- cpp_enabled = Lexer.GhcCppBit `Lexer.xtest` Lexer.pExtsBitmap popts
+
+    lexAll state = case unP (ppLexerDbg True return) state of
+        POk _ t@(L _ ITeof) -> [t]
+        POk state' t -> t : lexAll state'
+        -- (trace ("lexAll: " ++ show (unLoc t)) state')
+        PFailed pst -> error $ "failed" ++ showErrorMessages (GHC.GhcPsMessage <$> GHC.getPsErrorMessages pst)
+
+-- _ -> [L (mkSrcSpanPs (last_loc state)) ITeof]
 
 showErrorMessages :: Messages GhcMessage -> String
 showErrorMessages msgs =
-  renderWithContext defaultSDocContext
-    $ vcat
-    $ pprMsgEnvelopeBagWithLocDefault
-    $ getMessages
-    $ msgs
-
--- | Parse a file, using the emulated haskell parser, returning the
--- resulting tokens only
-strParser :: String         -- ^ Haskell module source text (full Unicode is supported)
-          -> DynFlags       -- ^ the flags
-          -> FilePath       -- ^ the filename (for source locations)
-          -> (WarningMessages, Either ErrorMessages [Located Token])
-
+    renderWithContext defaultSDocContext
+        $ vcat
+        $ pprMsgEnvelopeBagWithLocDefault
+        $ getMessages
+        $ msgs
+
+{- | Parse a file, using the emulated haskell parser, returning the
+resulting tokens only
+-}
+strParser ::
+    -- | Haskell module source text (full Unicode is supported)
+    String ->
+    -- | the flags
+    DynFlags ->
+    -- | the filename (for source locations)
+    FilePath ->
+    (WarningMessages, Either ErrorMessages [Located Token])
 strParser str dflags filename =
-   let
-       loc  = mkRealSrcLoc (mkFastString filename) 1 1
-       buf  = stringToStringBuffer str
-   in
-   case unP parseModuleNoHaddock (Lexer.initParserState (initParserOpts dflags) buf loc) of
-
-     PFailed pst ->
-         let (warns,errs) = Lexer.getPsMessages pst in
-         (GhcPsMessage <$> warns, Left $ GhcPsMessage <$> errs)
-
-     POk pst rdr_module ->
-         let (warns,_) = Lexer.getPsMessages pst in
-         (GhcPsMessage <$> warns, Right rdr_module)
-
-
-initDynFlags :: GHC.GhcMonad m => m GHC.DynFlags
+    let
+        loc = mkRealSrcLoc (mkFastString filename) 1 1
+        buf = stringToStringBuffer str
+     in
+        case unP parseModuleNoHaddock (Lexer.initParserState (initParserOpts dflags) buf loc) of
+            PFailed pst ->
+                let (warns, errs) = Lexer.getPsMessages pst
+                 in (GhcPsMessage <$> warns, Left $ GhcPsMessage <$> errs)
+            POk pst rdr_module ->
+                let (warns, _) = Lexer.getPsMessages pst
+                 in (GhcPsMessage <$> warns, Right rdr_module)
+
+initDynFlags :: (GHC.GhcMonad m) => m GHC.DynFlags
 initDynFlags = do
-  -- Based on GHC backpack driver doBackPack
-  dflags0         <- GHC.getSessionDynFlags
-  let parser_opts0 = initParserOpts dflags0
-  -- (_, src_opts)   <- GHC.liftIO $ GHC.getOptionsFromFile parser_opts0 file
-  -- (dflags1, _, _) <- GHC.parseDynamicFilePragma dflags0 src_opts
-  -- Turn this on last to avoid T10942
-  let dflags2 = dflags0 `GHC.gopt_set` GHC.Opt_KeepRawTokenStream
-  -- Prevent parsing of .ghc.environment.* "package environment files"
-  (dflags3, _, _) <- GHC.parseDynamicFlagsCmdLine
-    dflags2
-    [GHC.noLoc "-hide-all-packages"]
-  _ <- GHC.setSessionDynFlags dflags3
-  return dflags3
+    -- Based on GHC backpack driver doBackPack
+    dflags0 <- GHC.getSessionDynFlags
+    -- let parser_opts0 = initParserOpts dflags0
+    -- (_, src_opts)   <- GHC.liftIO $ GHC.getOptionsFromFile parser_opts0 file
+    -- (dflags1, _, _) <- GHC.parseDynamicFilePragma dflags0 src_opts
+    -- Turn this on last to avoid T10942
+    let dflags2 = dflags0 `GHC.gopt_set` GHC.Opt_KeepRawTokenStream
+    -- Prevent parsing of .ghc.environment.* "package environment files"
+    (dflags3, _, _) <-
+        GHC.parseDynamicFlagsCmdLine
+            dflags2
+            [GHC.noLoc "-hide-all-packages"]
+    _ <- GHC.setSessionDynFlags dflags3
+    return dflags3
 
 -- | Internal function. Default runner of GHC.Ghc action in IO.
 ghcWrapper :: LibDir -> GHC.Ghc a -> IO a
 ghcWrapper libdir a =
-  GHC.defaultErrorHandler GHC.defaultFatalMessager GHC.defaultFlushOut
-    $ GHC.runGhc (Just libdir) a
+    GHC.defaultErrorHandler GHC.defaultFatalMessager GHC.defaultFlushOut
+        $ GHC.runGhc (Just libdir) a
 
 -- ---------------------------------------------------------------------
 
@@ -337,12 +384,11 @@ happyAccept _j tk _st _sts stk =
 
 happyShift :: Int -> Int -> Located Token -> Int -> [Int] -> [Located Token] -> P [Located Token]
 happyShift new_state _i tk st sts stk = do
-    happyNewToken new_state (st:sts) (tk:stk)
+    happyNewToken new_state (st : sts) (tk : stk)
+
 -- happyShift new_state i tk st sts stk =
 --      happyNewToken new_state (HappyCons (st) (sts)) ((happyInTok (tk))`HappyStk`stk)
 
-
-
 happyFail :: [String] -> Int -> Located Token -> p2 -> p3 -> p4 -> P a
 happyFail explist i tk _old_st _ _stk =
     trace ("failing" ++ show explist)
@@ -363,16 +409,27 @@ happyError = Lexer.srcParseFail
 -- =====================================================================
 -- ---------------------------------------------------------------------
 
+printToks :: Int -> [Located Token] -> IO ()
+printToks indent toks = mapM_ go toks
+  where
+    go (L _ (ITcppIgnored ts)) = do
+      putStr "ITcppIgnored ["
+      printToks (indent + 4) ts
+      putStrLn "]"
+    go (L _ tk) = putStrLn (show tk)
+
 -- Testing
 
+libdirNow :: LibDir
+libdirNow = "/home/alanz/mysrc/git.haskell.org/worktree/bisect/_build/stage1/lib"
 
-libdir = "/home/alanz/mysrc/git.haskell.org/worktree/bisect/_build/stage1/lib"
 t0 :: IO ()
-
 t0 = do
-  tks <- parseString libdir "#define FOO\n#ifdef FOO\nx = 1\n#endif\n"
-  putStrLn $ show (reverse $ map unLoc tks)
+    tks <- parseString libdirNow "#define FOO\n#ifdef FOO\nx = 1\n#endif\n"
+    -- putStrLn $ show (reverse $ map unLoc tks)
+    printToks 0 (reverse tks)
 
+t1 :: IO ()
 t1 = do
-  tks <- parseString libdir "data X = X\n"
-  putStrLn $ show (reverse $ map unLoc tks)
+    tks <- parseString libdirNow "data X = X\n"
+    putStrLn $ show (reverse $ map unLoc tks)
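
Reading the new ppLexer above against the t0 test input
"#define FOO\n#ifdef FOO\nx = 1\n#endif\n" gives, by hand (a reading of the
code, not captured output):

  ITcppDefine   pp_context []             push ITcppDefine, emit ITcppIgnored
  FOO           pp_context [ITcppDefine]  CppInDefine: ppDefine (show tok), pop, emit ITcppIgnored
  ITcppIfdef    pp_context []             push ITcppIfdef, emit ITcppIgnored
  FOO           pp_context [ITcppIfdef]   CppInIfdef: ppIsDefined, setAccepting, pop, emit ITcppIgnored
  x = 1 tokens  pp_context []             CppNormal: passed through to the parser
  ITcppEndif    pp_context []             handed to preprocessEnd (not shown in this hunk)

With libdirNow pointed at a local stage1 build, t0 runs this input through
ppLexerDbg, and printToks then renders each ITcppIgnored wrapper together with
the tokens it swallowed.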



View it on GitLab: https://gitlab.haskell.org/ghc/ghc/-/compare/80702e09f71abbb5c0927a94717f3d877c37e43c...8a56ed7c32a1d76f255ef3e66935977363aa7a23
